swift 自定義蒙層相機

直接上效果
21567495249_.pic.jpg

自定義相機這個就沒得說了集成的AVFoundation 百度都有

直接說蒙層吧
//繪製遮罩層

 // Draws the dimmed overlay with a transparent cut-out window in the middle.
 // Technique: append a reversed inner path to the full-screen path so the
 // even-odd fill of the resulting CAShapeLayer mask leaves the inner rect clear.
 func drawCoverView() {
    // Full-screen view dimmed to 50% black; the mask below punches the hole.
    let view = UIView(frame: self.view.bounds)
    view.backgroundColor = .black
    view.alpha = 0.5
    self.view.addSubview(view)
    // Outer path: whole screen.
    let bpath = UIBezierPath(roundedRect: self.view.bounds,cornerRadius: 0)
    // Inner path: the centered capture window (photoWidth x photoHeigth).
    // NOTE(review): relies on helpers horizontally(viewWidth:) / verticalCentralization(viewHeight:)
    // defined elsewhere in the project — presumably they center the rect; confirm.
    let bpath2 = UIBezierPath(roundedRect: CGRect(x: horizontally(viewWidth: photoWidth), y: verticalCentralization(viewHeight: photoHeigth), width: photoWidth, height: photoHeigth), cornerRadius: 0)
    // Reversing the inner path flips its winding, so the overlap is excluded from the fill.
    bpath.append(bpath2.reversing())
    let shapeLayer = CAShapeLayer.init()
    shapeLayer.path = bpath.cgPath
    view.layer.mask = shapeLayer
    
}

關鍵點
創建一個全屏view
bpath 先繪製一個全屏的蒙層
bpath2 通過reversing()方法反向繪製透明的框(鏤空區域)
這個是最簡單粗暴的玩法

import UIKit
import AVFoundation

/// Custom camera view controller with a dimmed overlay and a clear, centered
/// capture window (sized for an ID card: width = screen - 40, aspect 1.6).
/// On shutter it crops the framed region out of the still image, normalizes
/// its orientation, and hands the result back via `callback`, then pops itself.
/// NOTE(review): uses AVCaptureStillImageOutput and UIApplication.openURL,
/// both deprecated since iOS 10 — consider AVCapturePhotoOutput / open(_:options:completionHandler:).
class ScannerVC: UIViewController {
    var back_but:UIButton?
    var photoBut:UIButton?
    var lightBut:UIButton?
    // Receives the cropped, orientation-fixed image when the shutter fires.
    var callback: ((UIImage)->Void)?
    
//
    // Capture device — usually the front or back camera, or the microphone (audio input).
    var device:AVCaptureDevice?
    
    // AVCaptureDeviceInput represents the input device; it is initialized from an AVCaptureDevice.
    var input:AVCaptureDeviceInput?
    
    
    // Metadata output, created once capture starts.
    // NOTE(review): created in customCamera() but never added to the session — apparently unused.
    var output:AVCaptureMetadataOutput?
    
    // Still-image output used by shutterCamera(). (Deprecated API — see class note.)
    var  ImageOutPut:AVCaptureStillImageOutput?
    
    // Session: ties input and output together and drives the capture device (camera).
    var  session:AVCaptureSession?
    
    // Preview layer that renders the captured frames in real time.
    var previewLayer:AVCaptureVideoPreviewLayer?

    
    // Whether camera permission allows capture (set in viewDidLoad).
    var canCa = false
    
    var imageView:UIImageView?
    var image:UIImage?

    // Semi-transparent black mask layer.
    // NOTE(review): assigned in customCamera() but never used afterwards.
    var maskLayer:CAShapeLayer?
    // Effective-area frame layer. NOTE(review): never assigned or used.
    var effectiveRectLayer: CAShapeLayer?
    
    // Capture-window size: full width minus 40pt margin, 1.6:1 aspect (ID-card-like).
     var photoWidth = K_Screen_width-40
     var photoHeigth = Int(Double(K_Screen_width-40) / 1.6)
    
    
    // Green square shown briefly at the tap point while focusing.
    var focusView: UIView?
    
    // Torch state toggled by light().
    var isLightOn = false


    override func viewDidLoad() {
        super.viewDidLoad()
        self.view.backgroundColor = .black
        // Overlay first, then buttons/labels on top; camera layer is inserted at index 0 later.
         drawCoverView()
        createView()
        canCa = canUserCamear()
        if(canCa){
            customUI()
            customCamera()
        }
    }


    /// Draws the dimmed overlay with a transparent centered window.
    /// Appending the reversed inner path to the full-screen path makes the
    /// CAShapeLayer mask exclude the inner rect (even-odd style cut-out).
     func drawCoverView() {
        let view = UIView(frame: self.view.bounds)
        view.backgroundColor = .black
        view.alpha = 0.5
        self.view.addSubview(view)
        let bpath = UIBezierPath(roundedRect: self.view.bounds,cornerRadius: 0)
        let bpath2 = UIBezierPath(roundedRect: CGRect(x: horizontally(viewWidth: photoWidth), y: verticalCentralization(viewHeight: photoHeigth), width: photoWidth, height: photoHeigth), cornerRadius: 0)
        bpath.append(bpath2.reversing())
        let shapeLayer = CAShapeLayer.init()
        shapeLayer.path = bpath.cgPath
        view.layer.mask = shapeLayer
        
    }
    /// Sets up the tap-to-focus UI: a hidden green square plus a tap gesture
    /// on the root view that triggers focusGesture(gesture:).
    func customUI(){
    
        focusView = UIView(frame: CGRect(x: 0, y: 0, width: 70, height: 70))
        focusView?.layer.borderWidth = 1.0
        focusView?.layer.borderColor = UIColor.green.cgColor
        focusView?.backgroundColor = .clear
        focusView?.isHidden = true
        self.view.addSubview(focusView!)
//        set up the tap gesture
        let tapGesture = UITapGestureRecognizer(target: self, action: #selector(focusGesture(gesture:)))
        self.view.addGestureRecognizer(tapGesture)
    }

    /// Tap handler: converts the tap location and focuses there.
    @objc func focusGesture(gesture:UITapGestureRecognizer){
        let point = gesture.location(in: gesture.view)
        focusAtPoint(point: point)
    }
    /// Locks the device, sets focus and exposure point of interest, then shows
    /// a brief zoom-in/out animation of the focus square at the tap point.
    /// NOTE(review): the view-to-device coordinate mapping (x: y/h, y: 1-x/w)
    /// assumes a portrait UI over the landscape-native sensor — confirm on device.
    func focusAtPoint(point:CGPoint){
        let size  = self.view.bounds.size
        let focusPorint = CGPoint(x: point.y / size.height, y: 1-point.x/size.width)
        do{
            try device?.lockForConfiguration()
            // Focus point of interest.
            if((self.device?.isFocusModeSupported(AVCaptureDevice.FocusMode.autoFocus))!){
                self.device?.focusPointOfInterest = focusPorint
                self.device?.focusMode = AVCaptureDevice.FocusMode.autoFocus
            }
            // Exposure point of interest.
            if((self.device?.isExposureModeSupported(AVCaptureDevice.ExposureMode.autoExpose))!){
                self.device?.exposurePointOfInterest = focusPorint
                self.device?.exposureMode = AVCaptureDevice.ExposureMode.autoExpose
            }
            self.device?.unlockForConfiguration()
            focusView?.center = point
            focusView?.isHidden = false
            UIView.animate(withDuration: 0.3, animations: {
                self.focusView?.transform = CGAffineTransform(scaleX: 1.25, y: 1.25)
            }) { (finished) in
                UIView.animate(withDuration: 0.5, animations: {
                    self.focusView?.transform = CGAffineTransform.identity
                }, completion: { (finished) in
                    self.focusView?.isHidden = true
                })
            }
            
        }catch{
            return
        }
        
    }
    /// Camera initialization: device → input → outputs → session → preview layer → start.
    func customCamera()  {
        maskLayer = CAShapeLayer.init()
        self.view.backgroundColor = .white
        //  AVMediaType.video means self.device is a video device; defaults to the back camera.
        self.device = AVCaptureDevice.default(for: AVMediaType.video)
        // Initialize the input from the device.
        do {
             self.input = try AVCaptureDeviceInput(device: self.device!)
        }catch {
            print(error)
            return
        }
//            self.input = AVCaptureDeviceInput.init(device: self.device!)
        // Create the output objects.
        // NOTE(review): self.output is never added to the session below — only ImageOutPut is.
        self.output = AVCaptureMetadataOutput.init()
        self.ImageOutPut = AVCaptureStillImageOutput.init()
        // Create the session that binds input and output together.
        self.session = AVCaptureSession.init()
        if((self.session?.canSetSessionPreset(AVCaptureSession.Preset.hd1920x1080))!){
            self.session?.sessionPreset = AVCaptureSession.Preset.hd1920x1080;

        }
        if(self.session!.canAddInput(self.input!)){
            self.session!.addInput(self.input!)
        }
        
        if(self.session!.canAddOutput(self.ImageOutPut!)){
            self.session!.addOutput(self.ImageOutPut!)
        }
        
        
            // The session drives the input's capture; the preview layer renders the frames.
        self.previewLayer = AVCaptureVideoPreviewLayer.init(session: session!)
        self.previewLayer?.frame = CGRect(x: 0, y: 0, width: K_Screen_width, height: K_Screen_height)
        self.previewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
        // Insert below the overlay/buttons added in viewDidLoad.
        self.view.layer.insertSublayer(self.previewLayer!, at: 0)
        
            // Start capturing.
        self.session?.startRunning()
        do{
            // NOTE(review): lockForConfiguration() returns Void, so the optional-chained
            // result is nil only when device itself is nil — for a valid device this
            // condition is always false and flashMode is never set. Likely a bug; the
            // intent was presumably "lock succeeded && flash supported".
            if(try self.device?.lockForConfiguration() ==  nil && self.device!.isFlashModeSupported(AVCaptureDevice.FlashMode.auto)){
                self.device?.flashMode = AVCaptureDevice.FlashMode.auto
                
            }
        }catch{
            print(error)
        }
        
            // Automatic white balance.
            // NOTE(review): whiteBalanceMode is set without a lockForConfiguration() in
            // effect, and unlockForConfiguration() is only called on the else branch —
            // the lock/unlock pairing here is unbalanced; verify against AVCaptureDevice docs.
        if(self.device!.isWhiteBalanceModeSupported(AVCaptureDevice.WhiteBalanceMode.autoWhiteBalance)){
            self.device?.whiteBalanceMode = AVCaptureDevice.WhiteBalanceMode.autoWhiteBalance
        }else{
            self.device?.unlockForConfiguration()
        }

    }
    


    /// Builds the static UI: back button, shutter button, hint label, torch
    /// button, and the eight white corner marks around the capture window.
    func createView()  {
        // NOTE(review): barHeight / K_Screen_width / K_Screen_height / bottomY and the
        // text-measuring helpers are project globals defined elsewhere.
        let topHight = Int(barHeight) + Int((self.navigationController?.navigationBar.frame.size.height)!);
        back_but = UIButton(type: .custom);
        let back=UIImage(named: "white_black");
        back_but?.frame =  CGRect(x: 20, y: CGFloat(topHight/2), width: (back?.size.width)!, height: (back?.size.height)!)
        back_but?.addTarget(self, action: #selector(backPage), for: .touchUpInside)
        back_but?.setBackgroundImage(back, for: .normal)
        
        photoBut = UIButton.init()
        photoBut?.addTarget(self, action: #selector(shutterCamera), for: .touchUpInside)
        photoBut?.setBackgroundImage(UIImage(named: "startBtn"), for: .normal)
        photoBut?.frame = CGRect(x: horizontally(viewWidth: 70), y:bottomY - 70, width: 70, height:70)
//        photoBut?.layer.cornerRadius = 35
        self.view.addSubview(photoBut!)
        self.view.addSubview(back_but!)
        
        
        
        // Hint label centered just above the capture window.
        let labele = UILabel();
        let width1 = ga_widthForComment(str: "請將身份證正面置入框中,注意光線", fontSize: 16)
        let height1 = ga_heightForComment(str: "請將身份證正面置入框中,注意光線", fontSize: 16, width: width1)
        labele.frame = CGRect(x: horizontally(viewWidth: Int(width1)), y: Int(K_Screen_height/2) - (photoHeigth/2) - Int(height1+10), width: Int(width1), height:  Int(height1))
        labele.text = "請將身份證正面置入框中,注意光線"
        labele.textColor = .white
        labele.font=UIFont.systemFont(ofSize: 16)
        self.view.addSubview(labele)
        
        let width2 = ga_widthForComment(str: "閃光燈", fontSize: 16)
        let height2 = ga_heightForComment(str: "閃光燈", fontSize: 16, width: width1)
        
        lightBut = UIButton(frame: CGRect(x: CGFloat(K_Screen_width -  Int(20 + width2)), y:  CGFloat(topHight/2), width: width2, height: height2))
        lightBut?.setTitle("閃光燈", for: .normal)
//        lightBut?.titleLabel?.textColor = .groupTableViewBackground
        lightBut?.setTitleColor(.groupTableViewBackground, for: .normal)
        lightBut?.titleLabel?.font = UIFont.systemFont(ofSize: 16)
        lightBut?.addTarget(self, action: #selector(light), for: .touchUpInside)
        

        self.view.addSubview(lightBut!)
        
        // Corner marks — start. Eight 2pt-thick white strips forming the four
        // L-shaped corners of the capture window (view2..view5 horizontal, view6..view9 vertical).
        let view2 = UIView(frame:CGRect(x: 18, y: Int(K_Screen_height/2) - (photoHeigth/2) - 4, width: 32, height: 2))
        view2.backgroundColor = .white
        self.view.addSubview(view2)
        
        let view3 = UIView(frame:CGRect(x: K_Screen_width - 50, y: Int(K_Screen_height/2) - (photoHeigth/2) - 4, width: 32, height: 2))
        view3.backgroundColor = .white
        self.view.addSubview(view3)
        
        
        
        let view4 = UIView(frame:CGRect(x: 18, y: Int(K_Screen_height/2) + (photoHeigth/2) + 2, width: 32, height: 2))
        view4.backgroundColor = .white
        self.view.addSubview(view4)
        
        let view5 = UIView(frame:CGRect(x: K_Screen_width - 50, y: Int(K_Screen_height/2) + (photoHeigth/2) + 2, width: 32, height: 2))
        view5.backgroundColor = .white
        self.view.addSubview(view5)
        
        
        let view6 = UIView(frame:CGRect(x: 16, y: Int(K_Screen_height/2) - (photoHeigth/2)-4, width: 2, height: 32))
        view6.backgroundColor = .white
        self.view.addSubview(view6)
        
        
        let view7 = UIView(frame:CGRect(x: K_Screen_width - 18, y: Int(K_Screen_height/2) - (photoHeigth/2)-4, width: 2, height: 32))
        view7.backgroundColor = .white
        self.view.addSubview(view7)
        
        
        let view8 = UIView(frame:CGRect(x: 16, y: Int(K_Screen_height/2) + (photoHeigth/2)-28, width: 2, height: 32))
        view8.backgroundColor = .white
        self.view.addSubview(view8)
        
        let view9 = UIView(frame:CGRect(x:  K_Screen_width - 18, y: Int(K_Screen_height/2) + (photoHeigth/2)-28, width: 2, height: 32))
        view9.backgroundColor = .white
        self.view.addSubview(view9)
        //--end---
    }
    
    /// Pops back to the previous screen.
    @objc func backPage(){
        self.navigationController?.popViewController(animated: true);
    }
    
    /// Camera permission check. Returns false (and shows a settings alert) when denied.
    /// NOTE(review): any other status — including .notDetermined — returns true without
    /// requesting access, so the first-ever launch proceeds before the user has answered
    /// the permission prompt. The final `return true` after the if/else is unreachable.
    func canUserCamear() -> Bool {
                let authStatus = AVCaptureDevice.authorizationStatus(for: AVMediaType.video)
        if(authStatus == AVAuthorizationStatus.denied){
//            let alertView = UIAlertView.init(title: "請打開相機權限", message: "設置-隱私-相機", delegate: self, cancelButtonTitle: "確定",otherButtonTitles: "取消");
//            alertView.show()
            let alertController = UIAlertController(title: " 請打開相機權限", message: "設置-隱私-相機", preferredStyle: .alert)
            let cancelAction = UIAlertAction(title: "取消", style: .cancel) { (UIAlertAction) in
                self.backPage()
            }
            let okAction = UIAlertAction(title: "確定", style: .default) { (UIAlertAction) in
                let url = URL(string: UIApplication.openSettingsURLString)
                if (UIApplication.shared.canOpenURL(url!)){
                    // NOTE(review): openURL is deprecated since iOS 10; prefer open(_:options:completionHandler:).
                    UIApplication.shared.openURL(url!)
                }
            }
            alertController.addAction(cancelAction)
            alertController.addAction(okAction)
            self.present(alertController, animated: true, completion: nil)
            
            
            return false
        }else{
            return true
        }
        return true
    }
  
    /// Toggles the torch and tints the button accordingly.
    /// NOTE(review): torch availability (device?.hasTorch / isTorchModeSupported)
    /// is not checked before setting torchMode — confirm this is safe on
    /// torch-less devices (e.g. simulator, some front cameras).
    @objc func light(){
        do{
            try device?.lockForConfiguration()
            if(!isLightOn){
                device?.torchMode = AVCaptureDevice.TorchMode.on
                isLightOn = true
//                self.lightBut?.titleLabel?.textColor = .green
                lightBut?.setTitleColor(.green, for: .normal)

            }else{
                device?.torchMode = AVCaptureDevice.TorchMode.off
                isLightOn = false
//                self.lightBut?.titleLabel?.textColor = .groupTableViewBackground
                lightBut?.setTitleColor(.groupTableViewBackground, for: .normal)

            }
            device?.unlockForConfiguration()
        }catch{
            return
        }
       
    }
    
    /// Shutter: captures a still frame, crops the capture-window region
    /// (scaled from screen coordinates into image pixels), fixes orientation,
    /// invokes `callback` with the result, and pops the controller.
    @objc func shutterCamera(){
        let videoConnection = self.ImageOutPut?.connection(with: AVMediaType.video)
        // NOTE(review): videoOrientation is set (via optional chaining) BEFORE the nil
        // check below; also `!(x != nil)` is just `x == nil` — the guard should come first.
        videoConnection?.videoOrientation = AVCaptureVideoOrientation.portrait
        if(!(videoConnection != nil)){
            return
        }
        self.ImageOutPut?.captureStillImageAsynchronously(from: videoConnection!, completionHandler: { (imageDataSampleBuffer, error) in
            if(imageDataSampleBuffer == nil){
                return
            }

            let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(imageDataSampleBuffer!)
            
            
            self.image = UIImage.init(data: imageData!)
            self.session?.stopRunning()
            
      
            
            // Scale factors from screen points to captured-image pixels.
            let aspectWidth  = self.image!.size.width / CGFloat(K_Screen_width)
            let aspectHeight = self.image!.size.height / CGFloat(K_Screen_height)
//            Crop rect: the on-screen capture window mapped into image coordinates.
                    var scaledImageRect = CGRect.zero
            scaledImageRect.size.width  = CGFloat(self.photoWidth) * CGFloat(aspectWidth)
            scaledImageRect.size.height = CGFloat(self.photoHeigth) * CGFloat(aspectHeight)
            scaledImageRect.origin.x    = CGFloat(horizontally(viewWidth: self.photoWidth)) * CGFloat(aspectWidth)
            scaledImageRect.origin.y    = CGFloat(verticalCentralization(viewHeight: self.photoHeigth)) * CGFloat(aspectHeight)
            
            // Orientation must be normalized before cropping — the raw capture is rotated.
            let i = self.imageFromImage(image: self.fixOrientation(image: self.image!), rect: scaledImageRect)
            // NOTE(review): imageView is configured but never added to the hierarchy
            // (addSubview is commented out below) — leftover preview code.
            self.imageView  = UIImageView(frame:  CGRect(x: horizontally(viewWidth: self.photoWidth), y: verticalCentralization(viewHeight: self.photoHeigth), width: self.photoWidth, height: self.photoHeigth))
            self.imageView?.contentMode = UIView.ContentMode.scaleAspectFill
//            self.view.insertSubview(self.imageView!, belowSubview: but)
            self.imageView?.layer.masksToBounds = true
       
            self.imageView?.image = i
            self.callback?(i)
           self.backPage()
//            self.view.addSubview(self.imageView!)

            
        })
    }
    
    
    
    /// Aspect-fill scaling helper.
    /// NOTE(review): the actual draw call is commented out below, so the returned
    /// image is a blank canvas of `newSize`; this method also appears unused in
    /// this file — dead code left over from an earlier approach.
    func scaled(to newSize: CGSize,size:CGSize) -> UIImage {
        // Compute the fill ratio (larger of the two axis ratios).
        let aspectWidth  = newSize.width/size.width
        let aspectHeight = newSize.height/size.height
        let aspectRatio = max(aspectWidth, aspectHeight)
        
        // Draw region for the scaled image.
        var scaledImageRect = CGRect.zero
        scaledImageRect.size.width  = size.width * aspectRatio
        scaledImageRect.size.height = size.height * aspectRatio
        scaledImageRect.origin.x    = 0
        scaledImageRect.origin.y    = 0
        
        // Render and grab the final image (scale 0.0 = device scale, avoids blurring).
        UIGraphicsBeginImageContextWithOptions(newSize, false, 0.0)
//        drem(in: scaledImageRect)
    
        let scaledImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        
        return scaledImage!
    }
    
    /**
     * Crops a sub-region out of an image.
     * - image: the source image
     * - rect: the region to crop, in the source image's pixel coordinates
     * NOTE(review): the cropping result is force-unwrapped — a rect outside the
     * image bounds would crash here.
     */
    func imageFromImage(image:UIImage,rect:CGRect) -> UIImage {
        // Bridge UIImage to CGImage.
        let sourceImageRef = image.cgImage
         // Crop to the given rectangle.
        let newImageRef = sourceImageRef?.cropping(to: rect)
        let newImage =  UIImage.init(cgImage: newImageRef!)
        return newImage
    }

    
//    //按下的效果
//    -(void)touchDown{
//    self.saveBtn.backgroundColor = [UIColor colorFromHexValue:0x9B0000];
//    }
//
//    //按下拖出按鈕鬆手還原
//    -(void)touchUpOutside{
//    self.saveBtn.backgroundColor = [UIColor colorFromHexValue:0xFF2741];
//    }

 
    
    
    /// Normalizes a UIImage so its pixel data matches `.up` orientation.
    /// Camera stills carry an EXIF-style orientation flag; this redraws the
    /// pixels through the appropriate affine transform so that subsequent
    /// CGImage cropping (which ignores the flag) lines up with what was on screen.
    func fixOrientation(image:UIImage) -> UIImage {
        // Already upright — nothing to do.
        if image.imageOrientation == .up {
            return image
        }
        
        var transform = CGAffineTransform.identity
        
        // First pass: rotation for the four base orientations.
        switch image.imageOrientation {
        case .down, .downMirrored:
            transform = transform.translatedBy(x: image.size.width, y: image.size.height)
            transform = transform.rotated(by: .pi)
            break
            
        case .left, .leftMirrored:
            transform = transform.translatedBy(x: image.size.width, y: 0)
            transform = transform.rotated(by: .pi / 2)
            break
            
        case .right, .rightMirrored:
            transform = transform.translatedBy(x: 0, y: image.size.height)
            transform = transform.rotated(by: -.pi / 2)
            break
            
        default:
            break
        }
        
        // Second pass: horizontal flip for the mirrored variants.
        switch image.imageOrientation {
        case .upMirrored, .downMirrored:
            transform = transform.translatedBy(x: image.size.width, y: 0)
            transform = transform.scaledBy(x: -1, y: 1)
            break
            
        case .leftMirrored, .rightMirrored:
            transform = transform.translatedBy(x: image.size.height, y: 0);
            transform = transform.scaledBy(x: -1, y: 1)
            break
            
        default:
            break
        }
        
        // Redraw into a fresh bitmap context with the corrective transform applied.
        let ctx = CGContext(data: nil, width: Int(image.size.width), height: Int(image.size.height), bitsPerComponent: image.cgImage!.bitsPerComponent, bytesPerRow: 0, space: image.cgImage!.colorSpace!, bitmapInfo: image.cgImage!.bitmapInfo.rawValue)
        ctx?.concatenate(transform)
        
        switch image.imageOrientation {
        case .left, .leftMirrored, .right, .rightMirrored:
            // 90°-rotated cases: width and height swap in the draw rect.
            ctx?.draw(image.cgImage!, in: CGRect(x: CGFloat(0), y: CGFloat(0), width: CGFloat(image.size.height), height: CGFloat(image.size.width)))
            break
            
        default:
            ctx?.draw(image.cgImage!, in: CGRect(x: CGFloat(0), y: CGFloat(0), width: CGFloat(image.size.width), height: CGFloat(image.size.height)))
            break
        }
        
        let cgimg: CGImage = (ctx?.makeImage())!
        let img = UIImage(cgImage: cgimg)
        
        return img
    }
}

跳轉到這個相機頁面並返回中間的圖片

      let vc = ScannerVC()
        vc.callback = { image in
            print(image)
            self.idImage = image
        
        }
        self.navigationController?.pushViewController(vc, animated: true);

額外說一個坑:自定義相機拍的照片賦到imageView上顯示是正常的,但實際上像素數據是旋轉了90度的,裁剪區域圖片時要注意下,fixOrientation這個方法就是用來處理旋轉圖片的
如果這個文章對你有幫助就點下贊吧
源碼地址

發表評論
所有評論
還沒有人評論,想成為第一個評論的人麼? 請在上方評論欄輸入並且點擊發布.
相關文章