iOS 人脸识别(检测)

来源:互联网 发布:淘宝订单业务流程 编辑:程序博客网 时间:2024/05/15 21:24

本文用的是系统自带的人脸识别功能,跟扫二维码条形码是一样的,但是系统只能识别出这是人的脸,至于高级的判断这张脸是谁的,需要更高级的第三方库了。

这里是检测到有人脸,然后三秒后自动拍照并存储到本地。

import UIKit
import ImageIO
import AVFoundation

/// Detects a face with the front camera using AVFoundation's built-in
/// metadata detection (the same mechanism as QR/bar-code scanning).
/// When a face is large and centered enough, a 3-second countdown starts;
/// when it fires, a still photo is captured and saved under Documents.
/// Note: only face *presence* is detected — identifying whose face it is
/// requires a third-party library.
class ScanFaceViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {

    fileprivate var session = AVCaptureSession()
    fileprivate var stillImageOutput = AVCaptureStillImageOutput()
    fileprivate var layer = AVCaptureVideoPreviewLayer()
    // One-shot countdown started once a face fills the frame; nil while idle.
    fileprivate var timer: Timer?
    fileprivate var label = UILabel()

    override func viewDidLoad() {
        super.viewDidLoad()

        self.label.frame = CGRect(x: 0, y: 100, width: ScreenWidth, height: 21)
        self.label.text = "等待验证"
        self.label.textColor = UIColor.red
        self.navigationController?.view.addSubview(self.label)

        self.startSession()
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        // Fix: also tear down the timer and stop the session so the capture
        // pipeline and its delegate callbacks cannot outlive the view.
        self.timer?.invalidate()
        self.timer = nil
        if self.session.isRunning {
            self.session.stopRunning()
        }
        self.label.removeFromSuperview()
    }

    /// Configures the session with the front camera, a metadata output
    /// restricted to face objects, and a still-image output, then starts
    /// the live preview.
    fileprivate func startSession() {
        // Find the front-facing video camera (Swift 3-era device lookup).
        var captureDevice: AVCaptureDevice?
        if let array = AVCaptureDevice.devices() {
            for i in array {
                // Fix: conditional cast instead of as! — skip foreign entries.
                guard let device = i as? AVCaptureDevice else { continue }
                if device.hasMediaType(AVMediaTypeVideo) && device.position == AVCaptureDevicePosition.front {
                    captureDevice = device
                    break
                }
            }
        }
        // Fix: the original passed a possibly-nil optional straight into
        // AVCaptureDeviceInput and crashed when no front camera exists.
        guard let frontCamera = captureDevice else { return }

        do {
            let input = try AVCaptureDeviceInput(device: frontCamera)
            let output = AVCaptureMetadataOutput()

            // Fix: verify before mutating the session — addInput/addOutput
            // raise an exception for unsupported configurations.
            guard self.session.canAddInput(input), self.session.canAddOutput(output) else { return }
            self.session.addInput(input)
            self.session.addOutput(output)

            output.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
            // metadataObjectTypes must be set AFTER the output joins the session.
            output.metadataObjectTypes = [AVMetadataObjectTypeFace]

            self.stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
            if self.session.canAddOutput(self.stillImageOutput) {
                self.session.addOutput(self.stillImageOutput)
            }

            self.layer = AVCaptureVideoPreviewLayer(session: self.session)
            self.layer.videoGravity = AVLayerVideoGravityResizeAspectFill
            self.layer.frame = UIScreen.main.bounds

            self.view.layer.addSublayer(self.layer)
            self.session.startRunning()
        } catch {
            return
        }
    }

    // MARK: - AVCaptureMetadataOutputObjectsDelegate

    /// Delivered on the main queue whenever face metadata is detected.
    /// Starts the 3-second countdown when the face is close enough;
    /// cancels it (and resets the UI) when the face is lost.
    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!) {
        if let meta = metadataObjects.first as? AVMetadataFaceObject,
            // Fix: conditional cast — transformedMetadataObject(for:) may return nil.
            let face = self.layer.transformedMetadataObject(for: meta) as? AVMetadataFaceObject {
            // Pad the reported face bounds by 10pt on every side.
            let faceRect = CGRect(x: face.bounds.origin.x - 10,
                                  y: face.bounds.origin.y - 10,
                                  width: face.bounds.size.width + 20,
                                  height: face.bounds.size.height + 20)
            // Heuristic: the face is "close enough" when it is roughly centered
            // and spans most of the screen width.
            if faceRect.origin.x <= 70 && faceRect.size.width >= ScreenWidth - 140 {
                if self.timer == nil {
                    self.timer = Timer.scheduledTimer(timeInterval: 3,
                                                      target: self,
                                                      selector: #selector(ScanFaceViewController.isStop),
                                                      userInfo: nil,
                                                      repeats: false)
                    self.startUpdateView()
                }
            }
        } else {
            // Face lost: abort the countdown and reset the label.
            self.timer?.invalidate()
            self.timer = nil
            self.stopUpdateView()
        }
    }

    /// Blinks the label while the countdown timer is alive.
    fileprivate func startUpdateView() {
        if self.timer != nil {
            self.label.text = "验证中"
            UIView.animate(withDuration: 0.2, animations: {
                self.label.alpha = 0.2
            }, completion: { (finish) in
                UIView.animate(withDuration: 0.2, animations: {
                    self.label.alpha = 1
                }, completion: { (finish) in
                    // Recurse until isStop()/face-loss clears the timer.
                    self.startUpdateView()
                })
            })
        }
    }

    /// Resets the label to its idle state.
    fileprivate func stopUpdateView() {
        self.label.text = "等待验证"
        self.label.alpha = 1
    }

    /// Timer callback: capture the photo, then freeze the session.
    @objc fileprivate func isStop() {
        self.timer?.invalidate()
        self.timer = nil
        self.catchImage()
        self.session.stopRunning()
    }

    /// Grabs a still JPEG from the video connection and saves it to disk,
    /// named with the current timestamp.
    @objc fileprivate func catchImage() {
        // Locate the still-image connection that carries video.
        var imageConnection: AVCaptureConnection?
        outer: for c in self.stillImageOutput.connections {
            // Fix: conditional casts instead of as!.
            guard let connection = c as? AVCaptureConnection else { continue }
            for p in connection.inputPorts {
                if let port = p as? AVCaptureInputPort, port.mediaType == AVMediaTypeVideo {
                    imageConnection = connection
                    break outer
                }
            }
        }
        // Fix: the original force-unwrapped imageConnection! and could crash
        // when no video connection exists.
        guard let videoConnection = imageConnection else { return }

        self.stillImageOutput.captureStillImageAsynchronously(from: videoConnection, completionHandler: { (imageDataSampleBuffer, error) -> Void in
            // Fix: unwrap the whole chain instead of imageData!/facePhoto!.
            guard let buffer = imageDataSampleBuffer,
                let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer),
                let facePhoto = UIImage(data: imageData) else { return }
            let formatter = DateFormatter()
            formatter.dateFormat = "yyyy-MM-dd HH:mm:ss"
            self.saveImage(image: facePhoto, saveName: "\(formatter.string(from: Date()))")
        })
    }

    /// Writes `image` as a JPEG (quality 0.8) named `saveName` into
    /// the app's Documents directory.
    fileprivate func saveImage(image: UIImage, saveName: String) {
        if let data = UIImageJPEGRepresentation(image, 0.8) {
            let fullPath = NSHomeDirectory().appending("/Documents").appending("/\(saveName)")
            // Fix: write the Data directly and atomically instead of bridging
            // through NSData with atomically: false (partial writes on crash).
            try? data.write(to: URL(fileURLWithPath: fullPath), options: .atomic)
        }
    }
}

本文代码使用 Swift 3.0 编写;如果你的 Swift 版本较旧(未更新到 Swift 3),编译时会报错。

4 0
原创粉丝点击