
How to initialize the iPhone camera properly

I'm trying to build a simple app that scans barcodes, and I have this piece of code:

var captureSession: AVCaptureSession?
var videoPreviewLayer: AVCaptureVideoPreviewLayer?
var codeView: UIView?

override func viewDidLoad() {
    super.viewDidLoad()

    let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInDualCamera], mediaType: AVMediaType.video, position: .back)
    guard let captureDevice = discoverySession.devices.first else {
        print("Camre initialization faild.")
        return
    }
    do {
        let input = try AVCaptureDeviceInput(device: captureDevice)
        captureSession?.addInput(input)

        let captureMetadataOutput = AVCaptureMetadataOutput()
        captureSession?.addOutput(captureMetadataOutput)
        captureMetadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
        captureMetadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]

        videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession!)
        videoPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
        videoPreviewLayer?.frame = view.layer.bounds
        view.layer.addSublayer(videoPreviewLayer!)
        captureSession?.startRunning()

    } catch let error as NSError {
        print("Error \(error.localizedDescription)")
    }

}

I run the application on a physical device (iPhone XS Max, iOS 12.3.1) and it unexpectedly crashes on the line

 captureMetadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]

with error "libc++abi.dylib: terminating with uncaught exception of type NSException"

import AVFoundation   


var captureSession: AVCaptureSession!
var videoPreviewLayer: AVCaptureVideoPreviewLayer!

override func viewDidLoad() {
    super.viewDidLoad()

    // Create the session first; the input and output below are added to it.
    captureSession = AVCaptureSession()
    guard let captureDevice = AVCaptureDevice.default(for: .video) else {return}

    let videoInput: AVCaptureDeviceInput

    do {
        videoInput = try AVCaptureDeviceInput(device: captureDevice)
    } catch {
        // handle error
        return
    }

    if (captureSession.canAddInput(videoInput)) {
        captureSession.addInput(videoInput)
    } else {
        // handle error
        return
    }

    let metadataOutput = AVCaptureMetadataOutput()

    if (captureSession.canAddOutput(metadataOutput)) {
        captureSession.addOutput(metadataOutput)

        metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
        // Set the types only after the output has been added to the session;
        // otherwise .qr is not in availableMetadataObjectTypes and AVFoundation
        // raises an NSException, which is the crash in the original code.
        metadataOutput.metadataObjectTypes = [.qr]
    } else {
        // handle error
        return
    }

    videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    videoPreviewLayer.frame = view.layer.bounds
    videoPreviewLayer.videoGravity = .resizeAspectFill
    view.layer.addSublayer(videoPreviewLayer)

    captureSession.startRunning()
}

This code should help you. In your original code, captureSession is an optional that is never assigned, so captureSession?.addInput(...) and captureSession?.addOutput(...) silently do nothing; assigning metadataObjectTypes to an output that has never been added to a session is what raises the NSException. Create the session first and check canAddInput/canAddOutput as above. Also remember to make your view controller conform to AVCaptureMetadataOutputObjectsDelegate.
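
As a minimal sketch of that delegate callback: the method signature is the standard AVCaptureMetadataOutputObjectsDelegate one, but the ViewController class name, the one-shot stopRunning() call, and the handleScannedCode(_:) helper are assumptions about how you want to consume the result.

extension ViewController: AVCaptureMetadataOutputObjectsDelegate {
    func metadataOutput(_ output: AVCaptureMetadataOutput,
                        didOutput metadataObjects: [AVMetadataObject],
                        from connection: AVCaptureConnection) {
        // Take the first recognized code, if any.
        guard let code = metadataObjects.first as? AVMetadataMachineReadableCodeObject,
              let value = code.stringValue else { return }

        // Assumption: stop after the first successful scan.
        captureSession.stopRunning()

        // Hypothetical handler; replace with whatever your app does with the value.
        handleScannedCode(value)
    }
}

If the camera does not start at all, it is also worth checking that Info.plist contains an NSCameraUsageDescription entry; without it, iOS terminates the app the first time the camera is accessed.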
