
Camera Preview Layer not Loading

I am attempting to make a video preview layer. The session seems to be running, but nothing shows up on the screen. Here is the code I used:

var captureSession = AVCaptureSession()
var backCamera: AVCaptureDevice?
var frontCamera: AVCaptureDevice?
var currentCamera: AVCaptureDevice?

var photoOutput: AVCapturePhotoOutput?

var cameraPreviewLayer: AVCaptureVideoPreviewLayer?
//UIOutlets
@IBOutlet weak var cameraView: UIView!
@IBOutlet weak var captureImageView: UIImageView!

override func viewDidLoad() {
    super.viewDidLoad()
    // Do any additional setup after loading the view

    self.cameraSetupSession()
    self.setupDevice()
    self.setupInputOutput()
    self.setupPerviewLayer()
    self.startRunningCaptureSession()

}

func cameraSetupSession() {
   print("1")
    captureSession.sessionPreset = AVCaptureSession.Preset.photo

}

func setupDevice() {
    print("2")
    let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera], mediaType: .video , position: .unspecified)

    let devices = deviceDiscoverySession.devices

    for device in devices {
        if device.position == .back {
            backCamera = device
        } else if device.position == .front {
            frontCamera = device
        }
    }
    currentCamera = backCamera
}

func setupInputOutput() {
    print("3")
    do {
        let captureDeviceInput = try AVCaptureDeviceInput(device: currentCamera!)
        captureSession.addInput(captureDeviceInput)
        photoOutput?.setPreparedPhotoSettingsArray([AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])], completionHandler: nil)
    } catch {
        displayAlert(title: "Error", message: "There was an error accessing your camera feed.", okMessage: "Okay")
    }

}

func setupPerviewLayer() {
    print("4")
    cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    cameraPreviewLayer?.videoGravity = .resizeAspectFill
    cameraPreviewLayer?.masksToBounds = true
    cameraPreviewLayer?.connection?.videoOrientation = .portrait
    self.view.layer.insertSublayer(cameraPreviewLayer!, at: 0)
}

func startRunningCaptureSession() {
    print("5")
    self.captureSession.startRunning()
    print(self.captureSession.isRunning)
}

My log output during this is:

1
2
2017-11-23 12:36:04.042296-0500 Instagram Clone[353:15568] [MC] System group container for systemgroup.com.apple.configurationprofiles path is /private/var/containers/Shared/SystemGroup/systemgroup.com.apple.configurationprofiles
2017-11-23 12:36:04.043014-0500 Instagram Clone[353:15568] [MC] Reading from public effective user settings.
3
4
5
true

One of the only positive signs is that the app does ask for permission to use the camera when I delete it and do a clean build. Please, any and all ideas and help are very much appreciated.
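Since the permission prompt does appear, camera access can at least be requested. One thing worth verifying is that the session is only configured after access has actually been granted. Below is a minimal sketch of gating the setup calls on the authorization status; the helper name checkCameraAuthorization is purely illustrative and not part of the original code.

func checkCameraAuthorization(_ completion: @escaping (Bool) -> Void) {
    switch AVCaptureDevice.authorizationStatus(for: .video) {
    case .authorized:
        // Access was granted previously.
        completion(true)
    case .notDetermined:
        // First launch: this triggers the system permission prompt.
        AVCaptureDevice.requestAccess(for: .video) { granted in
            DispatchQueue.main.async { completion(granted) }
        }
    default:
        // Denied or restricted.
        completion(false)
    }
}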

Looking at the code, the one thing I can see missing is that you aren't linking the session to the preview layer.

Try adding the following to setupPerviewLayer():

cameraPreviewLayer?.session = captureSession
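For context, here is a sketch of how that suggestion could slot into the poster's setupPerviewLayer(), also giving the layer an explicit frame so it has a visible size (the answer below does the same). This is an illustration, not the poster's final code.

func setupPerviewLayer() {
    cameraPreviewLayer = AVCaptureVideoPreviewLayer()
    cameraPreviewLayer?.session = captureSession              // link the layer to the capture session
    cameraPreviewLayer?.videoGravity = .resizeAspectFill
    cameraPreviewLayer?.connection?.videoOrientation = .portrait
    cameraPreviewLayer?.frame = view.bounds                   // a zero-sized layer shows nothing
    view.layer.insertSublayer(cameraPreviewLayer!, at: 0)
}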

I took your code and changed some parts until it worked, and it works now. Here is your code with my changes.

import UIKit
import AVFoundation
import CoreVideo

class ViewController: UIViewController {

@IBOutlet var recordButton: UIButton!

var windowOrientation: UIInterfaceOrientation {
    return view.window?.windowScene?.interfaceOrientation ?? .unknown
}


var captureSession = AVCaptureSession()
var backCamera: AVCaptureDevice?
var frontCamera: AVCaptureDevice?
var currentCamera: AVCaptureDevice?

var videoOutput = AVCaptureVideoDataOutput()
var videoDataOutputQueue: DispatchQueue!


var photoOutput: AVCapturePhotoOutput?


//var cameraPreviewLayer: AVCaptureVideoPreviewLayer?
var cameraPreviewLayer: CALayer!
//UIOutlets
@IBOutlet weak var cameraView: UIView!
@IBOutlet weak var captureImageView: UIImageView!

override func viewDidLoad() {
    super.viewDidLoad()
    
    self.cameraSetupSession()
    self.setupDevice()
    self.setupInputOutput()
    self.setupPreviewLayer(on: cameraView)
    self.startRunningCaptureSession()
    // Do any additional setup after loading the view.
}

func cameraSetupSession() {
    captureSession.sessionPreset = AVCaptureSession.Preset.photo
}

func setupDevice() {
    print("2")
    let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInUltraWideCamera], mediaType: .video, position: .unspecified)

    let devices = deviceDiscoverySession.devices
    for device in devices {
        if device.position == .back {
            backCamera = device
        } else if device.position == .front {
            frontCamera = device
        }
    }
    currentCamera = backCamera
}

func setupInputOutput() {
    print("3")
    do {
        let captureDeviceInput = try AVCaptureDeviceInput(device: currentCamera!)
        print(captureDeviceInput)
        captureSession.addInput(captureDeviceInput)

        /*
        let settings: [String : Any] = [
            kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32BGRA),
        ]

        videoOutput.videoSettings = settings
        //videoOutput.setSampleBufferDelegate(self, queue: sessionQueue)
        if captureSession.canAddOutput(videoOutput) {
            captureSession.addOutput(videoOutput)
        }
        */

        let videoOutput = AVCaptureMovieFileOutput()
        /*
        videoOutput = AVCaptureVideoDataOutput()
        videoDataOutputQueue = DispatchQueue(label: "VideoDataOutputQueue")
        videoOutput.setSampleBufferDelegate(self, queue: self.videoDataOutputQueue)
        */

        captureSession.addOutput(videoOutput)

        //photoOutput?.setPreparedPhotoSettingsArray([AVCapturePhotoSettings(format:[AVVideoCodecKey:AVVideoCodecType.jpeg])], completionHandler: nil)
        //captureSession.addOutput(photoOutput!)

    } catch {
        return
    }
}

func setupPreviewLayer(on view: UIView) {
    print("4")
    /*
    cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    cameraPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
    cameraPreviewLayer?.masksToBounds = true
    cameraPreviewLayer?.connection?.videoOrientation = .portrait
    self.view.layer.insertSublayer(cameraPreviewLayer!, at: 0)
    cameraPreviewLayer?.frame = view.frame
    cameraPreviewLayer?.session = self.captureSession
    */
    cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    self.view.layer.addSublayer(self.cameraPreviewLayer)
    self.cameraPreviewLayer.frame = self.view.layer.frame
}

func startRunningCaptureSession() {
    print("5")
    self.captureSession.startRunning()
    print(self.captureSession.isRunning)
}

}

I commented out the parts I changed so that you can see the difference.
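If it helps to see the changes in isolation, here is a condensed, illustrative sketch of the two differences in the working code above: the session gets a concrete output added to it, and the preview layer gets a non-zero frame before it is attached to the view's layer tree (a layer with a zero frame draws nothing on screen). The function name attachPreview is made up for this sketch; captureSession comes from the post.

func attachPreview(to view: UIView) {
    // 1. Add a concrete output so the session has somewhere to deliver media.
    let movieOutput = AVCaptureMovieFileOutput()
    if captureSession.canAddOutput(movieOutput) {
        captureSession.addOutput(movieOutput)
    }

    // 2. Create the preview layer, size it, and attach it to the view.
    let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    previewLayer.videoGravity = .resizeAspectFill
    previewLayer.frame = view.bounds                           // a zero frame renders nothing
    view.layer.addSublayer(previewLayer)
}

Note that AVCaptureSession.startRunning() blocks the calling thread, so it is commonly dispatched to a background queue rather than called directly in viewDidLoad.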
