简体   繁体   中英

AVCaptureFileOutputRecordingDelegate not writing to file, Swift 2

My view controller contains a preview layer, which projects the image from my camera live. When pressing and holding a button, my code is supposed to record a video, and write it to a temporary file locally. This worked well with Swift 1.2 and Xcode 6, but stopped working after I converted the code to Swift 2 when updating to Xcode 7.

When I let go of the button, the captureOutput doesn´t get called, and there is no file written to the given path.

Some relevant code follows.

I would appreciate any help!

import UIKit
import MobileCoreServices
import AVFoundation
import AVKit




class ViewControllerPhoto: UIViewController, UIImagePickerControllerDelegate, UINavigationControllerDelegate, UIPickerViewDelegate, UIGestureRecognizerDelegate, ACEDrawingViewDelegate, UITextViewDelegate, AVCaptureFileOutputRecordingDelegate, UITableViewDelegate, UITableViewDataSource {

    @IBOutlet weak var captureButton: UIButton!

    var videoCheck: Bool = false

    let captureSession = AVCaptureSession()
    var previewLayer : AVCaptureVideoPreviewLayer?
    var captureDevice : AVCaptureDevice?

    var movieFileOutput = AVCaptureMovieFileOutput()
    var imageData: NSData!
    var outputPath: NSString!
    var outputURL: NSURL!

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.


        if captureSession.canSetSessionPreset(AVCaptureSessionPresetMedium) {

        captureSession.sessionPreset = AVCaptureSessionPresetMedium

        }

        let devices = AVCaptureDevice.devices()

        // Loop through all the capture devices on this phone
        for device in devices {
            // Make sure this particular device supports video
            if (device.hasMediaType(AVMediaTypeVideo)) {
                // Finally check the position and confirm we've got the back camera
                if(device.position == AVCaptureDevicePosition.Back) {
                    captureDevice = device as? AVCaptureDevice
                    if captureDevice != nil {
                        print("Capture device found")


                        beginSession() 
                    }
                }
            }

        }

        self.videoCheck = false

    }

    func beginSession() {

        stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
        if captureSession.canAddOutput(stillImageOutput) {
            captureSession.addOutput(stillImageOutput)
        }
        configureDevice()

        var err : NSError? = nil
       // captureSession.addInput(AVCaptureDeviceInput(device: captureDevice, error: &err))
        do{
        try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice))
        }
        catch{
            print("error: \(err?.localizedDescription)")
        }

        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        self.view.layer.addSublayer(previewLayer!)
        previewLayer?.frame = self.view.layer.frame

        previewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill

        if captureSession.canAddOutput(movieFileOutput) {

            self.captureSession.addOutput(movieFileOutput)

        }

        // SET CONNECTION PROPERTIES

        var captureConnection: AVCaptureConnection = movieFileOutput.connectionWithMediaType(AVMediaTypeVideo)
        if captureConnection.supportsVideoOrientation {

        captureConnection.videoOrientation = AVCaptureVideoOrientation.Portrait

        }

        var audioDevice: AVCaptureDevice = AVCaptureDevice.devicesWithMediaType(AVMediaTypeAudio)[0] as! AVCaptureDevice
        do{

        let audioDeviceInput: AVCaptureDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
            if captureSession.canAddInput(audioDeviceInput) {

                captureSession.addInput(audioDeviceInput)

            }
        }
        catch {
            print("error")
        }

        captureSession.startRunning()
    }



    func captureVideo() {
        outputPath = (NSURL(fileURLWithPath: NSTemporaryDirectory())).URLByAppendingPathComponent("movie.mov").absoluteString as NSString

outputURL = NSURL(fileURLWithPath: outputPath as String)

        let fileManager: NSFileManager = NSFileManager.defaultManager()

        if outputURL.path != nil{

            if fileManager.fileExistsAtPath(outputURL.path!) {

                do{
                    try fileManager.removeItemAtPath(outputPath as String)

                }
                catch{
                print(error)
                }

            }
        }
        self.movieFileOutput.startRecordingToOutputFileURL(outputURL, recordingDelegate: self)

    }


    func cameraWithPosition(position: AVCaptureDevicePosition) -> AVCaptureDevice {
        let devices: NSArray = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
        for device in devices {
            if(device.position == position){
                return device as! AVCaptureDevice
            }
        }
        return AVCaptureDevice()
    }

    @IBAction func captureButtonIsLongPressed(sender: UILongPressGestureRecognizer) {

        if sender.state == UIGestureRecognizerState.Began {

            videoCheck = true
            captureVideo()

        }

        else if sender.state == UIGestureRecognizerState.Ended{

            self.movieFileOutput.stopRecording()

        }

    }

    func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
        print("Output")

        playVideo()

    }
    func playVideo() {

        let path = outputPath
        let url = outputURL

let player = AVPlayer(URL: url)
        let playerLayer = AVPlayerLayer(player: player)
        playerLayer.frame = self.view.bounds

        player.play()

    }

}

Figured it out after a lot of hazzle. My mistake was lying in the line

outputPath = (NSURL(fileURLWithPath: NSTemporaryDirectory())).URLByAppendingPathComponent("movie.mov").absoluteString as NSString

Of course this is a NSString, not a NSPath. Changing to these lines fixed my problem:

outputURL = NSURL(fileURLWithPath: NSTemporaryDirectory(), isDirectory: true).URLByAppendingPathComponent("movie.mov")
outputPath = outputURL.path

Hope this helps anyone!

The technical post webpages of this site follow the CC BY-SA 4.0 protocol. If you need to reprint, please indicate the site URL or the original address.Any question please contact:yoyou2525@163.com.

 
粤ICP备18138465号  © 2020-2024 STACKOOM.COM