
Swift 3 Record Video - Audio is missing from the mp4 file

This code produces a local mp4 file with no audio. If I look at the file in the QuickTime Player inspector, it shows an H.264 video track but no AAC audio, and if I play the file back in the app there is no sound.

Most bizarrely, I can upload the same file to a server as a multipart document and it is fine: when I download the file back from the server, it has the AAC audio data.
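
As a quick sanity check on the recorded file itself, the tracks AVFoundation sees can be listed with AVAsset. This is only a diagnostic sketch, assuming the same dataMgr.videoFileURL used in the code below; it is not part of the original view controller:

let asset = AVAsset(url: dataMgr.videoFileURL)
let audioTracks = asset.tracks(withMediaType: AVMediaTypeAudio) // Swift 3 media-type constants
let videoTracks = asset.tracks(withMediaType: AVMediaTypeVideo)
// zero audio tracks would match the missing-AAC symptom seen in the QuickTime inspector
print("video tracks: \(videoTracks.count), audio tracks: \(audioTracks.count)")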

import UIKit
import AVFoundation
import AVKit

class VideoViewController: UIViewController, AVCaptureFileOutputRecordingDelegate
{
    var captureDevice : AVCaptureDevice! // check capture device availability
    var videoInput:AVCaptureDeviceInput?
    let captureSession = AVCaptureSession() // to create capture session
    var previewLayer : AVCaptureVideoPreviewLayer? // to add video inside container
    var videoFileOutput:AVCaptureMovieFileOutput!
    var audioDevice:AVCaptureDevice?
    var audioInput:AVCaptureDeviceInput?

    var playerController = AVPlayerViewController()

    @IBOutlet weak var videoView: UIView!

    override func viewDidLoad()
    {
        super.viewDidLoad()
    }

    override func viewDidDisappear(_ animated: Bool)
    {
        shutDown()
    }

    func isVideoSetup() -> Bool
    {
        if(captureDevice == nil)
        {
            return false
        }

        return true
    }

    func setupCamera()
    {

        print("setupCamera")

        audioDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)
        print("Audio capture device found")

        // The session preset gives three quality options: high, medium, or low quality recording
        captureSession.sessionPreset = AVCaptureSessionPresetLow

        let deviceDiscoverySession = AVCaptureDeviceDiscoverySession.init(deviceTypes: [AVCaptureDeviceType.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: AVCaptureDevicePosition.front)

        if(deviceDiscoverySession != nil)
        {
            for device in deviceDiscoverySession!.devices
            {
                if device.position == AVCaptureDevicePosition.front
                {
                    print("Video capture device found")
                    captureDevice = device
                    setupCamera2()
                    return
                }
            }
        }

        print("Capture device not found")
        fatalError(errorMsg:"Camera was not found.")

    }

    //Configuring & Initializing the camera
    func setupCamera2()
    {
        print("setupCamera2")

        do {
            audioInput = try AVCaptureDeviceInput(device: audioDevice)
            print("audio input created")

        } catch {
            print("Unable to add audio device to the recording.")
            fatalError(errorMsg:"Unable to access audio device.")
            return
        }



        if let device = captureDevice {
            do{
                try device.lockForConfiguration()

            }catch{
                print("lockForConfiguration failed: \(error)")
            }
        }

        let err : NSError? = nil

        do{
            videoInput = try AVCaptureDeviceInput(device: captureDevice)
            captureSession.addInput(videoInput)
        }catch
        {
            print("error creating video input: \(error)")
            fatalError(errorMsg:"Unknown error.")
            return
        }

        if err != nil
        {
            print("error: \(String(describing: err?.localizedDescription))")
            fatalError(errorMsg:String(describing: err?.localizedDescription))
            return
        }

        self.captureSession.addInput(audioInput)
        print("added audio device")

        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        // videoView is the UI container; the camera preview is shown inside it
        previewLayer?.frame = videoView.layer.bounds
        previewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.portrait
        previewLayer?.videoGravity = AVLayerVideoGravityResizeAspect
        videoView.layer.addSublayer(previewLayer!)
        captureSession.startRunning()
        captureDevice?.unlockForConfiguration()
    }



    func startRecording()
    {
        if(captureSession.outputs.count > 0)
        //if(captureSession.canAddOutput(videoFileOutput))
        {
            print("Resetting inputs")
            captureSession.removeInput(audioInput)
            captureSession.removeInput(videoInput)
            captureSession.removeOutput(videoFileOutput)
            setupCamera2()
        }

        videoFileOutput = AVCaptureMovieFileOutput()
        videoFileOutput.movieFragmentInterval = CMTime(seconds: 1, preferredTimescale: 30)

        captureSession.addOutput(videoFileOutput)

        do
        {
            try FileManager.default.removeItem(at: dataMgr.videoFileURL)
        }
        catch
        {
            // ignore: the file may not exist yet
        }

        let recordingDelegate:AVCaptureFileOutputRecordingDelegate? = self
        videoFileOutput.startRecording(toOutputFileURL: dataMgr.videoFileURL, recordingDelegate: recordingDelegate)
    }

    func stopRecording()
    {
        //To end recording just call this function
        videoFileOutput.stopRecording()

    }


    func shutDown()
    {
        if(captureSession.outputs.count > 0)
        {
            captureSession.removeInput(audioInput)
            captureSession.removeInput(videoInput)
            captureSession.removeOutput(videoFileOutput)
        }

        captureSession.stopRunning()
    }


    func playVideo()
    {
        let item = AVPlayerItem(url: dataMgr.videoFileURL)
        let player = AVPlayer(playerItem: item)
        playerController = AVPlayerViewController()
        playerController.player = player
        playerController.view.frame = CGRect(x:videoView.frame.origin.x, y:videoView.frame.origin.y, width: videoView.frame.width, height: videoView.frame.height)
        self.addChildViewController(playerController)
        videoView.addSubview(playerController.view)
    }

    func stopPlaying()
    {
        //playerController.view.removeSubviews()
        playerController.removeFromParentViewController()
        videoView.removeSubviews()
        utils.removeAllSubviews(vw: videoView)
    }


    @available(iOS 4.0, *)
    public func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!)
    {
        print("capture did finish")
        print(captureOutput);
        print(outputFileURL);
        print("size of file=" + String(utils.getFileSize(fileURL: outputFileURL)))
    }

    func fatalError(errorMsg:String)
    {
        let alert = UIAlertController(title: "Error", message: errorMsg, preferredStyle: UIAlertControllerStyle.alert)    
        alert.addAction(UIAlertAction(title: "OK", style: .default, handler: { action in
            app.switchScreens(newscreen:"StartViewController")

        }))
        self.present(alert, animated: true, completion: nil)

    }
}

This is crazy, but I solved it by changing the filename I save the recording to, from

let videoFileName = "mysavefile.mp4";

to

let videoFileName = "mysavefile.mov";
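
For reference, the file URL itself is built by a dataMgr object that is not shown in the question, so the following is only an assumed sketch of how that URL could be constructed with the .mov extension. AVCaptureMovieFileOutput records a QuickTime movie, so .mov matches the container it actually writes:

// hypothetical reconstruction of dataMgr.videoFileURL -- the real DataManager type is not shown
let videoFileName = "mysavefile.mov"
let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
let videoFileURL = documentsURL.appendingPathComponent(videoFileName)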
