
AVAssetWriterInput appendSampleBuffer: Cannot append sample buffer: Must start a session (using -AVAssetWriter startSessionAtSourceTime:) first

I'm using ARVideoKit to record the screen (ReplayKit doesn't work for this), and sometimes I can record and save without any problem. Other times I record, and when I go to save, I get a crash:

*** Terminating app due to uncaught exception 'NSInternalInconsistencyException', reason: '*** -[AVAssetWriterInput appendSampleBuffer:] Cannot append sample buffer: Must start a session (using -[AVAssetWriter startSessionAtSourceTime:]) first'

Looking at the stack trace, it is a __pthread_kill and it happens on thread 83.


Specifically, in this DispatchQueue:

let audioBufferQueue = DispatchQueue(label: "com.ahmedbekhit.AudioBufferQueue")

How can I prevent this from happening?

Here is the code from the file:

import AVFoundation
import CoreImage
import UIKit

@available(iOS 11.0, *)
class WritAR: NSObject, AVCaptureAudioDataOutputSampleBufferDelegate {
    private var assetWriter: AVAssetWriter!
    private var videoInput: AVAssetWriterInput!
    private var audioInput: AVAssetWriterInput!
    private var session: AVCaptureSession!

    private var pixelBufferInput: AVAssetWriterInputPixelBufferAdaptor!
    private var videoOutputSettings: Dictionary<String, AnyObject>!
    private var audioSettings: [String: Any]?

    let audioBufferQueue = DispatchQueue(label: "com.ahmedbekhit.AudioBufferQueue")

    private var isRecording: Bool = false

    weak var delegate: RecordARDelegate?
    var videoInputOrientation: ARVideoOrientation = .auto

    init(output: URL, width: Int, height: Int, adjustForSharing: Bool, audioEnabled: Bool, orientaions:[ARInputViewOrientation], queue: DispatchQueue, allowMix: Bool) {
        super.init()
        do {
            assetWriter = try AVAssetWriter(outputURL: output, fileType: AVFileType.mp4)
        } catch {
            // FIXME: handle when failed to allocate AVAssetWriter.
            return
        }
        if audioEnabled {
            if allowMix {
                let audioOptions: AVAudioSession.CategoryOptions = [.mixWithOthers , .allowBluetooth, .defaultToSpeaker, .interruptSpokenAudioAndMixWithOthers]
                try? AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playAndRecord, mode: AVAudioSession.Mode.spokenAudio, options: audioOptions)
                try? AVAudioSession.sharedInstance().setActive(true)
            }
            AVAudioSession.sharedInstance().requestRecordPermission({ permitted in
                if permitted {
                    self.prepareAudioDevice(with: queue)
                }
            })
        }

        //HEVC file format only supports A10 Fusion Chip or higher.
        //to support HEVC, make sure to check if the device is iPhone 7 or higher
        videoOutputSettings = [
            AVVideoCodecKey: AVVideoCodecType.h264 as AnyObject,
            AVVideoWidthKey: width as AnyObject,
            AVVideoHeightKey: height as AnyObject
        ]

        let attributes: [String: Bool] = [
            kCVPixelBufferCGImageCompatibilityKey as String: true,
            kCVPixelBufferCGBitmapContextCompatibilityKey as String: true
        ]
        videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoOutputSettings)

        videoInput.expectsMediaDataInRealTime = true
        pixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoInput, sourcePixelBufferAttributes: nil)

        var angleEnabled: Bool {
            for v in orientaions {
                if UIDevice.current.orientation.rawValue == v.rawValue {
                    return true
                }
            }
            return false
        }

        var recentAngle: CGFloat = 0
        var rotationAngle: CGFloat = 0
        switch UIDevice.current.orientation {
        case .landscapeLeft:
            rotationAngle = -90
            recentAngle = -90
        case .landscapeRight:
            rotationAngle = 90
            recentAngle = 90
        case .faceUp, .faceDown, .portraitUpsideDown:
            rotationAngle = recentAngle
        default:
            rotationAngle = 0
            recentAngle = 0
        }

        if !angleEnabled {
            rotationAngle = 0
        }

        var t = CGAffineTransform.identity

        switch videoInputOrientation {
        case .auto:
            t = t.rotated(by: ((rotationAngle*CGFloat.pi) / 180))
        case .alwaysPortrait:
            t = t.rotated(by: 0)
        case .alwaysLandscape:
            if rotationAngle == 90 || rotationAngle == -90 {
                t = t.rotated(by: ((rotationAngle * CGFloat.pi) / 180))
            } else {
                t = t.rotated(by: ((-90 * CGFloat.pi) / 180))
            }
        }

        videoInput.transform = t

        if assetWriter.canAdd(videoInput) {
            assetWriter.add(videoInput)
        } else {
            delegate?.recorder(didFailRecording: assetWriter.error, and: "An error occurred while adding video input.")
            isWritingWithoutError = false
        }
        assetWriter.shouldOptimizeForNetworkUse = adjustForSharing
    }

    func prepareAudioDevice(with queue: DispatchQueue) {
        let device: AVCaptureDevice = AVCaptureDevice.default(for: .audio)!
        var audioDeviceInput: AVCaptureDeviceInput?
        do {
            audioDeviceInput = try AVCaptureDeviceInput(device: device)
        } catch {
            audioDeviceInput = nil
        }

        let audioDataOutput = AVCaptureAudioDataOutput()
        audioDataOutput.setSampleBufferDelegate(self, queue: queue)

        session = AVCaptureSession()
        session.sessionPreset = .medium
        session.usesApplicationAudioSession = true
        session.automaticallyConfiguresApplicationAudioSession = false

        if session.canAddInput(audioDeviceInput!) {
            session.addInput(audioDeviceInput!)
        }
        if session.canAddOutput(audioDataOutput) {
            session.addOutput(audioDataOutput)
        }


        audioSettings = audioDataOutput.recommendedAudioSettingsForAssetWriter(writingTo: .m4v) as? [String: Any]

        audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioSettings)
        audioInput.expectsMediaDataInRealTime = true

        audioBufferQueue.async {
            self.session.startRunning()
        }

        if assetWriter.canAdd(audioInput) {
            assetWriter.add(audioInput)
        }
    }

    var startingVideoTime: CMTime?
    var isWritingWithoutError: Bool?
    var currentDuration: TimeInterval = 0 // Seconds

    func insert(pixel buffer: CVPixelBuffer, with intervals: CFTimeInterval) {
        let time: CMTime = CMTime(seconds: intervals, preferredTimescale: 1000000)
        insert(pixel: buffer, with: time)
    }

    func insert(pixel buffer: CVPixelBuffer, with time: CMTime) {
        if assetWriter.status == .unknown {
            guard startingVideoTime == nil else {
                isWritingWithoutError = false
                return
            }
            startingVideoTime = time
            if assetWriter.startWriting() {
                assetWriter.startSession(atSourceTime: startingVideoTime!)
                currentDuration = 0
                isRecording = true
                isWritingWithoutError = true
            } else {
                delegate?.recorder(didFailRecording: assetWriter.error, and: "An error occurred while starting the video session.")
                currentDuration = 0
                isRecording = false
                isWritingWithoutError = false
            }
        } else if assetWriter.status == .failed {
            delegate?.recorder(didFailRecording: assetWriter.error, and: "Video session failed while recording.")
            logAR.message("An error occurred while recording the video, status: \(assetWriter.status.rawValue), error: \(assetWriter.error!.localizedDescription)")
            currentDuration = 0
            isRecording = false
            isWritingWithoutError = false
            return
        }

        if videoInput.isReadyForMoreMediaData {
            append(pixel: buffer, with: time)
            currentDuration = time.seconds - startingVideoTime!.seconds
            isRecording = true
            isWritingWithoutError = true
            delegate?.recorder?(didUpdateRecording: currentDuration)
        }
    }

    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        if let input = audioInput {
            audioBufferQueue.async { [weak self] in
                if input.isReadyForMoreMediaData && (self?.isRecording)! {
                    input.append(sampleBuffer)
                }
            }
        }
    }

    func pause() {
        isRecording = false
    }

    func end(writing finished: @escaping () -> Void) {
        if let session = session {
            if session.isRunning {
                session.stopRunning()
            }
        }

        if assetWriter.status == .writing {
            assetWriter.finishWriting(completionHandler: finished)
        }
    }

    func cancel() {
        if let session = session {
            if session.isRunning {
                session.stopRunning()
            }
        }
        assetWriter.cancelWriting()
    }
}

@available(iOS 11.0, *)
private extension WritAR {
    func append(pixel buffer: CVPixelBuffer, with time: CMTime) {
        pixelBufferInput.append(buffer, withPresentationTime: time)
    }
}

//Simple Logging to show logs only while debugging.
class logAR {
    class func message(_ message: String) {
        #if DEBUG
            print("ARVideoKit @ \(Date().timeIntervalSince1970):- \(message)")
        #endif
    }

    class func remove(from path: URL?) {
        if let file = path?.path {
            let manager = FileManager.default
            if manager.fileExists(atPath: file) {
                do {
                    try manager.removeItem(atPath: file)
                    self.message("Successfully deleted the media file from the cache after exporting to Camera Roll.")
                } catch let error {
                    self.message("An error occurred while deleting cached media: \(error)")
                }
            }
        }
    }
}

Here is the code that causes this error:

func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    if let input = audioInput {
        audioBufferQueue.async { [weak self] in
            if input.isReadyForMoreMediaData && (self?.isRecording)! {
                input.append(sampleBuffer)
            }
        }
    }
}

You should make sure your AVAssetWriter session is started before calling input.append(..). AVAssetWriter doesn't seem to have a property that returns the session state, so you should add an isSessionStarted flag to the WritAR class.
Then check this flag before calling input.append(..) and (re)start the session if needed.
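
A minimal sketch of that flag, declared alongside the writer's other state in WritAR (isSessionStarted is simply the name suggested above):

private var isSessionStarted = false

If the same instance is ever reused for another recording, it should also be reset to false when writing finishes or is cancelled.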

Edit: write a helper function that starts the session:

func startSessionIfNeeded(atSourceTime time: CMTime) {
    if isSessionStarted {
        return
    }
    assetWriter.startSession(atSourceTime: time)
    isSessionStarted = true
}

In your code:

func insert(pixel buffer: CVPixelBuffer, with time: CMTime) {
    if assetWriter.status == .unknown {
        guard startingVideoTime == nil else {
            isWritingWithoutError = false
            return
        }
        startingVideoTime = time
        if assetWriter.startWriting() {
            assetWriter.startSession(atSourceTime: startingVideoTime!)
            currentDuration = 0
            isRecording = true
            isWritingWithoutError = true
        } else {
            ...
        }

Replace the line assetWriter.startSession(atSourceTime: startingVideoTime!) with a call to the helper function, startSessionIfNeeded(atSourceTime: startingVideoTime!) (note the ! — startingVideoTime is optional, while the helper takes a non-optional CMTime).
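
After that replacement, the start-up branch of insert(pixel:with:) would look roughly like this:

        if assetWriter.startWriting() {
            // the helper records that the session is started, so the audio path won't start it again
            startSessionIfNeeded(atSourceTime: startingVideoTime!)
            currentDuration = 0
            isRecording = true
            isWritingWithoutError = true
        }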

Also, change your captureOutput method:

func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    if let input = audioInput {
        audioBufferQueue.async { [weak self] in
            guard let self = self else { return }
            // startingVideoTime is optional; the session can only be started once the video side has set it
            if let startTime = self.startingVideoTime {
                self.startSessionIfNeeded(atSourceTime: startTime)
            }
            if input.isReadyForMoreMediaData && self.isRecording {
                input.append(sampleBuffer)
            }
        }
    }
}
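
One caveat: startSession(atSourceTime:) itself raises an NSInternalInconsistencyException if it is called before startWriting() has moved the writer into the .writing status. A slightly more defensive version of the helper (a sketch, not part of the original answer) guards on that as well:

func startSessionIfNeeded(atSourceTime time: CMTime) {
    // Bail out until startWriting() has succeeded on the video path;
    // starting a session on a writer that is still .unknown also throws.
    guard !isSessionStarted, assetWriter.status == .writing else { return }
    assetWriter.startSession(atSourceTime: time)
    isSessionStarted = true
}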

The crash may be caused by incorrect weak self usage. I fixed the potential crash here - https://github.com/AFathi/ARVideoKit/pull/122/files

Check if it helps.
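
For reference, the force unwrap in the original captureOutput, (self?.isRecording)!, crashes whenever self has been deallocated while blocks are still queued. The usual shape of the fix (a sketch; see the linked PR for the actual change) is an early return instead of a force unwrap:

audioBufferQueue.async { [weak self] in
    // If self is gone, there is nothing left to record into; just drop the buffer.
    guard let self = self else { return }
    if input.isReadyForMoreMediaData && self.isRecording {
        input.append(sampleBuffer)
    }
}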
