
ReplayKit – Recording Screen Video with Internal Audio

I have recently been working on a project in which I need to record the device screen together with the app's audio and the microphone audio.

I tried the following solutions.

1. Using ReplayKit and its startRecording function

Before calling the function:

recorder.isMicrophoneEnabled = true
startRecording(recorder)

func startRecording(_ r: RPScreenRecorder) {
    r.startRecording(handler: { (error: Error?) -> Void in
        if error == nil {
            // Recording has started
            // sender.title = "Stop"
            self.recorder.isMicrophoneEnabled = true
        } else {
            // Handle error
            print(error?.localizedDescription ?? "Unknown error")
        }
    })
}
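
For reference, a recording started this way is normally ended with RPScreenRecorder's stopRecording(handler:), which hands back an RPPreviewViewController from which the user can trim and save the clip to the camera roll. A minimal sketch of that stop side, assuming it lives in a UIViewController that adopts RPPreviewViewControllerDelegate (that context is an assumption, not part of the question):

// Minimal sketch of stopping the recording started above (assumes this
// lives in a UIViewController conforming to RPPreviewViewControllerDelegate).
func stopRecording(_ r: RPScreenRecorder) {
    r.stopRecording { previewController, error in
        if let error = error {
            print(error.localizedDescription)
            return
        }
        if let previewController = previewController {
            // The preview sheet lets the user trim and save the clip to the camera roll.
            previewController.previewControllerDelegate = self
            self.present(previewController, animated: true)
        }
    }
}

// RPPreviewViewControllerDelegate
func previewControllerDidFinish(_ previewController: RPPreviewViewController) {
    previewController.dismiss(animated: true)
}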

The problem is that the video is recorded and saved to the camera roll, but the saved video contains neither the app audio nor the microphone audio.

Then I tried ReplayKit's screen capture (startCapture) function. The code is below:

//MARK: Screen Recording
func startRecording(withFileName fileName: String, recordingHandler:@escaping (Error?)-> Void)
{
    if #available(iOS 11.0, *)
    {
        
        let fileURL = URL(fileURLWithPath: ReplayFileUtil.filePath(fileName))
   //     let updatedFileUrl = fileURL.appendingPathExtension(".mp4")
      //  removeFile(fileURL)
        let paths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
        let documentsDirectory = paths[0] as String
        let filePath : String = "\(documentsDirectory)/Replays/\(fileName)"
        if FileManager.default.fileExists(atPath: filePath) {
            print("sucess")
        }
        do {
            assetWriter = try AVAssetWriter(outputURL: fileURL, fileType: .mp4)
        } catch let error {
            print(error.localizedDescription)
        }

        var videoCleanApertureSettings = [
            AVVideoCleanApertureWidthKey : 320,
            AVVideoCleanApertureHeightKey : 480,
            AVVideoCleanApertureHorizontalOffsetKey : 10,
            AVVideoCleanApertureVerticalOffsetKey : 10
        ]
        var codecSettings = [
            AVVideoAverageBitRateKey : 960000,
            AVVideoMaxKeyFrameIntervalKey : 1,
            AVVideoCleanApertureKey : videoCleanApertureSettings
        ] as [String:Any]
        var videoOutputSettings = [
            AVVideoCodecKey : AVVideoCodecType.jpeg,
            AVVideoCompressionPropertiesKey : codecSettings,
            AVVideoWidthKey : 300,
            AVVideoHeightKey : 540
        ] as [String:Any]
        var channelLayout = AudioChannelLayout.init()
        channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_MPEG_5_1_D
        let audioOutputSettings: [String : Any] = [
            AVNumberOfChannelsKey: 6,

            AVFormatIDKey: kAudioFormatMPEG4AAC_HE,
            AVSampleRateKey: 44100,
            AVChannelLayoutKey: NSData(bytes: &channelLayout, length: MemoryLayout.size(ofValue: channelLayout)),
            ]

        audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioOutputSettings)
        videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoOutputSettings)
        
        videoInput.expectsMediaDataInRealTime = true
        audioInput.expectsMediaDataInRealTime = true
        
        if assetWriter.canAdd(videoInput) {
            print("Added video input")
            assetWriter.add(videoInput)
        }
        assetWriter.add(audioInput)
        self.assetWriter.startWriting()
        
        RPScreenRecorder.shared().isMicrophoneEnabled = true
        let time = CMTime.init(value: 10, timescale: 1)
        self.assetWriter.startSession(atSourceTime: time)
        RPScreenRecorder.shared().startCapture(handler: { (sample, bufferType, error) in

            
            recordingHandler(error)
            
            if CMSampleBufferDataIsReady(sample)
            {
                if self.assetWriter.status == AVAssetWriterStatus.unknown
                {

               //     self.assetWriter.startWriting()
                    print(self.assetWriter.status)
                    
                }
                print(self.assetWriter.status)
                if self.assetWriter.status == AVAssetWriterStatus.failed {
                    print("Asset Writer failed")
                    print("Error occured, status = \(self.assetWriter.status.rawValue), \(self.assetWriter.error!.localizedDescription) \(String(describing: self.assetWriter.error))")
                    return
                }
                
                if (bufferType == .video)
                {
                    
                    if self.videoInput.isReadyForMoreMediaData
                    {
                        print("Buffer Video Print")
                        self.videoInput.append(sample)
                    }
                }
                
                if (bufferType == .audioApp || bufferType == .audioMic)
                {
                    if self.audioInput.isReadyForMoreMediaData
                    {
                        
                        print("Audio Buffer Came")
                        self.audioInput.append(sample)
                    }
                }
            }
            
        }) { (error) in
            recordingHandler(error)
            print(error?.localizedDescription)

        }
    } else
    {
        // Fallback on earlier versions
    }
}
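
Note that ReplayFileUtil in the code above (and in the ScreenRecorder class further down) is not a ReplayKit type; it is a small file-management helper that is not shown in the post. A minimal sketch of what such a helper might look like, reconstructed only from how it is called here; the folder name and every detail below are assumptions:

import Foundation

// Hypothetical helper, reconstructed only from the calls made in this post:
// ReplayFileUtil.filePath(_:), fetchAllReplays(), delete().
class ReplayFileUtil {

    // Assumed location: a "Replays" folder inside the app's Documents directory.
    static var replayDirectory: URL {
        let documents = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
        return documents.appendingPathComponent("Replays", isDirectory: true)
    }

    // Creates the Replays folder if needed and returns an absolute path for the new file.
    static func filePath(_ fileName: String) -> String {
        try? FileManager.default.createDirectory(at: replayDirectory,
                                                 withIntermediateDirectories: true)
        return replayDirectory.appendingPathComponent(fileName).path
    }

    // Returns the URLs of every recording currently on disk.
    static func fetchAllReplays() -> [URL] {
        let urls = try? FileManager.default.contentsOfDirectory(at: replayDirectory,
                                                                includingPropertiesForKeys: nil)
        return (urls ?? []).sorted { $0.lastPathComponent < $1.lastPathComponent }
    }

    // Removes the whole Replays folder.
    static func delete() {
        try? FileManager.default.removeItem(at: replayDirectory)
    }
}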

Please suggest what I am doing wrong.

//
//  ScreenRecorder.swift
//  BugReporterTest
//
//  Created by Giridhar on 09/06/17.
//  Copyright © 2017 Giridhar. All rights reserved.
//
import Foundation
import ReplayKit
import AVKit
import Photos

class ScreenRecorder
{
    var assetWriter:AVAssetWriter!
    var videoInput:AVAssetWriterInput!
    var audioInput:AVAssetWriterInput!

    var startSesstion = false

  //  let viewOverlay = WindowUtil()

    //MARK: Screen Recording
    func startRecording(withFileName fileName: String, recordingHandler:@escaping (Error?)-> Void)
    {
        if #available(iOS 11.0, *)
        {
            let fileURL = URL(fileURLWithPath: ReplayFileUtil.filePath(fileName))
            assetWriter = try! AVAssetWriter(outputURL: fileURL, fileType:
                AVFileType.mp4)
            let videoOutputSettings: Dictionary<String, Any> = [
                AVVideoCodecKey : AVVideoCodecType.h264,
                AVVideoWidthKey : UIScreen.main.bounds.size.width,
                AVVideoHeightKey : UIScreen.main.bounds.size.height,
//                AVVideoCompressionPropertiesKey : [
//                    AVVideoAverageBitRateKey :425000, //96000
//                    AVVideoMaxKeyFrameIntervalKey : 1
//                ]
            ];
            var channelLayout = AudioChannelLayout.init()
            channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_MPEG_5_1_D
            let audioOutputSettings: [String : Any] = [
                AVNumberOfChannelsKey: 6,
                AVFormatIDKey: kAudioFormatMPEG4AAC_HE,
                AVSampleRateKey: 44100,
                AVChannelLayoutKey: NSData(bytes: &channelLayout, length: MemoryLayout.size(ofValue: channelLayout)),
                ]


            videoInput  = AVAssetWriterInput(mediaType: AVMediaType.video,outputSettings: videoOutputSettings)
            audioInput  = AVAssetWriterInput(mediaType: AVMediaType.audio,outputSettings: audioOutputSettings)

            videoInput.expectsMediaDataInRealTime = true
            audioInput.expectsMediaDataInRealTime = true

            assetWriter.add(videoInput)
            assetWriter.add(audioInput)

            RPScreenRecorder.shared().startCapture(handler: { (sample, bufferType, error) in
                recordingHandler(error)

                if CMSampleBufferDataIsReady(sample)
                {

                    DispatchQueue.main.async { [weak self] in
                        if self?.assetWriter.status == AVAssetWriterStatus.unknown {
                            print("AVAssetWriterStatus.unknown")
                            if !(self?.assetWriter.startWriting())! {
                                return
                            }
                            self?.assetWriter.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sample))
                            self?.startSesstion = true
                        }

//                    if self.assetWriter.status == AVAssetWriterStatus.unknown
//                    {
//                        self.assetWriter.startWriting()
//                        self.assetWriter.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sample))
//                        self?.startSesstion = true
                    }
                    if self.assetWriter.status == AVAssetWriterStatus.failed {

                        print("Error occured, status = \(String(describing: self.assetWriter.status.rawValue)), \(String(describing: self.assetWriter.error!.localizedDescription)) \(String(describing: self.assetWriter.error))")
                         recordingHandler(self.assetWriter.error)
                        return
                    }
                    if (bufferType == .video)
                    {
                        if(self.videoInput.isReadyForMoreMediaData) && self.startSesstion {
                            self.videoInput.append(sample)
                        }
                    }
                    if (bufferType == .audioApp)
                    {
                        if self.audioInput.isReadyForMoreMediaData
                        {
                            //print("Audio Buffer Came")
                            self.audioInput.append(sample)
                        }
                    }
                }
            }) { (error) in
                recordingHandler(error)
//                debugPrint(error)
            }
        } else
        {
            // Fallback on earlier versions
        }
    }
    func stopRecording(isBack: Bool, aPathName: String ,handler: @escaping (Error?) -> Void)
    {

        //var isSucessFullsave = false
        if #available(iOS 11.0, *)
        {
            self.startSesstion = false
            RPScreenRecorder.shared().stopCapture{ (error) in
                self.videoInput.markAsFinished()
                self.audioInput.markAsFinished()

                handler(error)
                if error == nil{
                    self.assetWriter.finishWriting{
                         self.startSesstion = false
                        print(ReplayFileUtil.fetchAllReplays())
                        if !isBack{
                            self.PhotosSaveWithAurtorise(aPathName: aPathName)
                        }else{
                            self.deleteDirectory()
                        }
                    }
                }else{
                     self.deleteDirectory()
                }
            }
        }else {
           // print("Fallback on earlier versions")
        }
    }
    func PhotosSaveWithAurtorise(aPathName: String)  {
        if PHPhotoLibrary.authorizationStatus() == .authorized {
            self.SaveToCamera(aPathName: aPathName)
        } else {
            PHPhotoLibrary.requestAuthorization({ (status) in
                if status == .authorized {
                    self.SaveToCamera(aPathName: aPathName)
                }
            })
        }
    }
    func SaveToCamera(aPathName: String){
        PHPhotoLibrary.shared().performChanges({
            PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: (ReplayFileUtil.fetchAllReplays().last)!)
        }) { saved, error in
            if saved {
                 addScreenCaptureVideo(aPath: aPathName)
                print("Save")
            }else{
                NotificationCenter.default.post(name: NSNotification.Name(rawValue: "isScreenRecordFaildToSave"), object: nil)
                print("error to save - \(error)")
            }
        }
    }
    func deleteDirectory()  {
        ReplayFileUtil.delete()
    }

}
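
For completeness, a minimal usage sketch of the ScreenRecorder class above; the file name and the call sites are assumptions, not part of the original post:

// Hypothetical call site; "demo.mp4" and the two actions are assumptions.
let screenRecorder = ScreenRecorder()

func startTapped() {
    screenRecorder.startRecording(withFileName: "demo.mp4") { error in
        if let error = error {
            print("Recording failed: \(error.localizedDescription)")
        }
    }
}

func stopTapped() {
    // isBack: false saves the finished file to the photo library;
    // true simply deletes the Replays directory.
    screenRecorder.stopRecording(isBack: false, aPathName: "demo.mp4") { error in
        if let error = error {
            print("Stopping failed: \(error.localizedDescription)")
        }
    }
}

The key difference from the earlier startCapture snippet is that the writer session is started lazily: startWriting() and startSession(atSourceTime:) are only called when the first sample buffer arrives, using that buffer's presentation timestamp, rather than with a hard-coded CMTime before capture begins. Also note that the capture handler appends only .audioApp buffers, so the saved file carries the app's internal audio while .audioMic buffers are ignored, and that saving to the photo library requires the corresponding photo-library usage description key in Info.plist.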
