简体   繁体   English

Swift 4:保存自拍视频后视频的大小发生更改

[英]The size of video change after save a selfie video swift 4

The size of video change after save a selfie video. 保存自拍视频后,视频大小会发生变化。 This problem only occurs when taking a selfie video. 仅在拍摄自拍视频时才会出现此问题。

how do I correctly support portrait and landscape orientations and have it reflected correctly in the video file output when I take a video selfie. 拍摄视频自拍时,如何正确支持纵向和横向方向并使其正确反映在视频文件输出中。 Below is the full source I have written : 以下是我写的完整资料:

// Asset to composite (note: `composition` below is created but never used —
// the snippet operates on `asset`/`clipVideoTrack` directly).
    let asset = AVURLAsset(url: videoURL as URL)
    let composition = AVMutableComposition.init()
    composition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid)

    // First (and assumed only) video track of the recording.
    // NOTE(review): crashes if the asset has no video track — TODO guard.
    let clipVideoTrack = asset.tracks(withMediaType: AVMediaType.video)[0]


    // Layer instruction used to rotate the track to portrait.
    let transformer = AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)

    // NOTE(review): `videoAssetOrientation_` is assigned below but never read
    // afterwards — dead state kept only for debugging.
    var videoAssetOrientation_: UIImageOrientation = .up
    var isVideoAssetPortrait_: Bool = false

    // The capture device records landscape frames and encodes the intended
    // display orientation into `preferredTransform`; inspect its rotation
    // components to decide whether the clip should be shown portrait.
    let videoTransform:CGAffineTransform = clipVideoTrack.preferredTransform



    // 90° rotation either way (b/c are ±1, a/d are 0) means portrait.
    if (videoTransform.a == 0.0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0)
        || (videoTransform.a == 0.0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
        isVideoAssetPortrait_ = true
    }

    // 90° clockwise: portrait, home button at the bottom.
    if videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0 {
        videoAssetOrientation_ = .right
        isVideoAssetPortrait_ = true
    }
    // 90° counter-clockwise: portrait upside down.
    if videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0 {
        videoAssetOrientation_ = .left
        isVideoAssetPortrait_ = true
    }
    // Identity: landscape as recorded.
    if videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0 {
        videoAssetOrientation_ = .up
    }
    // 180° rotation: landscape upside down.
    if videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0 {
        videoAssetOrientation_ = .down
    }

    // Apply the track's own preferred transform so the exported frames are
    // drawn in display orientation.
    transformer.setTransform(clipVideoTrack.preferredTransform, at: kCMTimeZero)

    var naturalSize = CGSize()

    // `naturalSize` is always the landscape sensor size; swap the sides for
    // portrait clips so the render size matches the rotated frames.
    if isVideoAssetPortrait_ {
        naturalSize = CGSize(width: clipVideoTrack.naturalSize.height, height: clipVideoTrack.naturalSize.width)
    } else {
        naturalSize = clipVideoTrack.naturalSize
    }

    var renderWidth: CGFloat!
    var renderHeight: CGFloat!

    renderWidth = naturalSize.width
    renderHeight = naturalSize.height

    // Layer tree for AVVideoCompositionCoreAnimationTool: the video frames are
    // drawn into `videoLayer`, with the drawing overlay composited on top.
    let parentlayer = CALayer()
    let videoLayer = CALayer()
    let watermarkLayer = CALayer()

    // `tempImageView` holds the user's drawing; presumably set elsewhere in
    // this class — TODO confirm.
    watermarkLayer.contents = tempImageView.image?.cgImage
    watermarkLayer.opacity = 1.0

    parentlayer.frame = CGRect(x: 0, y: 0, width: renderWidth, height: renderHeight)
    videoLayer.frame = CGRect(x: 0, y: 0, width: renderWidth, height: renderHeight)
    watermarkLayer.frame = CGRect(x: 0, y: 0 ,width: renderWidth, height: renderHeight)

    parentlayer.addSublayer(videoLayer)
    parentlayer.addSublayer(watermarkLayer)

    let videoComposition = AVMutableVideoComposition()
    videoComposition.renderSize = CGSize(width: renderWidth, height: renderHeight)
    videoComposition.frameDuration = CMTimeMake(1, 30)
    videoComposition.renderScale = 1.0

    // Composite the watermark/drawing layer over the video frames.
    videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayers: [videoLayer], in: parentlayer)

    let instruction = AVMutableVideoCompositionInstruction()
    // NOTE(review): hard-coded 60 s range — clips longer than a minute get
    // truncated; should be `asset.duration`. TODO confirm and fix.
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30))


    instruction.layerInstructions = [transformer]
    videoComposition.instructions = [instruction]

Below is what the video looks like when I take a video selfie 以下是我拍摄视频自拍时视频的样子

在此处输入图片说明

First, write 首先,写入

var switchCam = Bool()

then, if the camera switches to the front camera, write 然后,如果相机切换到前置摄像头,写入

switchCam = true

in photo editor class write 在照片编辑器类中写

override public func viewDidLoad() {
        super.viewDidLoad()

        // Mirror the preview horizontally while the front (selfie) camera is
        // active so it matches what the user sees; back camera keeps the
        // un-mirrored (identity) transform.
        let horizontalScale: CGFloat = switchCam ? -1 : 1
        videoViewContainer.transform = CGAffineTransform(scaleX: horizontalScale, y: 1)
}

then save the video 然后保存视频

// MARK: - Save a video to the photo library //标记:- 将视频保存到照片库

  /// Composites the recorded clip with the drawing overlay (and, for
  /// front-camera recordings, an un-mirroring transform), exports it as
  /// "video.mp4" in Documents, and saves the result to the photo library.
  /// - Parameter videoURL: File URL of the freshly recorded video.
  func convertVideoAndSaveTophotoLibrary(videoURL: URL) {
      let documentsDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
      // BUG FIX: use `.path`, not `.absoluteString` — fileExists(atPath:)
      // expects a plain filesystem path; a "file://…" string never matches,
      // so the stale temp file was never removed.
      let myDocumentPath = URL(fileURLWithPath: documentsDirectory).appendingPathComponent("temp.mp4").path
      let documentsDirectory2 = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] as URL
      let filePath = documentsDirectory2.appendingPathComponent("video.mp4")
      deleteFile(filePath: filePath as NSURL)

      // Remove any leftover temp file from a previous run.
      if FileManager.default.fileExists(atPath: myDocumentPath) {
          do {
              try FileManager.default.removeItem(atPath: myDocumentPath)
          } catch {
              print(error)
          }
      }

      // Asset to composite. (The previous unused AVMutableComposition and the
      // never-read orientation variable were dead code and have been removed.)
      let asset = AVURLAsset(url: videoURL)
      guard let clipVideoTrack = asset.tracks(withMediaType: AVMediaType.video).first else {
          print("convertVideoAndSaveTophotoLibrary: no video track in \(videoURL)")
          return
      }

      // The capture device records landscape frames and stores the display
      // orientation in `preferredTransform`; a ±90° rotation means portrait.
      let videoTransform = clipVideoTrack.preferredTransform
      let isVideoAssetPortrait =
          (videoTransform.a == 0.0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0.0)
          || (videoTransform.a == 0.0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0.0)

      // Layer instruction that rotates the track into display orientation.
      let transformer = AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)
      transformer.setTransform(videoTransform, at: kCMTimeZero)

      // `naturalSize` is the landscape sensor size; swap sides for portrait
      // clips so the render size matches the rotated frames (this is what
      // keeps the saved video the same size as the preview).
      let naturalSize: CGSize
      if isVideoAssetPortrait {
          naturalSize = CGSize(width: clipVideoTrack.naturalSize.height, height: clipVideoTrack.naturalSize.width)
      } else {
          naturalSize = clipVideoTrack.naturalSize
      }

      // Layer tree for the animation tool: video frames at the bottom, the
      // user's drawing (tempImageView) composited on top.
      let renderRect = CGRect(origin: .zero, size: naturalSize)
      let parentlayer = CALayer()
      let videoLayer = CALayer()
      let watermarkLayer = CALayer()
      watermarkLayer.contents = self.tempImageView.image?.cgImage
      watermarkLayer.opacity = 1.0
      parentlayer.frame = renderRect
      videoLayer.frame = renderRect
      watermarkLayer.frame = renderRect
      parentlayer.addSublayer(videoLayer)
      parentlayer.addSublayer(watermarkLayer)

      let videoComposition = AVMutableVideoComposition()
      videoComposition.renderSize = naturalSize
      videoComposition.frameDuration = CMTimeMake(1, 30)
      videoComposition.renderScale = 1.0
      // Composite the watermark/drawing layer over the video frames.
      videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayers: [videoLayer], in: parentlayer)

      let instruction = AVMutableVideoCompositionInstruction()
      // BUG FIX: cover the whole clip instead of a hard-coded 60 s range,
      // which truncated longer recordings.
      instruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration)

      if switchCam {
          // Front camera: mirror horizontally then rotate to portrait so the
          // saved file matches the mirrored preview the user saw while filming.
          var transform = CGAffineTransform(scaleX: -1.0, y: 1.0)
          transform = transform.translatedBy(x: -clipVideoTrack.naturalSize.width, y: 0.0)
          transform = transform.rotated(by: CGFloat(Double.pi / 2))
          // NOTE(review): this translates by the track WIDTH after rotating;
          // confirm it should not be the height for non-square clips.
          transform = transform.translatedBy(x: 0.0, y: -clipVideoTrack.naturalSize.width)
          transformer.setTransform(transform, at: kCMTimeZero)
      }
      instruction.layerInstructions = [transformer]
      videoComposition.instructions = [instruction]

      // Export and, on success, save to the photo library. Failures are now
      // reported instead of silently ignored, and force-unwraps are avoided.
      guard let exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality) else {
          print("convertVideoAndSaveTophotoLibrary: could not create export session")
          return
      }
      exporter.outputFileType = AVFileType.mov
      exporter.outputURL = filePath
      exporter.videoComposition = videoComposition

      exporter.exportAsynchronously {
          switch exporter.status {
          case .completed:
              guard let outputURL = exporter.outputURL else { return }
              PHPhotoLibrary.shared().performChanges({
                  PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL)
              }) { saved, error in
                  if saved {
                      // Fetch the just-saved asset and log its library URL.
                      let fetchOptions = PHFetchOptions()
                      fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: true)]
                      guard let savedAsset = PHAsset.fetchAssets(with: .video, options: fetchOptions).lastObject else { return }
                      print(savedAsset)
                      PHImageManager().requestAVAsset(forVideo: savedAsset, options: nil) { avAsset, _, _ in
                          guard let urlAsset = avAsset as? AVURLAsset else { return }
                          print(urlAsset.url)
                          DispatchQueue.main.async {
                              print(urlAsset.url.absoluteString)
                          }
                      }
                  } else if let error = error {
                      print("Saving to photo library failed: \(error)")
                  }
              }
          case .failed, .cancelled:
              print("Export failed: \(String(describing: exporter.error))")
          default:
              break
          }
      }
  }

声明:本站的技术帖子网页,遵循CC BY-SA 4.0协议,如果您需要转载,请注明本站网址或者原文地址。任何问题请咨询:yoyou2525@163.com.

 
粤ICP备18138465号  © 2020-2024 STACKOOM.COM