
How do I export UIImage array as a movie in Swift 3?

I need to export an array of UIImage and build a movie, putting some text in front of the images and, if possible, music as well. Can you give me a hand with the code? I have only found examples in Objective-C and older versions of Swift.

This is the first answer I posted to the question: create movie from [UIImage], Swift

Following is a copy of that answer:

I converted the Objective-C code posted by @Cameron E to Swift 3, and it works. The original answer: @Cameron E's CEMovieMaker.

import Foundation
import AVFoundation
import UIKit

public typealias CXEMovieMakerCompletion = (URL) -> Void
typealias CXEMovieMakerUIImageExtractor = (AnyObject) -> UIImage?


public class CXEImagesToVideo: NSObject{
var assetWriter:AVAssetWriter!
var writeInput:AVAssetWriterInput!
var bufferAdapter:AVAssetWriterInputPixelBufferAdaptor!
var videoSettings:[String : Any]!
var frameTime:CMTime!
var fileURL:URL!

var completionBlock: CXEMovieMakerCompletion?
var movieMakerUIImageExtractor:CXEMovieMakerUIImageExtractor?


public class func videoSettings(codec:String, width:Int, height:Int) -> [String: Any]{
    if width % 16 != 0 {
        print("warning: video settings width must be divisible by 16")
    }

    // Use the codec passed by the caller instead of hard-coding H.264.
    let videoSettings: [String: Any] = [AVVideoCodecKey: codec,
                                        AVVideoWidthKey: width,
                                        AVVideoHeightKey: height]

    return videoSettings
}

public init(videoSettings: [String: Any]) {
    super.init()

    let paths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
    // Use a .mov extension so the path matches the QuickTime file type used below.
    let tempPath = paths[0] + "/exportvideo.mov"
    if(FileManager.default.fileExists(atPath: tempPath)){
        guard (try? FileManager.default.removeItem(atPath: tempPath)) != nil else {
            print("remove path failed")
            return
        }
    }

    self.fileURL = URL(fileURLWithPath: tempPath)
    self.assetWriter = try! AVAssetWriter(url: self.fileURL, fileType: AVFileTypeQuickTimeMovie)

    self.videoSettings = videoSettings
    self.writeInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
    assert(self.assetWriter.canAdd(self.writeInput), "add failed")

    self.assetWriter.add(self.writeInput)
    let bufferAttributes:[String: Any] = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32ARGB)]
    self.bufferAdapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: self.writeInput, sourcePixelBufferAttributes: bufferAttributes)
    self.frameTime = CMTimeMake(1, 1)
}

public func createMovieFrom(urls: [URL], withCompletion: @escaping CXEMovieMakerCompletion){
    self.createMovieFromSource(images: urls as [AnyObject], extractor: { (inputObject: AnyObject) -> UIImage? in
        // Return nil instead of crashing if a URL cannot be read.
        guard let data = try? Data(contentsOf: inputObject as! URL) else { return nil }
        return UIImage(data: data) }, withCompletion: withCompletion)
}

public func createMovieFrom(images: [UIImage], withCompletion: @escaping CXEMovieMakerCompletion){
    self.createMovieFromSource(images: images, extractor: {(inputObject:AnyObject) -> UIImage? in
    return inputObject as? UIImage}, withCompletion: withCompletion)
}

func createMovieFromSource(images: [AnyObject], extractor: @escaping CXEMovieMakerUIImageExtractor, withCompletion: @escaping CXEMovieMakerCompletion){
    self.completionBlock = withCompletion

    self.assetWriter.startWriting()
    self.assetWriter.startSession(atSourceTime: kCMTimeZero)

    let mediaInputQueue = DispatchQueue(label: "mediaInputQueue")
    var i = 0
    let frameNumber = images.count

    self.writeInput.requestMediaDataWhenReady(on: mediaInputQueue){
        while(true){
            if(i >= frameNumber){
                break
            }

            if (self.writeInput.isReadyForMoreMediaData){
                var sampleBuffer:CVPixelBuffer?
                autoreleasepool{
                    // Skip frames that cannot be decoded instead of force-unwrapping a nil image.
                    guard let img = extractor(images[i]), let cgImage = img.cgImage else {
                        i += 1
                        print("Warning: could not extract one of the frames")
                        return
                    }
                    sampleBuffer = self.newPixelBufferFrom(cgImage: cgImage)
                }
                if (sampleBuffer != nil){
                    if(i == 0){
                        self.bufferAdapter.append(sampleBuffer!, withPresentationTime: kCMTimeZero)
                    }else{
                        let value = i - 1
                        let lastTime = CMTimeMake(Int64(value), self.frameTime.timescale)
                        let presentTime = CMTimeAdd(lastTime, self.frameTime)
                        self.bufferAdapter.append(sampleBuffer!, withPresentationTime: presentTime)
                    }
                    i = i + 1
                }
            }
        }
        self.writeInput.markAsFinished()
        self.assetWriter.finishWriting {
            DispatchQueue.main.sync {
                self.completionBlock!(self.fileURL)
            }
        }
    }
}

func newPixelBufferFrom(cgImage:CGImage) -> CVPixelBuffer?{
    let options:[String: Any] = [kCVPixelBufferCGImageCompatibilityKey as String: true, kCVPixelBufferCGBitmapContextCompatibilityKey as String: true]
    var pxbuffer:CVPixelBuffer?
    let frameWidth = self.videoSettings[AVVideoWidthKey] as! Int
    let frameHeight = self.videoSettings[AVVideoHeightKey] as! Int

    let status = CVPixelBufferCreate(kCFAllocatorDefault, frameWidth, frameHeight, kCVPixelFormatType_32ARGB, options as CFDictionary?, &pxbuffer)
    assert(status == kCVReturnSuccess && pxbuffer != nil, "newPixelBuffer failed")

    CVPixelBufferLockBaseAddress(pxbuffer!, CVPixelBufferLockFlags(rawValue: 0))
    let pxdata = CVPixelBufferGetBaseAddress(pxbuffer!)
    let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
    let context = CGContext(data: pxdata, width: frameWidth, height: frameHeight, bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pxbuffer!), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue)
    assert(context != nil, "context is nil")

    context!.concatenate(CGAffineTransform.identity)
    context!.draw(cgImage, in: CGRect(x: 0, y: 0, width: cgImage.width, height: cgImage.height))
    CVPixelBufferUnlockBaseAddress(pxbuffer!, CVPixelBufferLockFlags(rawValue: 0))
    return pxbuffer
}
}
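
Note that videoSettings(codec:width:height:) only prints a warning when the width is not a multiple of 16. If you hit that warning, one simple option (a sketch of my own, not part of the converted CEMovieMaker code; the 1080/1920 values are placeholders) is to round the dimensions down before building the settings:

// Sketch: round a pixel dimension down to the nearest multiple of 16
// before passing it to CXEImagesToVideo.videoSettings(codec:width:height:).
func multipleOf16(_ value: Int) -> Int {
    return max(16, (value / 16) * 16)
}

let width = multipleOf16(1080)    // 1072
let height = multipleOf16(1920)   // 1920
let settings = CXEImagesToVideo.videoSettings(codec: AVVideoCodecH264,
                                              width: width,
                                              height: height)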

Usage:

var uiImages = [UIImage]()

// ... fill uiImages with your frames ...

let settings = CXEImagesToVideo.videoSettings(codec: AVVideoCodecH264,
                                              width: uiImages[0].cgImage!.width,
                                              height: uiImages[0].cgImage!.height)
let movieMaker = CXEImagesToVideo(videoSettings: settings)
movieMaker.createMovieFrom(images: uiImages) { (fileURL: URL) in
    let video = AVAsset(url: fileURL)
    let playerItem = AVPlayerItem(asset: video)
    let player = CXEPlayer()
    player.setPlayerItem(playerItem: playerItem)

    self.playerView.player = player
}

Export or play the video with the fileURL. Gist: ImagesToAssetURL async and sync
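
The question also asks about putting text in front of the images and adding music. Neither is handled by CXEImagesToVideo itself; below are two rough sketches of how I would approach them (helper names, fonts, and URLs are placeholders, so adapt them to your project).

To draw a caption on each frame, render it into a copy of the UIImage before passing the array to createMovieFrom(images:withCompletion:):

import UIKit

// Sketch: draw a caption near the bottom of a UIImage and return the
// composited image. Font, color and layout are arbitrary placeholder choices.
func addCaption(_ caption: String, to image: UIImage) -> UIImage {
    UIGraphicsBeginImageContextWithOptions(image.size, false, image.scale)
    defer { UIGraphicsEndImageContext() }

    image.draw(at: .zero)

    let attributes: [String: Any] = [
        NSFontAttributeName: UIFont.boldSystemFont(ofSize: image.size.height * 0.05),
        NSForegroundColorAttributeName: UIColor.white
    ]
    let textRect = CGRect(x: 20,
                          y: image.size.height * 0.85,
                          width: image.size.width - 40,
                          height: image.size.height * 0.1)
    (caption as NSString).draw(in: textRect, withAttributes: attributes)

    return UIGraphicsGetImageFromCurrentImageContext() ?? image
}

// e.g. let captionedImages = uiImages.map { addCaption("Hello", to: $0) }

For music, one option is to merge the exported movie with an audio file using AVMutableComposition and re-export the result, for example from the completion block above (audioURL and outputURL are placeholders):

import AVFoundation

// Sketch: merge the generated video with an audio file. The audio is cut
// to whichever of the two assets is shorter.
func addMusic(from audioURL: URL, toVideoAt videoURL: URL, outputURL: URL,
              completion: @escaping (Bool) -> Void) {
    let videoAsset = AVURLAsset(url: videoURL)
    let audioAsset = AVURLAsset(url: audioURL)

    guard let videoTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo).first,
        let audioTrack = audioAsset.tracks(withMediaType: AVMediaTypeAudio).first else {
            completion(false)
            return
    }

    let composition = AVMutableComposition()
    let compVideo = composition.addMutableTrack(withMediaType: AVMediaTypeVideo,
                                                preferredTrackID: kCMPersistentTrackID_Invalid)
    let compAudio = composition.addMutableTrack(withMediaType: AVMediaTypeAudio,
                                                preferredTrackID: kCMPersistentTrackID_Invalid)

    let audioDuration = CMTimeMinimum(videoAsset.duration, audioAsset.duration)
    do {
        try compVideo.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration),
                                      of: videoTrack, at: kCMTimeZero)
        try compAudio.insertTimeRange(CMTimeRangeMake(kCMTimeZero, audioDuration),
                                      of: audioTrack, at: kCMTimeZero)
    } catch {
        completion(false)
        return
    }

    guard let exporter = AVAssetExportSession(asset: composition,
                                              presetName: AVAssetExportPresetHighestQuality) else {
        completion(false)
        return
    }
    exporter.outputURL = outputURL
    exporter.outputFileType = AVFileTypeQuickTimeMovie
    exporter.exportAsynchronously {
        completion(exporter.status == .completed)
    }
}

Call it with the fileURL from the completion handler, the URL of your audio file, and a destination URL of your choice.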
