
create movie from [UIImage], Swift

I've found export [UIImage] as movie, but it's all in Objective-C and I can't figure it out for Swift.

I need to create a video from [UIImage]

I'm working from Zoul's answer at the above link, part 1) Wire the writer.

So far I have:

let paths = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)
let documentsURL = paths[0] as! NSURL
let videoWriter: AVAssetWriter = AVAssetWriter(URL: documentsURL, fileType: AVFileTypeQuickTimeMovie, error: nil)

var videoSettings: NSDictionary = NSDictionary(

I can't figure out the correct Swift version of his

NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:640], AVVideoWidthKey,
[NSNumber numberWithInt:480], AVVideoHeightKey,
nil];

I converted the Objective-C code posted by @Cameron E to Swift 3, and it's working. The answer's link: @Cameron E's CEMovieMaker

The following is the CXEImagesToVideo class:

//
//  CXEImagesToVideo.swift
//  VideoAPPTest
//
//  Created by Wulei on 16/12/14.
//  Copyright © 2016 wulei. All rights reserved.
//

import Foundation
import AVFoundation
import UIKit

typealias CXEMovieMakerCompletion = (URL) -> Void
typealias CXEMovieMakerUIImageExtractor = (AnyObject) -> UIImage?


public class CXEImagesToVideo: NSObject{
    var assetWriter:AVAssetWriter!
    var writeInput:AVAssetWriterInput!
    var bufferAdapter:AVAssetWriterInputPixelBufferAdaptor!
    var videoSettings:[String : Any]!
    var frameTime:CMTime!
    var fileURL:URL!

    var completionBlock: CXEMovieMakerCompletion?
    var movieMakerUIImageExtractor:CXEMovieMakerUIImageExtractor?


public class func videoSettings(codec:String, width:Int, height:Int) -> [String: Any]{
    if width % 16 != 0 {
        print("warning: video settings width must be divisible by 16")
    }

    let videoSettings: [String: Any] = [AVVideoCodecKey: codec,
                         AVVideoWidthKey: width,
                         AVVideoHeightKey: height]

    return videoSettings
}

public init(videoSettings: [String: Any]) {
    super.init()

    let paths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
    let tempPath = paths[0] + "/exportvideo.mp4"
    if(FileManager.default.fileExists(atPath: tempPath)){
        guard (try? FileManager.default.removeItem(atPath: tempPath)) != nil else {
            print("remove path failed")
            return
        }
    }

    self.fileURL = URL(fileURLWithPath: tempPath)
    self.assetWriter = try! AVAssetWriter(url: self.fileURL, fileType: AVFileTypeQuickTimeMovie)

    self.videoSettings = videoSettings
    self.writeInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
    assert(self.assetWriter.canAdd(self.writeInput), "add failed")

    self.assetWriter.add(self.writeInput)
    let bufferAttributes:[String: Any] = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32ARGB)]
    self.bufferAdapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: self.writeInput, sourcePixelBufferAttributes: bufferAttributes)
    self.frameTime = CMTimeMake(1, 10) // 1/10 second per frame, i.e. 10 fps
}

func createMovieFrom(urls: [URL], withCompletion: @escaping CXEMovieMakerCompletion){
    self.createMovieFromSource(images: urls as [AnyObject], extractor:{(inputObject:AnyObject) ->UIImage? in
        return UIImage(data: try! Data(contentsOf: inputObject as! URL))}, withCompletion: withCompletion)
}

func createMovieFrom(images: [UIImage], withCompletion: @escaping CXEMovieMakerCompletion){
    self.createMovieFromSource(images: images, extractor: {(inputObject:AnyObject) -> UIImage? in
    return inputObject as? UIImage}, withCompletion: withCompletion)
}

func createMovieFromSource(images: [AnyObject], extractor: @escaping CXEMovieMakerUIImageExtractor, withCompletion: @escaping CXEMovieMakerCompletion){
    self.completionBlock = withCompletion

    self.assetWriter.startWriting()
    self.assetWriter.startSession(atSourceTime: kCMTimeZero)

    let mediaInputQueue = DispatchQueue(label: "mediaInputQueue")
    var i = 0
    let frameNumber = images.count

    self.writeInput.requestMediaDataWhenReady(on: mediaInputQueue){
        while(true){
            if(i >= frameNumber){
                break
            }

            if (self.writeInput.isReadyForMoreMediaData){
                var sampleBuffer:CVPixelBuffer?
                autoreleasepool{
                    guard let img = extractor(images[i]) else {
                        i += 1
                        print("Warning: could not extract one of the frames")
                        return
                    }
                    sampleBuffer = self.newPixelBufferFrom(cgImage: img.cgImage!)
                }
                if (sampleBuffer != nil){
                    if(i == 0){
                        self.bufferAdapter.append(sampleBuffer!, withPresentationTime: kCMTimeZero)
                    }else{
                        let value = i - 1
                        let lastTime = CMTimeMake(Int64(value), self.frameTime.timescale)
                        let presentTime = CMTimeAdd(lastTime, self.frameTime)
                        self.bufferAdapter.append(sampleBuffer!, withPresentationTime: presentTime)
                    }
                    i = i + 1
                }
            }
        }
        self.writeInput.markAsFinished()
        self.assetWriter.finishWriting {
            DispatchQueue.main.sync {
                self.completionBlock!(self.fileURL)
            }
        }
    }
}

func newPixelBufferFrom(cgImage:CGImage) -> CVPixelBuffer?{
    let options:[String: Any] = [kCVPixelBufferCGImageCompatibilityKey as String: true, kCVPixelBufferCGBitmapContextCompatibilityKey as String: true]
    var pxbuffer:CVPixelBuffer?
    let frameWidth = self.videoSettings[AVVideoWidthKey] as! Int
    let frameHeight = self.videoSettings[AVVideoHeightKey] as! Int

    let status = CVPixelBufferCreate(kCFAllocatorDefault, frameWidth, frameHeight, kCVPixelFormatType_32ARGB, options as CFDictionary?, &pxbuffer)
    assert(status == kCVReturnSuccess && pxbuffer != nil, "newPixelBuffer failed")

    CVPixelBufferLockBaseAddress(pxbuffer!, CVPixelBufferLockFlags(rawValue: 0))
    let pxdata = CVPixelBufferGetBaseAddress(pxbuffer!)
    let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
    let context = CGContext(data: pxdata, width: frameWidth, height: frameHeight, bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pxbuffer!), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue)
    assert(context != nil, "context is nil")

    context!.concatenate(CGAffineTransform.identity)
    context!.draw(cgImage, in: CGRect(x: 0, y: 0, width: cgImage.width, height: cgImage.height))
    CVPixelBufferUnlockBaseAddress(pxbuffer!, CVPixelBufferLockFlags(rawValue: 0))
    return pxbuffer
}
}

Usage:

    var uiImages = [UIImage]()

    /** add images to uiImages */

    let settings = CXEImagesToVideo.videoSettings(codec: AVVideoCodecH264, width: (uiImages[0].cgImage?.width)!, height: (uiImages[0].cgImage?.height)!)
    let movieMaker = CXEImagesToVideo(videoSettings: settings)
    movieMaker.createMovieFrom(images: uiImages){ (fileURL:URL) in
        let video = AVAsset(url: fileURL)
        let playerItem = AVPlayerItem(asset: video)
        let avPlayer = AVPlayer(playerItem: playerItem)
        let playerLayer = AVPlayerLayer(player: avPlayer)
        playerLayer.frame = CGRect(x: 0, y: 0, width: UIScreen.main.bounds.width, height: UIScreen.main.bounds.width * 3.0 / 4.0)
        self.view.layer.addSublayer(playerLayer)
        avPlayer.play()
    }
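
The class also accepts image file URLs directly via createMovieFrom(urls:withCompletion:); a minimal sketch (imageURLs below is a placeholder for your own local file URLs):

    let imageURLs: [URL] = [] // local file URLs of the frame images
    let movieMaker = CXEImagesToVideo(videoSettings: settings)
    movieMaker.createMovieFrom(urls: imageURLs) { (fileURL: URL) in
        print("movie written to \(fileURL)")
    }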

Export or play the video using fileURL. There are both async and sync variants; see the Gist: https://gist.github.com/Willib/b97b08d8d877ca5d875ff14abb4c3f1a
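
For example, to let the user export the finished file, one option (a minimal sketch, not part of the original answer) is to hand fileURL to a UIActivityViewController from the completion block:

    movieMaker.createMovieFrom(images: uiImages) { (fileURL: URL) in
        // Present the system share sheet so the user can export the file.
        // Assumes this code runs inside a UIViewController (self).
        let activityVC = UIActivityViewController(activityItems: [fileURL], applicationActivities: nil)
        self.present(activityVC, animated: true, completion: nil)
    }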

Constructing a Dictionary literal is straightforward:

import AVFoundation

let videoSettings: [String: Any] = [
    AVVideoCodecKey: AVVideoCodecH264,
    AVVideoWidthKey: 640,
    AVVideoHeightKey: 480
]

As for everything else, I would encourage you to read through Apple's The Swift Programming Language to establish fundamentals first, rather than relying on SO or tutorials that happen to cover what you want to do. "Teach a man to fish", as they say.
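
To complete the "wire the writer" step from the question with that dictionary, a minimal sketch (assuming current AVFoundation Swift names; the output location and forced try are only illustrative) could be:

    import AVFoundation

    // Any writable location works; here the file goes into the Documents directory.
    let outputURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
        .appendingPathComponent("output.mov")

    let writer = try! AVAssetWriter(outputURL: outputURL, fileType: .mov)
    let input = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
    let adaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: input,
                                                       sourcePixelBufferAttributes: nil)

    if writer.canAdd(input) {
        writer.add(input)
    }
    writer.startWriting()
    writer.startSession(atSourceTime: .zero)
    // Frames can now be appended via adaptor.append(_:withPresentationTime:),
    // followed by input.markAsFinished() and writer.finishWriting { ... }.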

**For Swift 4.2**

  • Generate a video from images and manually save it
  • Images come from the previous view controller

//  VideoMakerViewController.swift
//  VideoMaker
//
//  Created by ISHA PATEL on 05/10/18.
//  Copyright © 2018 Isha Patel. All rights reserved.


import AVFoundation
import UIKit
import Photos
import AVKit
var tempurl=""

class VideoMakerViewController: UIViewController {

var images:[UIImage]=[]
@IBOutlet weak var videoview: UIView!

override func viewDidLoad() {
    super.viewDidLoad()

    DispatchQueue.main.async {
        let settings = RenderSettings()
        let imageAnimator = ImageAnimator(renderSettings: settings,imagearr: self.images)
        imageAnimator.render() {
            self.displayVideo()
        }
    }
}

func displayVideo()
{

    let u: String = tempurl
    let player = AVPlayer(url: URL(fileURLWithPath: u))
    let playerController = AVPlayerViewController()
    playerController.player = player
    self.addChild(playerController)
    videoview.addSubview(playerController.view)
    playerController.view.frame.size=(videoview.frame.size)
    playerController.view.contentMode = .scaleAspectFit
    playerController.view.backgroundColor=UIColor.clear
    videoview.backgroundColor=UIColor.clear
    player.play()
}

@IBAction func save(_ sender: UIBarButtonItem) {
    PHPhotoLibrary.requestAuthorization { status in
        guard status == .authorized else { return }

        let u: String = tempurl
        PHPhotoLibrary.shared().performChanges({
            PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: URL(fileURLWithPath: u))
        }) { success, error in
            if !success {
                print("Could not save video to photo library:", error!)
            }
        }
    }
}


}

struct RenderSettings {

var width: CGFloat = 1500
var height: CGFloat = 844
var fps: Int32 = 2   // 2 frames per second
var avCodecKey = AVVideoCodecType.h264
var videoFilename = "renderExportVideo"
var videoFilenameExt = "mp4"

var size: CGSize {
    return CGSize(width: width, height: height)
}

var outputURL: NSURL {

    let fileManager = FileManager.default
    if let tmpDirURL = try? fileManager.url(for: .cachesDirectory, in: .userDomainMask, appropriateFor: nil, create: true) {
        return tmpDirURL.appendingPathComponent(videoFilename).appendingPathExtension(videoFilenameExt) as NSURL
    }
    fatalError("URLForDirectory() failed")
}
}

class VideoWriter {

let renderSettings: RenderSettings

var videoWriter: AVAssetWriter!
var videoWriterInput: AVAssetWriterInput!
var pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor!

var isReadyForData: Bool {
    return videoWriterInput?.isReadyForMoreMediaData ?? false
}

class func pixelBufferFromImage(image: UIImage, pixelBufferPool: CVPixelBufferPool, size: CGSize) -> CVPixelBuffer {

    var pixelBufferOut: CVPixelBuffer?

    let status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferPool, &pixelBufferOut)
    if status != kCVReturnSuccess {
        fatalError("CVPixelBufferPoolCreatePixelBuffer() failed")
    }

    let pixelBuffer = pixelBufferOut!

    CVPixelBufferLockBaseAddress(pixelBuffer, [])

    let data = CVPixelBufferGetBaseAddress(pixelBuffer)
    let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
    let context = CGContext(data: data, width: Int(size.width), height: Int(size.height),
                            bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue)

    context!.clear(CGRect(x: 0, y: 0, width: size.width, height: size.height))

    let horizontalRatio = size.width / image.size.width
    let verticalRatio = size.height / image.size.height

    let aspectRatio = min(horizontalRatio, verticalRatio) // ScaleAspectFit

    let newSize = CGSize(width: image.size.width * aspectRatio, height: image.size.height * aspectRatio)

    let x = newSize.width < size.width ? (size.width - newSize.width) / 2 : 0
    let y = newSize.height < size.height ? (size.height - newSize.height) / 2 : 0

    context!.concatenate(CGAffineTransform.identity)
    context!.draw(image.cgImage!, in: CGRect(x: x, y: y, width: newSize.width, height: newSize.height))

    CVPixelBufferUnlockBaseAddress(pixelBuffer, [])

    return pixelBuffer
}

init(renderSettings: RenderSettings) {
    self.renderSettings = renderSettings
}

func start() {

    let avOutputSettings: [String: AnyObject] = [
        AVVideoCodecKey: renderSettings.avCodecKey as AnyObject,
        AVVideoWidthKey: NSNumber(value: Float(renderSettings.width)),
        AVVideoHeightKey: NSNumber(value: Float(renderSettings.height))
    ]

    func createPixelBufferAdaptor() {
        let sourcePixelBufferAttributesDictionary = [
            kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32ARGB),
            kCVPixelBufferWidthKey as String: NSNumber(value: Float(renderSettings.width)),
            kCVPixelBufferHeightKey as String: NSNumber(value: Float(renderSettings.height))
        ]
        pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput,
                                                                  sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)
    }

    func createAssetWriter(outputURL: NSURL) -> AVAssetWriter {
        guard let assetWriter = try? AVAssetWriter(outputURL: outputURL as URL, fileType: AVFileType.mp4) else {
            fatalError("AVAssetWriter() failed")
        }

        guard assetWriter.canApply(outputSettings: avOutputSettings, forMediaType: AVMediaType.video) else {
            fatalError("canApplyOutputSettings() failed")
        }

        return assetWriter
    }

    videoWriter = createAssetWriter(outputURL: renderSettings.outputURL)
    videoWriterInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: avOutputSettings)

    if videoWriter.canAdd(videoWriterInput) {
        videoWriter.add(videoWriterInput)
    }
    else {
        fatalError("canAddInput() returned false")
    }


    createPixelBufferAdaptor()

    if videoWriter.startWriting() == false {
        fatalError("startWriting() failed")
    }

    videoWriter.startSession(atSourceTime: CMTime.zero)

    precondition(pixelBufferAdaptor.pixelBufferPool != nil, "nil pixelBufferPool")
}

func render(appendPixelBuffers: @escaping (VideoWriter)->Bool, completion: @escaping ()->Void) {

    precondition(videoWriter != nil, "Call start() to initialize the writer")

    let queue = DispatchQueue(label: "mediaInputQueue")
    videoWriterInput.requestMediaDataWhenReady(on: queue) {
        let isFinished = appendPixelBuffers(self)
        if isFinished {
            self.videoWriterInput.markAsFinished()
            self.videoWriter.finishWriting() {
                DispatchQueue.main.async {
                    completion()
                }
            }
        }
        else {
            // Not finished yet; requestMediaDataWhenReady will invoke this
            // block again when the input is ready for more media data.
        }
    }
}

func addImage(image: UIImage, withPresentationTime presentationTime: CMTime) -> Bool {

    precondition(pixelBufferAdaptor != nil, "Call start() to initialize the writer")

    let pixelBuffer = VideoWriter.pixelBufferFromImage(image: image, pixelBufferPool: pixelBufferAdaptor.pixelBufferPool!, size: renderSettings.size)
    return pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: presentationTime)
}

}

class ImageAnimator{

static let kTimescale: Int32 = 600

let settings: RenderSettings
let videoWriter: VideoWriter
var images: [UIImage]!

var frameNum = 0

class func removeFileAtURL(fileURL: NSURL) {
    do {
        try FileManager.default.removeItem(atPath: fileURL.path!)
    }
    catch _ as NSError {
        // Ignore the error (e.g. the file does not exist yet).
    }
}

init(renderSettings: RenderSettings,imagearr: [UIImage]) {
    settings = renderSettings
    videoWriter = VideoWriter(renderSettings: settings)
    images = imagearr
}

func render(completion: @escaping ()->Void) {

    // The VideoWriter will fail if a file exists at the URL, so clear it out first.
    ImageAnimator.removeFileAtURL(fileURL: settings.outputURL)

    videoWriter.start()
    videoWriter.render(appendPixelBuffers: appendPixelBuffers) {

        let s: String = self.settings.outputURL.path!

        tempurl = s
        completion()
    }

}


func appendPixelBuffers(writer: VideoWriter) -> Bool {

    let frameDuration = CMTimeMake(value: Int64(ImageAnimator.kTimescale / settings.fps), timescale: ImageAnimator.kTimescale)

    while !images.isEmpty {

        if writer.isReadyForData == false {

            return false
        }

        let image = images.removeFirst()
        let presentationTime = CMTimeMultiply(frameDuration, multiplier: Int32(frameNum))
        let success = videoWriter.addImage(image: image, withPresentationTime: presentationTime)
        if success == false {
            fatalError("addImage() failed")
        }

        frameNum=frameNum+1
    }


    return true
}

}
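
The RenderSettings defaults above (1500x844 at 2 fps) can be changed before rendering; a small usage sketch, with myImages standing in for your own [UIImage] array:

    var settings = RenderSettings()
    settings.width = 640
    settings.height = 480
    settings.fps = 30

    let animator = ImageAnimator(renderSettings: settings, imagearr: myImages)
    animator.render {
        // tempurl now holds the path of the finished file (set in render() above).
        print("Finished writing video to \(tempurl)")
    }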

For Swift 5 I've made some minor changes to this very good answer. Simply copy and paste, and don't forget to pass a framesArray argument to the buildVideoFromImageArray function.

    func saveVideoToLibrary(videoURL: URL) {
    
    PHPhotoLibrary.shared().performChanges({
        PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: videoURL)
    }) { saved, error in
        
        if let error = error {
            print("Error saving video to librayr: \(error.localizedDescription)")
        }
        if saved {
            print("Video save to library")
            
        }
    }
}
func buildVideoFromImageArray(framesArray:[UIImage]) {
    var images = framesArray
    let outputSize = CGSize(width:images[0].size.width, height: images[0].size.height)
    let fileManager = FileManager.default
    let urls = fileManager.urls(for: .cachesDirectory, in: .userDomainMask)
    guard let documentDirectory = urls.first else {
        fatalError("documentDir Error")
    }
    
    let videoOutputURL = documentDirectory.appendingPathComponent("OutputVideo.mp4")
    
    if FileManager.default.fileExists(atPath: videoOutputURL.path) {
        do {
            try FileManager.default.removeItem(atPath: videoOutputURL.path)
        } catch {
            fatalError("Unable to delete file: \(error) : \(#function).")
        }
    }
    
    guard let videoWriter = try? AVAssetWriter(outputURL: videoOutputURL, fileType: AVFileType.mp4) else {
        fatalError("AVAssetWriter error")
    }
    
    let outputSettings = [AVVideoCodecKey : AVVideoCodecType.h264, AVVideoWidthKey : NSNumber(value: Float(outputSize.width)), AVVideoHeightKey : NSNumber(value: Float(outputSize.height))] as [String : Any]
    
    guard videoWriter.canApply(outputSettings: outputSettings, forMediaType: AVMediaType.video) else {
        fatalError("Negative : Can't apply the Output settings...")
    }
    
    let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: outputSettings)
    let sourcePixelBufferAttributesDictionary = [
        kCVPixelBufferPixelFormatTypeKey as String : NSNumber(value: kCVPixelFormatType_32ARGB),
        kCVPixelBufferWidthKey as String: NSNumber(value: Float(outputSize.width)),
        kCVPixelBufferHeightKey as String: NSNumber(value: Float(outputSize.height))
    ]
    let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput, sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)
    
    if videoWriter.canAdd(videoWriterInput) {
        videoWriter.add(videoWriterInput)
    }
    
    if videoWriter.startWriting() {
        videoWriter.startSession(atSourceTime: CMTime.zero)
        assert(pixelBufferAdaptor.pixelBufferPool != nil)
        
        let media_queue = DispatchQueue(label: "mediaInputQueue")
        
        videoWriterInput.requestMediaDataWhenReady(on: media_queue, using: { () -> Void in
            let fps: Int32 = 30 // output frame rate
            let frameDuration = CMTimeMake(value: 1, timescale: fps)
            
            var frameCount: Int64 = 0
            var appendSucceeded = true
            
            while (!images.isEmpty) {
                if (videoWriterInput.isReadyForMoreMediaData) {
                    let nextPhoto = images.remove(at: 0)
                    let lastFrameTime = CMTimeMake(value: frameCount, timescale: fps)
                    let presentationTime = frameCount == 0 ? lastFrameTime : CMTimeAdd(lastFrameTime, frameDuration)
                    
                    var pixelBuffer: CVPixelBuffer? = nil
                    let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferAdaptor.pixelBufferPool!, &pixelBuffer)
                    
                    if let pixelBuffer = pixelBuffer, status == 0 {
                        let managedPixelBuffer = pixelBuffer
                        
                        CVPixelBufferLockBaseAddress(managedPixelBuffer, [])
                        
                        let data = CVPixelBufferGetBaseAddress(managedPixelBuffer)
                        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
                        let context = CGContext(data: data, width: Int(outputSize.width), height: Int(outputSize.height), bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(managedPixelBuffer), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue)
                        
                        context?.clear(CGRect(x: 0, y: 0, width: outputSize.width, height: outputSize.height))
                        
                        let horizontalRatio = CGFloat(outputSize.width) / nextPhoto.size.width
                        let verticalRatio = CGFloat(outputSize.height) / nextPhoto.size.height
                        
                        let aspectRatio = min(horizontalRatio, verticalRatio) // ScaleAspectFit
                        
                        let newSize = CGSize(width: nextPhoto.size.width * aspectRatio, height: nextPhoto.size.height * aspectRatio)
                        
                        let x = newSize.width < outputSize.width ? (outputSize.width - newSize.width) / 2 : 0
                        let y = newSize.height < outputSize.height ? (outputSize.height - newSize.height) / 2 : 0
                        
                        context?.draw(nextPhoto.cgImage!, in: CGRect(x: x, y: y, width: newSize.width, height: newSize.height))
                        
                        CVPixelBufferUnlockBaseAddress(managedPixelBuffer, [])
                        
                        appendSucceeded = pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: presentationTime)
                        frameCount += 1
                    } else {
                        print("Failed to allocate pixel buffer")
                        appendSucceeded = false
                    }
                }
                if !appendSucceeded {
                    break
                }
                //frameCount += 1
            }
            videoWriterInput.markAsFinished()
            videoWriter.finishWriting { () -> Void in
                print("Done saving")
                self.saveVideoToLibrary(videoURL: videoOutputURL)
            }
        })
    }
}
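
Usage is then a single call from the class that contains these two functions (frames below is a placeholder for your own non-empty image array). Note that writing to the photo library also requires a photo library usage description key (NSPhotoLibraryAddUsageDescription or NSPhotoLibraryUsageDescription) in Info.plist:

    // Somewhere in the same class, e.g. behind a button action:
    let frames: [UIImage] = [] // replace with your images
    buildVideoFromImageArray(framesArray: frames)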
