
iOS - Cast video to Chromecast using PhotoKit

I want to cast a video from the device's local library to Chromecast using the PhotoKit framework, but the Chromecast only shows a loading screen and never plays the video. If I replace avUrlAsset.url.absoluteString with the http_url_of_video, it plays the video successfully.

    let options = PHVideoRequestOptions()
    options.isNetworkAccessAllowed = true
    options.deliveryMode = .automatic

    // create the media metadata
    let metadata = GCKMediaMetadata(metadataType: .movie)
    metadata.setString("Title", forKey: kGCKMetadataKeyTitle)
    metadata.setString("Subtitle", forKey: kGCKMetadataKeySubtitle)

    PHImageManager.default().requestAVAsset(forVideo: asset, options: options, resultHandler: { (avAsset, audioMix, info) in

        if let avUrlAsset = avAsset as? AVURLAsset {

            // create the media information
            let mediaInfo = GCKMediaInformation(contentID: avUrlAsset.url.absoluteString,
                                                streamType: .buffered,
                                                contentType: "video/quicktime",
                                                metadata: metadata,
                                                streamDuration: 0,
                                                customData: nil)

            self._remotMediaClient?.loadMedia(mediaInfo, autoplay: true)

        }
    })

Please suggest how I can play a local video on the Chromecast. I also tried copying the video to the documents directory and passing the copied video's URL to the Chromecast, but that did not work either.
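
For reference, the URL PhotoKit hands back here is a local file URL that only the phone itself can read; the Chromecast is a separate device on the network, so it cannot fetch that URL, which is why only the loading screen appears. A small sketch inside the same resultHandler makes this visible (illustrative only):

    PHImageManager.default().requestAVAsset(forVideo: asset, options: options, resultHandler: { (avAsset, _, _) in
        if let avUrlAsset = avAsset as? AVURLAsset {
            print(avUrlAsset.url)           // e.g. file:///var/mobile/Media/DCIM/...
            print(avUrlAsset.url.isFileURL) // true: unreachable from the Chromecast
        }
    })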

I solved it by running a local HTTP server inside the app.
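
Note: HTTPServer and DDLogWrapper below are not system classes; they appear to be CocoaHTTPServer and a CocoaLumberjack logging wrapper (an assumption, since the post never names its dependencies), exposed to Swift through an Objective-C bridging header. The core of the approach, as a minimal sketch:

    // Minimal sketch, assuming CocoaHTTPServer's HTTPServer is bridged into Swift:
    let server = HTTPServer()
    server.setType("_http._tcp.")  // advertise the server over Bonjour
    server.setDocumentRoot(NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first)
    try? server.start()            // files in Documents are now served over HTTP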

HttpServerManager.swift

import UIKit

class HttpServerManager: NSObject {

    static let shared = HttpServerManager()

    private var httpServer: HTTPServer!

    override init() {
        super.init()

        // Create the server
        httpServer = HTTPServer()

        // Tell the server to broadcast its presence via Bonjour.
        // This allows browsers such as Safari to automatically discover our service.
        httpServer.setType("_http._tcp.")

        // Normally there's no need to run the server on any specific port.
        // Technologies like Bonjour allow clients to dynamically discover the server's port at runtime.
        // However, for easy testing you may want to force a certain port so you can just hit the refresh button.
        // httpServer.setPort(12345)

        // Serve files from the app's Documents directory
        let documentsDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first
        httpServer.setDocumentRoot(documentsDirectory)
    }

    func startServer() {
        // Start the server (and check for problems)
        do {
            try httpServer.start()
            DDLogWrapper.logInfo("Started HTTP Server on port \(httpServer.listeningPort())")
        } catch {
            DDLogWrapper.logError("Error starting HTTP Server: \(error)")
        }
    }

    func stopServer() {
        httpServer.stop()
    }

    func getListeningPort() -> UInt16 {
        return httpServer.listeningPort()
    }

    func setDocumentRoot(path string: String) {
        httpServer.setDocumentRoot(string)
    }
}

Start the server in AppDelegate.swift:

class AppDelegate: UIResponder, UIApplicationDelegate, GCKLoggerDelegate {

    var window: UIWindow?

    func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey: Any]?) -> Bool {
        // Override point for customization after application launch.

        GCKLogger.sharedInstance().delegate = self

        // Configure our logging framework.
        // To keep things simple and fast, we're just going to log to the Xcode console.
        LoggerFactory.initLogging()

        // Start the local HTTP server
        HttpServerManager.shared.startServer()

        return true
    }
}
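
You may also want to stop the server when the app terminates (a sketch reusing the same manager; adjust to your app's backgrounding needs):

    func applicationWillTerminate(_ application: UIApplication) {
        // Stop the local HTTP server when the app exits.
        HttpServerManager.shared.stopServer()
    }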

Use the following method to play a local video on the Chromecast:

func playToRemotePlayer(with asset:PHAsset, forViewController viewController:UIViewController) {

    // if the video is paused, resume it
    if _remotMediaClient?.mediaStatus?.playerState == .paused {
        _remotMediaClient?.play()
        return
    }

    // keep track of the most recent asset played on the Chromecast
    if recentPlayedAsset == nil {
        recentPlayedAsset = asset
    } else {
        if recentPlayedAsset == asset {
            // same asset as last time: just reload the cached media information
            self._remotMediaClient?.loadMedia(self.recentMediaInfo!, autoplay: true)
            return
        } else {
            recentPlayedAsset = asset
        }
    }

    let options = PHVideoRequestOptions()
    options.isNetworkAccessAllowed = true
    options.deliveryMode = .highQualityFormat
    options.version = .original

    // create the media metadata
    let metadata = GCKMediaMetadata(metadataType: .movie)
    metadata.setString("your video title", forKey: kGCKMetadataKeyTitle)
    metadata.setString("your video subtitle", forKey: kGCKMetadataKeySubtitle)

    PHImageManager.default().requestAVAsset(forVideo: asset, options: options, resultHandler: { (avAsset, audioMix, info) in

        if (avAsset as? AVURLAsset) != nil {

            let startDate = NSDate()

            // Create the export session
            let exportSession = AVAssetExportSession(asset: avAsset!, presetName: AVAssetExportPresetHighestQuality)

            let filePathURL = documentDirectoryUrl.appendingPathComponent("rendered_video.mp4")
            let filePath = NSURL(string: (filePathURL?.absoluteString)!)
            CommanUtilites.deleteFile(filePath: filePath!)

            exportSession!.outputURL = filePath as URL?
            exportSession!.outputFileType = AVFileTypeMPEG4
            exportSession!.shouldOptimizeForNetworkUse = true
            // export the full duration; kCMTimeZero avoids passing a 0 timescale
            let range = CMTimeRangeMake(kCMTimeZero, (avAsset?.duration)!)
            exportSession?.timeRange = range

            print("Exporting Media...")

            DispatchQueue.main.async {
                self.progressHUD = MBProgressHUD.showAdded(to: viewController.view, animated: true)
                self.progressHUD?.mode = MBProgressHUDMode.indeterminate
                self.progressHUD?.label.text = "Exporting video please wait..."
            }

            exportSession!.exportAsynchronously(completionHandler: {() -> Void in

                DispatchQueue.main.async {
                    self.progressHUD?.hide(animated: true)
                }

                switch exportSession!.status {

                case .failed:
                    print("Error : " + (exportSession?.error?.localizedDescription)!)
                case .cancelled:
                    print("Export canceled")
                case .completed:
                    // Video conversion finished
                    let endDate = NSDate()

                    let time = endDate.timeIntervalSince(startDate as Date)
                    print(time)
                    print("Export Successful!")
                    print(exportSession?.outputURL?.path ?? "")

                    let port = String(HttpServerManager.shared.getListeningPort())

                    let videoHttpUrl = "http://127.0.0.1:" + port + "/rendered_video.mp4"

                    // create the media information
                    self.recentMediaInfo = GCKMediaInformation(contentID: videoHttpUrl,
                                                               streamType: .buffered,
                                                               contentType: "video/mp4",
                                                               metadata: nil,
                                                               streamDuration: (avAsset?.duration.seconds)!,
                                                               customData: nil)

                    self._remotMediaClient?.loadMedia(self.recentMediaInfo!, autoplay: true)


                default:
                    break
                }

            })
        }
    })

}
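
One caveat: the contentID above uses http://127.0.0.1, which from the Chromecast's point of view is the Chromecast itself, not the phone. If playback stalls with the loopback address, the URL handed to the Chromecast should use the phone's Wi-Fi (LAN) IP instead. A minimal sketch for looking it up with getifaddrs (assuming "en0" is the Wi-Fi interface, as it is on iPhone hardware):

    /// Returns the device's IPv4 address on the Wi-Fi interface ("en0"), i.e. the
    /// address other devices on the LAN, like a Chromecast, can actually reach.
    func wifiIPAddress() -> String? {
        var address: String?
        var ifaddr: UnsafeMutablePointer<ifaddrs>?
        guard getifaddrs(&ifaddr) == 0, let first = ifaddr else { return nil }
        defer { freeifaddrs(ifaddr) }

        var ptr: UnsafeMutablePointer<ifaddrs>? = first
        while let interface = ptr?.pointee {
            // Only IPv4 addresses on the Wi-Fi interface
            if let addr = interface.ifa_addr,
               addr.pointee.sa_family == UInt8(AF_INET),
               String(cString: interface.ifa_name) == "en0" {
                var hostname = [CChar](repeating: 0, count: Int(NI_MAXHOST))
                getnameinfo(addr, socklen_t(addr.pointee.sa_len),
                            &hostname, socklen_t(hostname.count), nil, 0, NI_NUMERICHOST)
                address = String(cString: hostname)
            }
            ptr = interface.ifa_next
        }
        return address
    }

    // Usage (sketch): build the URL the Chromecast will fetch.
    // let host = wifiIPAddress() ?? "127.0.0.1"
    // let videoHttpUrl = "http://\(host):" + port + "/rendered_video.mp4"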
