iOS - Casting a video to Chromecast using PhotoKit

Date: 2017-04-04 09:54:58

Tags: ios chromecast google-cast photokit

I want to cast a video stored locally on the device to a Chromecast using the PhotoKit framework, but the Chromecast only shows the loading screen and never plays the video. If I replace avUrlAsset.url.absoluteString with the HTTP URL of a video (http_url_of_video) instead, the video plays successfully.

Code

    let options = PHVideoRequestOptions()
    options.isNetworkAccessAllowed = true
    options.deliveryMode = .automatic

    // create the metadata
    let metadata = GCKMediaMetadata(metadataType: .movie)
    metadata.setString("Title", forKey: kGCKMetadataKeyTitle)
    metadata.setString("Subtitle", forKey: kGCKMetadataKeySubtitle)

    PHImageManager.default().requestAVAsset(forVideo: asset, options: options, resultHandler: { (avAsset, audioMix, info) in

        if let avUrlAsset = avAsset as? AVURLAsset {

            // create media information
            let mediaInfo = GCKMediaInformation(contentID: avUrlAsset.url.absoluteString,
                                                streamType: .buffered,
                                                contentType: "video/quicktime",
                                                metadata: metadata,
                                                streamDuration: 0,
                                                customData: nil)

            self._remotMediaClient?.loadMedia(mediaInfo, autoplay: true)

        }
    })

Please suggest how I can play a local video on the Chromecast. I also tried copying the video into the Documents directory and passing the copied file's URL to the Chromecast, but that did not work either.

1 Answer:

Answer 0 (score: 0)

I solved it by running a local HTTP server on the device. The Chromecast fetches the media itself over HTTP, so the video is exported into the Documents directory that the server serves, and the Chromecast is then given the resulting HTTP URL instead of the PhotoKit file URL.

HttpServerManager.swift

import UIKit
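// NOTE (added): HTTPServer is not an iOS SDK class. Judging by the comments below
// it comes from an embedded web-server library such as CocoaHTTPServer, which is
// Objective-C and therefore has to be added to the project and exposed to Swift
// (for example via a bridging header). DDLogWrapper and LoggerFactory are likewise
// assumed to be logging helpers from that project rather than system APIs.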

class HttpServerManager: NSObject {

    static let shared = HttpServerManager()

    private var httpServer: HTTPServer!

    override init() {
        super.init()

        // Create the HTTP server
        httpServer = HTTPServer()

        // Tell the server to broadcast its presence via Bonjour.
        // This allows browsers such as Safari to automatically discover our service.
        httpServer.setType("_http._tcp.")

        // Normally there's no need to run our server on any specific port.
        // Technologies like Bonjour allow clients to dynamically discover the server's port at runtime.
        // However, for easy testing you may want to force a certain port so you can just hit the refresh button.
        // [httpServer setPort:12345];

        // Serve files out of the app's Documents directory
        let documentsDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first
        httpServer.setDocumentRoot(documentsDirectory)
    }

    func startServer() {

        // Start the server (and check for problems)
        do {
            try httpServer?.start()
            DDLogWrapper.logInfo("Started HTTP Server on port \(httpServer?.listeningPort())")
        } catch {
            DDLogWrapper.logError("Error starting HTTP Server: \(error)")
        }
    }

    func stopServer() {
        httpServer.stop()
    }

    func getListeningPort() -> UInt16 {
        return httpServer.listeningPort()
    }

    func setDocumentRoot(path string: String) {
        httpServer.setDocumentRoot(string)
    }
}

Start the server in AppDelegate.swift

class AppDelegate: UIResponder, UIApplicationDelegate, GCKLoggerDelegate {

    var window: UIWindow?
    var httpServer: HTTPServer?

    func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey: Any]?) -> Bool {
        // Override point for customization after application launch.

        GCKLogger.sharedInstance().delegate = self

        // Configure our logging framework.
        // To keep things simple and fast, we're just going to log to the Xcode console.
        LoggerFactory.initLogging()

        // start the local HTTP server
        HttpServerManager.shared.startServer()

        return true
    }
}

Use the following method to play the local video on the Chromecast. It exports the PHAsset to an MP4 file in the Documents directory (the HTTP server's document root) and then loads the resulting HTTP URL on the remote media client.

func playToRemotePlayer(with asset:PHAsset, forViewController viewController:UIViewController) {

    // if the video is paused then resume it
    if _remotMediaClient?.mediaStatus?.playerState == .paused {
        _remotMediaClient?.play()
        return
    }

    // keep track of the most recent asset played on the Chromecast
    if recentPlayedAsset == nil {
        recentPlayedAsset = asset
    }else{

        if recentPlayedAsset == asset {
            self._remotMediaClient?.loadMedia(self.recentMediaInfo!, autoplay: true)
            return

        }else{
            recentPlayedAsset = asset
        }

    }

    let options = PHVideoRequestOptions()
    options.isNetworkAccessAllowed = true
    options.deliveryMode = .highQualityFormat
    options.version = .original

    // create the metadata
    let metadata = GCKMediaMetadata(metadataType: .movie)
    metadata.setString("your video title", forKey: kGCKMetadataKeyTitle)
    metadata.setString("your video subtitle", forKey: kGCKMetadataKeySubtitle)

    PHImageManager.default().requestAVAsset(forVideo: asset, options: options, resultHandler: { (avAsset, audioMix, info) in

        if (avAsset as? AVURLAsset) != nil {

            let startDate = NSDate()

            //Create Export session
            let exportSession = AVAssetExportSession(asset: avAsset!, presetName: AVAssetExportPresetHighestQuality)
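            // NOTE (added): documentDirectoryUrl and CommanUtilites are helpers defined
            // elsewhere in the answerer's project (the Documents directory URL and a small
            // file-deletion utility). The export is written into the Documents directory,
            // i.e. the HTTP server's document root, so the file can then be served over HTTP.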

            let filePathURL = documentDirectoryUrl.appendingPathComponent("rendered_video.mp4")
            let filePath = NSURL(string: (filePathURL?.absoluteString)!)
            CommanUtilites.deleteFile(filePath: filePath!)

            exportSession!.outputURL = filePath as URL?
            exportSession!.outputFileType = AVFileTypeMPEG4
            exportSession!.shouldOptimizeForNetworkUse = true
            let start = CMTimeMakeWithSeconds(0.0, 0)
            let range = CMTimeRangeMake(start, (avAsset?.duration)!)
            exportSession?.timeRange = range

            print("Exporting Media...")

            DispatchQueue.main.async {
                self.progressHUD = MBProgressHUD.showAdded(to: viewController.view, animated: true)
                self.progressHUD?.mode = MBProgressHUDMode.indeterminate
                self.progressHUD?.label.text = "Exporting video please wait..."
            }

            exportSession!.exportAsynchronously(completionHandler: {() -> Void in

                DispatchQueue.main.async {
                    self.progressHUD?.hide(animated: true)
                }

                switch exportSession!.status {

                case .failed:
                    print("Error : " + (exportSession?.error?.localizedDescription)!)
                case .cancelled:
                    print("Export canceled")
                case .completed:
                    //Video conversion finished
                    let endDate = NSDate()

                    let time = endDate.timeIntervalSince(startDate as Date)
                    print(time)
                    print("Export Successful!")
                    print(exportSession?.outputURL?.path ?? "")

                    let port = String(HttpServerManager.shared.getListeningPort())

                    let videoHttpUrl = "http://127.0.0.1:" + port + "/rendered_video.mp4"
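                    // NOTE (added): a loopback address like the one above is only reachable
                    // from the phone itself; if the Chromecast cannot load the stream, build
                    // the URL from the device's Wi-Fi address instead (see the sketch after
                    // this method).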

                    // create media information
                    self.recentMediaInfo = GCKMediaInformation(contentID: videoHttpUrl,
                                                               streamType: .buffered,
                                                               contentType: "video/mp4",
                                                               metadata: nil,
                                                               streamDuration: (avAsset?.duration.seconds)!,
                                                               customData: nil)

                    self._remotMediaClient?.loadMedia(self.recentMediaInfo!, autoplay: true)


                default:
                    break
                }

            })
        }
    })

}
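
One caveat worth adding (this note is mine, not part of the original answer): the Chromecast downloads the media itself over the network, so a loopback URL such as http://127.0.0.1:port/rendered_video.mp4 is normally only reachable from the phone itself. If the receiver stays on the loading screen, try building the URL from the phone's Wi-Fi (LAN) address instead. Below is a minimal sketch using the standard getifaddrs approach; the helper name wifiIPAddress is hypothetical.

    import Foundation

    // Returns the device's Wi-Fi (en0) IPv4 address, e.g. "192.168.1.42",
    // so that the served file is reachable from other devices on the LAN.
    func wifiIPAddress() -> String? {
        var address: String?
        var ifaddrPtr: UnsafeMutablePointer<ifaddrs>?
        guard getifaddrs(&ifaddrPtr) == 0, let firstAddr = ifaddrPtr else { return nil }
        defer { freeifaddrs(ifaddrPtr) }

        for ptr in sequence(first: firstAddr, next: { $0.pointee.ifa_next }) {
            let interface = ptr.pointee
            // Only IPv4 addresses on the Wi-Fi interface ("en0" on iPhone/iPad)
            if interface.ifa_addr.pointee.sa_family == UInt8(AF_INET),
                String(cString: interface.ifa_name) == "en0" {
                var hostname = [CChar](repeating: 0, count: Int(NI_MAXHOST))
                getnameinfo(interface.ifa_addr, socklen_t(interface.ifa_addr.pointee.sa_len),
                            &hostname, socklen_t(hostname.count), nil, 0, NI_NUMERICHOST)
                address = String(cString: hostname)
            }
        }
        return address
    }

With that helper, the URL in the method above would be built along these lines:

    // fall back to loopback only for on-device testing
    let host = wifiIPAddress() ?? "127.0.0.1"
    let videoHttpUrl = "http://" + host + ":" + port + "/rendered_video.mp4"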