Tags: ios, chromecast, google-cast, photokit

iOS - Cast video to Chromecast using PhotoKit


I want to cast a video stored on the device to Chromecast using the PhotoKit framework, but only the loading screen is displayed on the Chromecast and no video plays. If I replace avUrlAsset.url.absoluteString with an HTTP URL of a video (http_url_of_video), it plays successfully.

Code

    let options = PHVideoRequestOptions()
    options.isNetworkAccessAllowed = true
    options.deliveryMode = .automatic

    // create a meta data
    let metadata = GCKMediaMetadata(metadataType: .movie)
    metadata.setString("Title", forKey: kGCKMetadataKeyTitle)
    metadata.setString("Subtitle", forKey: kGCKMetadataKeySubtitle)

    PHImageManager.default().requestAVAsset(forVideo: asset, options: options, resultHandler: { (avAsset, audioMix, info) in

        if let avUrlAsset = avAsset as? AVURLAsset {

            // create media information
            let mediaInfo = GCKMediaInformation(contentID: avUrlAsset.url.absoluteString,
                                                streamType: .buffered,
                                                contentType: "video/quicktime",
                                                metadata: metadata,
                                                streamDuration: 0,
                                                customData: nil)

            self._remotMediaClient?.loadMedia(mediaInfo, autoplay: true)

        }
    })

Please suggest how I can play a local video on Chromecast. I also tried copying the video to the documents directory and passing the copied file's URL to the Chromecast, but that did not work either.


Solution

  • I solved this by running a local HTTP server inside the app. The Chromecast fetches the media itself over the network, so it cannot read a file:// URL that only exists on the phone; the video has to be served over HTTP instead. The HTTPServer class used below is from the CocoaHTTPServer library (an Objective-C library, typically exposed to Swift through a bridging header).

    HttpServerManager.swift

    import UIKit

    class HttpServerManager: NSObject {

        static let shared = HttpServerManager()

        private var httpServer: HTTPServer!

        override init() {
            super.init()

            // Create the server (HTTPServer comes from the CocoaHTTPServer library)
            httpServer = HTTPServer()

            // Tell the server to broadcast its presence via Bonjour.
            // This allows browsers such as Safari to automatically discover our service.
            httpServer.setType("_http._tcp.")

            // Normally there's no need to run our server on any specific port.
            // Technologies like Bonjour allow clients to dynamically discover the server's port at runtime.
            // However, for easy testing you may want to force a certain port so you can just hit the refresh button:
            // httpServer.setPort(12345)

            // Serve files from the app's Documents directory
            let documentsDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first
            httpServer.setDocumentRoot(documentsDirectory)
        }

        func startServer() {
            // Start the server (and check for problems)
            do {
                try httpServer?.start()
                DDLogWrapper.logInfo("Started HTTP Server on port \(httpServer?.listeningPort())")
            } catch {
                DDLogWrapper.logError("Error starting HTTP Server: \(error)")
            }
        }

        func stopServer() {
            httpServer.stop()
        }

        func getListeningPort() -> UInt16 {
            return httpServer.listeningPort()
        }

        func setDocumentRoot(path string: String) {
            httpServer.setDocumentRoot(string)
        }
    }
    
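    Note that the server's document root is the app's Documents directory; the export step further below writes rendered_video.mp4 into that same directory, which is what makes the file reachable at /rendered_video.mp4.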

    Start the server in AppDelegate.swift:

    class AppDelegate: UIResponder, UIApplicationDelegate, GCKLoggerDelegate {

        var window: UIWindow?
        var httpServer: HTTPServer?

        func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey: Any]?) -> Bool {
            // Override point for customization after application launch.

            GCKLogger.sharedInstance().delegate = self

            // Configure our logging framework.
            // To keep things simple and fast, we're just going to log to the Xcode console.
            LoggerFactory.initLogging()

            // Start the local HTTP server
            HttpServerManager.shared.startServer()

            return true
        }
    }
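
    One caveat: iOS may close the server's listening socket while the app is suspended in the background (this restart step is an assumption, not part of the original answer), so it is safer to start the server again when the app returns to the foreground:

    func applicationDidBecomeActive(_ application: UIApplication) {
        // Restart the HTTP server in case its listening socket was
        // closed while the app was suspended (assumed lifecycle handling).
        HttpServerManager.shared.startServer()
    }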
    
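    For reference, the _remotMediaClient used below is the GCKRemoteMediaClient of the current Cast session. A minimal sketch of how it can be obtained, assuming the standard Cast SDK session setup:

    var _remotMediaClient: GCKRemoteMediaClient? {
        // The remote media client belongs to the currently connected Cast session
        return GCKCastContext.sharedInstance().sessionManager.currentCastSession?.remoteMediaClient
    }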

    Play the local video on the Chromecast using the method below:

    func playToRemotePlayer(with asset:PHAsset, forViewController viewController:UIViewController) {
    
        // if the video is paused, resume it
        if _remotMediaClient?.mediaStatus?.playerState == .paused {
            _remotMediaClient?.play()
            return
        }
    
        // keep track of the most recent asset played on the Chromecast,
        // so casting the same asset again can skip the export step
        if recentPlayedAsset == nil {
            recentPlayedAsset = asset
        } else {

            if recentPlayedAsset == asset {
                self._remotMediaClient?.loadMedia(self.recentMediaInfo!, autoplay: true)
                return

            } else {
                recentPlayedAsset = asset
            }

        }
    
        let options = PHVideoRequestOptions()
        options.isNetworkAccessAllowed = true
        options.deliveryMode = .highQualityFormat
        options.version = .original
    
        // create a meta data
        let metadata = GCKMediaMetadata(metadataType: .movie)
        metadata.setString("your video title", forKey: kGCKMetadataKeyTitle)
        metadata.setString("your video subtitle", forKey: kGCKMetadataKeySubtitle)
    
        PHImageManager.default().requestAVAsset(forVideo: asset, options: options, resultHandler: { (avAsset, audioMix, info) in
    
            if (avAsset as? AVURLAsset) != nil {
    
                let startDate = NSDate()
    
                // PHImageManager returns the local asset as a QuickTime (.mov)
                // file, so export it as MP4 into the HTTP server's document root
                let exportSession = AVAssetExportSession(asset: avAsset!, presetName: AVAssetExportPresetHighestQuality)

                // documentDirectoryUrl is the app's Documents directory,
                // i.e. the document root the HTTP server serves from
                let filePathURL = documentDirectoryUrl.appendingPathComponent("rendered_video.mp4")
                let filePath = NSURL(string: (filePathURL?.absoluteString)!)
                CommanUtilites.deleteFile(filePath: filePath!)

                exportSession!.outputURL = filePath as URL?
                exportSession!.outputFileType = AVFileTypeMPEG4
                exportSession!.shouldOptimizeForNetworkUse = true
                // export the full length of the asset (note: CMTimeMakeWithSeconds
                // with a timescale of 0 yields an invalid time, so use kCMTimeZero)
                let range = CMTimeRangeMake(kCMTimeZero, (avAsset?.duration)!)
                exportSession?.timeRange = range
    
                print("Exporting Media...")
    
                DispatchQueue.main.async {
                    self.progressHUD = MBProgressHUD.showAdded(to: viewController.view, animated: true)
                    self.progressHUD?.mode = MBProgressHUDMode.indeterminate
                    self.progressHUD?.label.text = "Exporting video please wait..."
                }
    
                exportSession!.exportAsynchronously(completionHandler: {() -> Void in
    
                    DispatchQueue.main.async {
                        self.progressHUD?.hide(animated: true)
                    }
    
                    switch exportSession!.status {
    
                    case .failed:
                        print("Error : " + (exportSession?.error?.localizedDescription)!)
                    case .cancelled:
                        print("Export canceled")
                    case .completed:
                        //Video conversion finished
                        let endDate = NSDate()
    
                        let time = endDate.timeIntervalSince(startDate as Date)
                        print(time)
                        print("Export Successful!")
                        print(exportSession?.outputURL?.path ?? "")
    
                        let port = String(HttpServerManager.shared.getListeningPort())

                        // The Chromecast downloads this URL itself, so the phone's
                        // loopback address (127.0.0.1) is not reachable from it.
                        // Use the phone's Wi-Fi address on the local network
                        // instead (see the getWiFiAddress() sketch after this method).
                        let host = getWiFiAddress() ?? "127.0.0.1"
                        let videoHttpUrl = "http://" + host + ":" + port + "/rendered_video.mp4"

                        // create media information (pass the metadata built above
                        // instead of nil so the receiver can show title/subtitle)
                        self.recentMediaInfo = GCKMediaInformation(contentID: videoHttpUrl,
                                                                   streamType: .buffered,
                                                                   contentType: "video/mp4",
                                                                   metadata: metadata,
                                                                   streamDuration: (avAsset?.duration.seconds)!,
                                                                   customData: nil)

                        self._remotMediaClient?.loadMedia(self.recentMediaInfo!, autoplay: true)
    
    
                    default:
                        break
                    }
    
                })
            }
        })
    
    }
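
    The video URL handed to the Chromecast must be reachable over the local network; the Chromecast cannot see the phone's loopback address. Below is a minimal sketch of the getWiFiAddress() helper referenced above (a hypothetical helper, not part of the original answer) that reads the IPv4 address of the Wi-Fi interface (en0) using getifaddrs():

    import Darwin

    /// Hypothetical helper: returns the device's IPv4 address on the
    /// Wi-Fi interface (en0), or nil if it cannot be determined.
    func getWiFiAddress() -> String? {
        var address: String?
        var ifaddr: UnsafeMutablePointer<ifaddrs>?
        guard getifaddrs(&ifaddr) == 0 else { return nil }
        defer { freeifaddrs(ifaddr) }

        var ptr = ifaddr
        while let interface = ptr?.pointee {
            // en0 is the Wi-Fi interface on iPhone; only IPv4 is handled here
            if interface.ifa_addr.pointee.sa_family == UInt8(AF_INET),
                String(cString: interface.ifa_name) == "en0" {
                var hostname = [CChar](repeating: 0, count: Int(NI_MAXHOST))
                getnameinfo(interface.ifa_addr, socklen_t(interface.ifa_addr.pointee.sa_len),
                            &hostname, socklen_t(hostname.count), nil, 0, NI_NUMERICHOST)
                address = String(cString: hostname)
            }
            ptr = interface.ifa_next
        }
        return address
    }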