
AVAssetExportSession export video AVFoundationErrorDomain Code=-11841 error


I am trying to export a video using the following code. It works fine for the first three attempts, but from the fourth attempt onwards it always fails. I am adding recorded voices over a video. I am pretty new to all these concepts, so any help will be appreciated.

open func generate(video url: URL, with frame: CGRect? = nil, byApplying transformation: CGAffineTransform? = nil, in previewArea: CGRect? = nil, previewCornerRadius: Float = 0, overlayImage: UIImage? = nil, setOverlayAsBackground: Bool = false, gifLayer: CALayer? = nil, audioUrl: URL? = nil, muteAudio: Bool = false, success: @escaping ((URL) -> Void), failure: @escaping ((Error) -> Void)) {
    let mixComposition: AVMutableComposition = AVMutableComposition()
    var mutableCompositionVideoTrack: AVMutableCompositionTrack? = nil
    var mutableCompositionOriginalAudioTrack: AVMutableCompositionTrack? = nil
    var mutableCompositionAudioTrack: AVMutableCompositionTrack? = nil
    let totalVideoCompositionInstruction : AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
    
    let aVideoAsset: AVAsset = AVAsset(url: url)
    var aAudioAsset: AVAsset? = nil
    
    if let url = audioUrl {
        aAudioAsset = AVAsset(url: url)
    }
    
    if let videoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid) {
        mutableCompositionVideoTrack = videoTrack
        if aAudioAsset != nil, let audioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
            mutableCompositionAudioTrack = audioTrack
        }
        
        if !muteAudio, aVideoAsset.hasAudio, let originalAudioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
            /// If original audio present
            mutableCompositionOriginalAudioTrack = originalAudioTrack
        }
    }
    
    do {
        var originalVideoSize: CGSize = self.prefferedVideoSize
        let ratio = self.prefferedVideoSize.width / Utility.get9By16ScreenSize().width
        
        if let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: .video).first  {
            originalVideoSize = aVideoAssetTrack.naturalSize
            var transforms = aVideoAssetTrack.preferredTransform
            if aVideoAsset.videoOrientation().orientation == .landscapeLeft || aVideoAsset.videoOrientation().orientation == .landscapeRight {
                let ratio = self.prefferedVideoSize.width / originalVideoSize.width
                let centerY: CGFloat = (self.prefferedVideoSize.height - (originalVideoSize.height * ratio)) / 2
                transforms = transforms.concatenating(CGAffineTransform(translationX: 0, y: centerY).scaledBy(x: ratio, y: ratio))
            }
                            
            try mutableCompositionVideoTrack?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration), of: aVideoAssetTrack, at: CMTime.zero)
            
            if !muteAudio, aVideoAsset.hasAudio, let audioAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: .audio).first  {
                try mutableCompositionOriginalAudioTrack?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: audioAssetTrack.timeRange.duration), of: audioAssetTrack, at: CMTime.zero)
            }
            
            if let audioAsset = aAudioAsset, let aAudioAssetTrack: AVAssetTrack = audioAsset.tracks(withMediaType: .audio).first  {
                try mutableCompositionAudioTrack?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: aAudioAssetTrack.timeRange.duration), of: aAudioAssetTrack, at: CMTime.zero)
            }
            
            /// NB: this instruction only spans the video track's duration;
            /// see the Solution section at the bottom for why that matters here.
            totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration)
            let mixVideoTrack = mixComposition.tracks(withMediaType: AVMediaType.video)[0]
            
            let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: mixVideoTrack)
            layerInstruction.setTransform(transforms, at: CMTime.zero)
            totalVideoCompositionInstruction.layerInstructions = [layerInstruction]
        }
        
        let mutableVideoComposition: AVMutableVideoComposition = AVMutableVideoComposition()
        mutableVideoComposition.frameDuration = CMTimeMake(value: 1, timescale: 12) // compose at most 12 frames per second
        mutableVideoComposition.renderSize = self.prefferedVideoSize
        mutableVideoComposition.instructions = [totalVideoCompositionInstruction]
        
        let parentLayer = CALayer()
        parentLayer.frame = self.prefferedVideoRect
        parentLayer.isGeometryFlipped = true
        
        let videoLayer = CALayer()
        videoLayer.contentsGravity = .resizeAspect
        videoLayer.contentsScale = 1
        videoLayer.frame = self.prefferedVideoRect
        
        if let frame = frame {
            let scalledFrame = frame.scale(by: ratio)
            videoLayer.frame = scalledFrame
            
            let videoContainerLayer = CALayer()
            parentLayer.frame = self.prefferedVideoRect
            parentLayer.addSublayer(videoContainerLayer)
            videoContainerLayer.addSublayer(videoLayer)
            
            if let transformation = transformation {
                if let previewFrame = previewArea {
                    let maskLayer = CALayer()
                    maskLayer.backgroundColor = UIColor.black.cgColor
                    let scalledMaskFrame = previewFrame.scale(by: ratio)
                    maskLayer.frame = scalledMaskFrame
                    maskLayer.cornerRadius = previewCornerRadius.cgFloat
                    maskLayer.masksToBounds = true
                    videoContainerLayer.mask = maskLayer
                }
                videoLayer.transform = CATransform3DMakeAffineTransform(transformation)
            }
        } else {
            parentLayer.addSublayer(videoLayer)
        }
        
        /// Add overlay if overlay image present
        if let image = overlayImage {
            let imageLayer = CALayer()
            imageLayer.contents = image.cgImage
            imageLayer.frame = self.prefferedVideoRect
            imageLayer.masksToBounds = true
            if setOverlayAsBackground {
                parentLayer.insertSublayer(imageLayer, at: 0)
            } else {
                parentLayer.addSublayer(imageLayer)
            }
        }
        
        /// Add the GIF overlay if one is present
        if let overlay = gifLayer {
            overlay.frame = CGRect(origin: CGPoint(x: (self.prefferedVideoSize.width - overlay.frame.width) / 2, y: (self.prefferedVideoSize.height - overlay.frame.height) / 2), size: overlay.frame.size)
            
            overlay.transform = CATransform3DMakeAffineTransform(CGAffineTransform(scaleX: ratio, y: ratio))
            parentLayer.addSublayer(overlay)
        }
        
        mutableVideoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)

        let outputURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("tmp-rendered-video-R6S9K2B4.m4v")
        self.exportVideo(from: mixComposition, toFile: outputURL, with: mutableVideoComposition,  success: success, failure: failure)
    } catch {
        DCDebug.print(error)
        failure(error)
    }
}
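
Note: `hasAudio`, `videoOrientation()`, `scale(by:)`, `cgFloat`, `Utility.get9By16ScreenSize()`, `DCDebug`, and `VideoMakerError` are helpers from my project, not AVFoundation API. To make the snippet easier to follow, here are minimal sketches of the two simplest ones (rough equivalents, not the exact implementations):

import AVFoundation
import UIKit

extension AVAsset {
    /// True when the asset contains at least one audio track.
    var hasAudio: Bool {
        return !tracks(withMediaType: .audio).isEmpty
    }
}

extension CGRect {
    /// Scales the rect's origin and size uniformly by the given factor.
    func scale(by factor: CGFloat) -> CGRect {
        return CGRect(x: origin.x * factor, y: origin.y * factor,
                      width: width * factor, height: height * factor)
    }
}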

func exportVideo(from composition: AVComposition, toFile output: URL, with videoComposition: AVVideoComposition? = nil, success: @escaping ((URL) -> Void), failure: @escaping ((Error) -> Void)) {
    do {
        if FileManager.default.fileExists(atPath: output.path) {
            try FileManager.default.removeItem(at: output)
        }

        if let exportSession = AVAssetExportSession(asset: composition, presetName: self.presetName ?? AVAssetExportPresetHighestQuality) {
            exportSession.outputURL = output
            exportSession.outputFileType = AVFileType.mp4
            exportSession.shouldOptimizeForNetworkUse = true
            if let videoComposition = videoComposition {
                exportSession.videoComposition = videoComposition
            }

            /// Try to export the file and handle the status cases
            exportSession.exportAsynchronously(completionHandler: {
                DispatchQueue.main.async {
                    switch exportSession.status {
                    case .completed:
                        success(output)
                    case .failed:
                        if let _error = exportSession.error {
                            failure(_error)
                        }
                    case .cancelled:
                        if let _error = exportSession.error {
                            failure(_error)
                        }
                    default:
                        success(output)
                    }
                }
            })
        } else {
            failure(VideoMakerError(error: .kFailedToStartAssetExportSession))
        }
    } catch {
        DCDebug.print(error)
        failure(error)
    }
}
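
For reference, this is roughly how I call it (the URLs here are placeholders, not my real paths):

let videoURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("clip.mov")   // placeholder
let voiceURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("voice.m4a")  // placeholder

generate(video: videoURL, audioUrl: voiceURL, success: { exportedURL in
    print("exported to \(exportedURL)")
}, failure: { error in
    print("export failed: \(error)")
})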

I am getting the following error:

    Domain=AVFoundationErrorDomain Code=-11841 "Operation Stopped" UserInfo={NSLocalizedDescription=Operation Stopped, NSLocalizedFailureReason=The video could not be composed.}
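
While debugging, a useful check is AVFoundation's built-in composition validation, shown here on the assumption that it runs just before the exportVideo call, where mixComposition and mutableVideoComposition are still in scope:

let fullRange = CMTimeRange(start: .zero, duration: mixComposition.duration)
// isValid(for:timeRange:validationDelegate:) reports instruction problems
// such as overlapping time ranges or gaps in the composition's timeline.
let isValid = mutableVideoComposition.isValid(for: mixComposition,
                                              timeRange: fullRange,
                                              validationDelegate: nil)
print("video composition valid: \(isValid)")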

Following are the time-range values when the export fails. Note that the source clip's audio track (1852/600 ≈ 3.087 s) is slightly longer than its video track (1800/600 = 3.0 s):

(lldb) po aVideoAsset.tracks(withMediaType: .audio).first?.timeRange.duration
▿ Optional<CMTime>
  ▿ some : CMTime
    - value : 1852
    - timescale : 600
    ▿ flags : CMTimeFlags
      - rawValue : 1
    - epoch : 0

(lldb) po aVideoAsset.tracks(withMediaType: .video).first?.timeRange.duration
▿ Optional<CMTime>
  ▿ some : CMTime
    - value : 1800
    - timescale : 600
    ▿ flags : CMTimeFlags
      - rawValue : 1
    - epoch : 0

Solution

  • I solved this issue by replacing the following line:

    totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration)
    

    with:

    if let originalAudioTrack = mutableCompositionOriginalAudioTrack, originalAudioTrack.timeRange.duration > aVideoAssetTrack.timeRange.duration, !muteAudio, aVideoAsset.hasAudio {
        totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: originalAudioTrack.timeRange.duration)
    } else {
        totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration)
    }
    

    This solved my issue, but I am not sure whether this is the correct solution to the problem or just a hack, so if anyone can provide a proper explanation of this issue and a valid solution other than this one, the bounty is yours.
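
Update, for future readers (my best understanding, so treat it as a hedged explanation rather than an authoritative one): -11841 is AVErrorInvalidVideoComposition, and a video composition is rejected when its instructions leave part of the composed timeline uncovered. In my case the source clip's audio track (1852/600) runs slightly longer than its video track (1800/600), so the composition's total duration exceeds the single instruction's time range and the composer fails with "The video could not be composed". The same fix can be expressed more directly by sizing the instruction to the whole composition once every track has been inserted:

    /// Cover the entire composition so no part of the timeline is left
    /// without an instruction (an uncovered gap makes the composition invalid).
    totalVideoCompositionInstruction.timeRange = CMTimeRange(start: .zero, duration: mixComposition.duration)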