Search code examples
iosswiftvideovideo-editing

Composing videos into a single movie with transition


I have several h.264 QuickTime videos, and I would like to concatenate them into a single video with transitions (fade in/out). The following code should accept an array of AVAsset and insert them into an AVMutableVideoComposition. But I keep getting error -11841 from AVAssetExportSession! Please help me find the problem.

// NOTE(review): original (failing) code from the question. `index`, `movieAssets`,
// `retFileUrl` and `lastVideoTrack` are declared elsewhere — types not visible here.
index = Int(0)
let mutableComposition = AVMutableComposition()
var currentTime = kCMTimeZero
// Transition-related configuration: a 1-second overlap window for the cross-fade.
let timeOffsetBetweenVideos = CMTimeMakeWithSeconds(1, 30)
let videoComposition = AVMutableVideoComposition()
var videoCompInstructions: [AVMutableVideoCompositionInstruction] = []
// One shared audio track for all clips; each clip gets its own video track below.
let compositionAudioTrack = mutableComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
var lastCompositionVideoTrack: AVMutableCompositionTrack? = nil

for asset in movieAssets as! [AVAsset] {
    // Add this clip's video track into the composition at the running cursor.
    let compositionVideoTrack = mutableComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
    let videoTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0]
    try compositionVideoTrack.insertTimeRange(videoTrack.timeRange, ofTrack: videoTrack, atTime: currentTime)
    // Stretch the clip by the transition length so it overlaps the next clip
    // (currentTime below only advances by asset.duration, not by the stretched length).
    compositionVideoTrack.scaleTimeRange(videoTrack.timeRange, toDuration: CMTimeAdd(asset.duration, timeOffsetBetweenVideos))

    // Add the clip's audio track into the shared composition audio track.
    let audioTrack = asset.tracksWithMediaType(AVMediaTypeAudio)[0]
    try compositionAudioTrack.insertTimeRange(audioTrack.timeRange, ofTrack: audioTrack, atTime: currentTime)

    if index > 0 {
        // Cross-fade: previous clip ramps opacity 1.0 -> 0.5 while the new clip
        // ramps 0.5 -> 1.0 across the overlap window starting at currentTime.
        let fromLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: lastCompositionVideoTrack!)
        let transitionTimeRange = CMTimeRangeMake(currentTime, timeOffsetBetweenVideos)
        fromLayerInstruction.setOpacityRampFromStartOpacity(1.0, toEndOpacity: 0.5, timeRange: transitionTimeRange)

        let toLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: compositionVideoTrack)
        toLayerInstruction.setOpacityRampFromStartOpacity(0.5, toEndOpacity: 1.0, timeRange: transitionTimeRange)

        // BUG (cause of the -11841 export failure, per the Solution below): each
        // AVMutableVideoCompositionInstruction here covers ONLY a transition range.
        // AVVideoComposition requires its instructions to be non-overlapping and to
        // cover the composition's entire timeline without gaps; the spans between
        // transitions are left uncovered, so the export session rejects the
        // composition with AVErrorInvalidVideoComposition (-11841).
        let videoCompositionInstruction = AVMutableVideoCompositionInstruction()
        videoCompositionInstruction.timeRange = transitionTimeRange
        videoCompositionInstruction.layerInstructions = [fromLayerInstruction, toLayerInstruction]

        videoCompInstructions.append(videoCompositionInstruction)
    }

    lastCompositionVideoTrack = compositionVideoTrack
    lastVideoTrack = videoTrack // NOTE(review): assigned but never read in this snippet.

    currentTime = CMTimeAdd(currentTime, asset.duration)
    ++index
}

let exportSesstion = AVAssetExportSession(asset: mutableComposition, presetName: AVAssetExportPresetHighestQuality)
exportSesstion?.outputFileType = AVFileTypeQuickTimeMovie
exportSesstion?.outputURL = retFileUrl
if movieAssets.count > 1 {
    // Only attach the custom video composition when there is at least one transition.
    videoComposition.instructions = videoCompInstructions
    videoComposition.renderSize = mutableComposition.naturalSize
    print("frame size: \(mutableComposition.naturalSize)")
    videoComposition.renderScale = 1.0
    videoComposition.frameDuration = CMTimeMake(1, 30) // 30 fps output
    exportSesstion?.videoComposition = videoComposition
}
exportSesstion?.exportAsynchronouslyWithCompletionHandler({ () -> Void in
    if exportSesstion?.status == AVAssetExportSessionStatus.Completed {
        print("Video file exported: \(retFileUrl)")
    } else {
        print(exportSesstion!.error!)
        print("Failed exporting video: \(exportSesstion?.error?.localizedDescription)")
        print(exportSesstion?.error?.localizedFailureReason)
    }
})

Solution

  • Finally solved the problem! It turns out that all of the AVMutableVideoCompositionLayerInstruction objects have to go into a single AVMutableVideoCompositionInstruction, because their time ranges overlap one another (separate instructions with overlapping or gapped time ranges are invalid).

    I will post the working code below in case someone else has the same question.

    // Working version: every AVMutableVideoCompositionLayerInstruction is collected
    // into ONE AVMutableVideoCompositionInstruction spanning the whole composition,
    // which satisfies the "no gaps, no overlaps" rule on the instruction list.
    // `index`, `movieAssets`, `retFileUrl` and `movieFrameSize` are declared elsewhere.
    index = Int(0)
    let mutableComposition = AVMutableComposition()
    // Transition-related: 0.3-second overlap window between consecutive clips.
    let timeOffsetBetweenVideos = CMTimeMakeWithSeconds(0.3, 30)
    let videoCompositionInstruction = AVMutableVideoCompositionInstruction()
    let compositionAudioTrack = mutableComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
    var lastVideoEndTime = kCMTimeZero

    for asset in movieAssets as! [AVAsset] {
        // Each clip (except the first) starts `timeOffsetBetweenVideos` before the
        // previous clip's recorded end, creating the overlap used for the transition.
        let videoStartTime = CMTimeCompare(lastVideoEndTime, kCMTimeZero) == 0 ? kCMTimeZero : CMTimeSubtract(lastVideoEndTime, timeOffsetBetweenVideos)
        let compositionVideoTrack = mutableComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
        let videoTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0]
        try compositionVideoTrack.insertTimeRange(videoTrack.timeRange, ofTrack: videoTrack, atTime: videoStartTime)
        if index == (movieAssets.count - 1) {
            // Only the last clip is stretched by the transition length, so the tail
            // of the final transition window remains covered by video.
            compositionVideoTrack.scaleTimeRange(videoTrack.timeRange, toDuration: CMTimeAdd(asset.duration, timeOffsetBetweenVideos))
        }

        // Add the clip's audio into the shared composition audio track.
        let audioTrack = asset.tracksWithMediaType(AVMediaTypeAudio)[0]
        try compositionAudioTrack.insertTimeRange(audioTrack.timeRange, ofTrack: audioTrack, atTime: videoStartTime)

        if movieAssets.count == 1 {
            // Single clip: no transitions (and no video composition) are needed.
            break
        }
        if index == 0 {
            // First movie has an ending animation only: slide left across the frame
            // during the last `timeOffsetBetweenVideos`, then hide it entirely.
            let transitionTimeRange = CMTimeRangeMake(CMTimeSubtract(compositionVideoTrack.timeRange.end, timeOffsetBetweenVideos), timeOffsetBetweenVideos)
            let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: compositionVideoTrack)
            let transform = CGAffineTransformTranslate(videoTrack.preferredTransform, movieFrameSize.width / -1.0, 0)
            layerInstruction.setTransformRampFromStartTransform(videoTrack.preferredTransform, toEndTransform: transform, timeRange: transitionTimeRange)
            // Opacity 0 after the clip's end keeps it from showing under later clips.
            layerInstruction.setOpacity(0.0, atTime: compositionVideoTrack.timeRange.end)

            videoCompositionInstruction.layerInstructions.append(layerInstruction)
        } else if index == (movieAssets.count - 1) {
            // Last movie has a beginning animation only: scale up from half size
            // (centered) to full size over the overlap window.
            let transitionTimeRange = CMTimeRangeMake(lastVideoEndTime, timeOffsetBetweenVideos)
            let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: compositionVideoTrack)
            var transform = CGAffineTransformScale(videoTrack.preferredTransform, 0.5, 0.5)
            transform = CGAffineTransformTranslate(transform, movieFrameSize.width / 2, movieFrameSize.height / 2)
            layerInstruction.setTransformRampFromStartTransform(transform, toEndTransform: videoTrack.preferredTransform, timeRange: transitionTimeRange)

            videoCompositionInstruction.layerInstructions.append(layerInstruction)
        } else {
            // Middle movies get both animations: scale-in at the start and
            // slide-out (plus hide) at the end, on the same layer instruction.
            let transitionTimeRangeBegin = CMTimeRangeMake(lastVideoEndTime, timeOffsetBetweenVideos)
            let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: compositionVideoTrack)
            var transformBegin = CGAffineTransformScale(videoTrack.preferredTransform, 0.5, 0.5)
            transformBegin = CGAffineTransformTranslate(transformBegin, movieFrameSize.width / 2, movieFrameSize.height / 2)
            layerInstruction.setTransformRampFromStartTransform(transformBegin, toEndTransform: videoTrack.preferredTransform, timeRange: transitionTimeRangeBegin)

            let transitionTimeRangeEnd = CMTimeRangeMake(CMTimeSubtract(compositionVideoTrack.timeRange.end, timeOffsetBetweenVideos), timeOffsetBetweenVideos)
            let transform = CGAffineTransformTranslate(videoTrack.preferredTransform, movieFrameSize.width / -1.0, 0)
            layerInstruction.setTransformRampFromStartTransform(videoTrack.preferredTransform, toEndTransform: transform, timeRange: transitionTimeRangeEnd)
            layerInstruction.setOpacity(0.0, atTime: compositionVideoTrack.timeRange.end)

            videoCompositionInstruction.layerInstructions.append(layerInstruction)
        }

        // Record where the next clip's overlap should be anchored: one transition
        // length before this clip's end.
        lastVideoEndTime = CMTimeSubtract(compositionVideoTrack.timeRange.end, timeOffsetBetweenVideos)
        ++index
    }

    let exportSesstion = AVAssetExportSession(asset: mutableComposition, presetName: AVAssetExportPresetHighestQuality)
    exportSesstion?.outputFileType = AVFileTypeQuickTimeMovie
    exportSesstion?.outputURL = retFileUrl
    if movieAssets.count > 1 {
        // One instruction covering the full duration — this is the fix for -11841:
        // instruction time ranges may not overlap or leave gaps.
        videoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, mutableComposition.duration)
        videoCompositionInstruction.enablePostProcessing = false

        let videoComposition = AVMutableVideoComposition(propertiesOfAsset: mutableComposition)
        videoComposition.instructions = [videoCompositionInstruction]
        videoComposition.renderSize = mutableComposition.naturalSize
        videoComposition.renderScale = 1.0
        videoComposition.frameDuration = CMTimeMake(1, 30) // 30 fps output
        exportSesstion?.videoComposition = videoComposition
    }
    exportSesstion?.exportAsynchronouslyWithCompletionHandler({ () -> Void in
        if exportSesstion?.status == AVAssetExportSessionStatus.Completed {
            print("Video file exported: \(retFileUrl)")
        } else {
            print(exportSesstion!.error!)
            print("Failed exporting video: \(exportSesstion?.error?.localizedDescription)")
            print(exportSesstion?.error?.localizedFailureReason)
        }
    })