I am pulling my hair out over this.
I'm trying to concatenate videos and it just won't do what I want. Specifically, my videos have different orientations, and I'm trying to correct that with a layer instruction. Alas, no matter what I try, it has no effect...
I've read every tutorial and tried to implement the APLCompositionDebugView provided by Apple (which basically looks OK), to no avail... I'm ready to throw everything overboard...
Here's my code:
self.videoComposition = [AVMutableVideoComposition videoComposition];
self.videoComposition.renderSize = CGSizeMake(480, 320);
self.videoComposition.frameDuration = CMTimeMake(1, 30);
NSMutableArray *videoCompositionInstructions = [[NSMutableArray alloc] init];
AVMutableComposition *theMutableComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [theMutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionAudioTrack = [theMutableComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableVideoCompositionLayerInstruction *passThroughLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
CMTime titleDuration = CMTimeMakeWithSeconds(5, 600);
CMTimeRange titleRange = CMTimeRangeMake(kCMTimeZero, titleDuration);
[compositionVideoTrack insertEmptyTimeRange:titleRange];
[compositionAudioTrack insertEmptyTimeRange:titleRange];
CMTime insertPoint = [[transitionTimes lastObject] CMTimeValue];
CMTime totalTime = CMTimeMakeWithSeconds(5, 600);
for (NSDictionary *clip in collection[@"clips"]) {
    NSString *movieName = [NSString stringWithFormat:@"collection_%li/recording_%li_%li.MOV", (long)editCollection, editCollection, [clip[@"clipID"] longValue]];
    NSURL *assetUrl = [NSURL fileURLWithPath:[usefulStuff pathForFile:movieName]];
    AVURLAsset *videoAsset = [AVURLAsset URLAssetWithURL:assetUrl options:nil];
    totalTime = CMTimeAdd(totalTime, videoAsset.duration);
    AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVAssetTrack *clipAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    NSError *error;
    [compositionVideoTrack insertTimeRange:clipVideoTrack.timeRange ofTrack:clipVideoTrack atTime:insertPoint error:&error]; // Add video
    [compositionAudioTrack insertTimeRange:clipVideoTrack.timeRange ofTrack:clipAudioTrack atTime:insertPoint error:&error]; // Add audio
    [passThroughLayer setTransform:clipVideoTrack.preferredTransform atTime:insertPoint]; // This should supposedly set the video in the right orientation at the given time...
    insertPoint = CMTimeAdd(insertPoint, videoAsset.duration);
}
AVMutableVideoCompositionInstruction *passThroughInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
passThroughInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, totalTime);
NSLog(@"Total time b %f", CMTimeGetSeconds(totalTime));
passThroughInstruction.layerInstructions = [NSArray arrayWithObject:passThroughLayer];
[videoCompositionInstructions addObject:passThroughInstruction];
self.videoComposition.instructions = videoCompositionInstructions;
HEEEEEEELP! :)
I don't see anything in your posted code that actually deals with the orientation problem. You need to apply a transform in the layer instruction to make all the orientations consistent.
Try something like this:
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.frameDuration = CMTimeMake(1,30);
videoComposition.renderScale = 1.0;
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
// Get only paths the user selected
NSMutableArray *array = [NSMutableArray array];
for (NSString *string in videoPathArray) {
    if (![string isEqualToString:@""]) {
        [array addObject:string];
    }
}
self.videoPathArray = array;
float time = 0;
for (int i = 0; i < self.videoPathArray.count; i++) {
    AVURLAsset *sourceAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:[videoPathArray objectAtIndex:i]] options:[NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey]];
    NSError *error = nil;
    BOOL ok = NO;
    AVAssetTrack *sourceVideoTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

    // Apply the track's preferred transform to its natural size to get the size as displayed.
    CGSize temp = CGSizeApplyAffineTransform(sourceVideoTrack.naturalSize, sourceVideoTrack.preferredTransform);
    CGSize size = CGSizeMake(fabs(temp.width), fabs(temp.height));
    CGAffineTransform transform = sourceVideoTrack.preferredTransform;

    videoComposition.renderSize = sourceVideoTrack.naturalSize;
    if (size.width > size.height) {
        // Landscape clip: the preferred transform alone is enough.
        [layerInstruction setTransform:transform atTime:CMTimeMakeWithSeconds(time, 30)];
    } else {
        // Portrait clip: scale it and shift it horizontally toward the middle of the frame.
        float s = size.width / size.height;
        CGAffineTransform new = CGAffineTransformConcat(transform, CGAffineTransformMakeScale(s, s));
        float x = (size.height - size.width * s) / 2;
        CGAffineTransform newer = CGAffineTransformConcat(new, CGAffineTransformMakeTranslation(x, 0));
        [layerInstruction setTransform:newer atTime:CMTimeMakeWithSeconds(time, 30)];
    }

    ok = [compositionVideoTrack insertTimeRange:sourceVideoTrack.timeRange ofTrack:sourceVideoTrack atTime:[composition duration] error:&error];
    if (!ok) {
        // Deal with the error.
        NSLog(@"something went wrong");
    }

    NSLog(@"\n source asset duration is %f \n source vid track timerange is %f %f \n composition duration is %f \n composition vid track time range is %f %f", CMTimeGetSeconds([sourceAsset duration]), CMTimeGetSeconds(sourceVideoTrack.timeRange.start), CMTimeGetSeconds(sourceVideoTrack.timeRange.duration), CMTimeGetSeconds([composition duration]), CMTimeGetSeconds(compositionVideoTrack.timeRange.start), CMTimeGetSeconds(compositionVideoTrack.timeRange.duration));

    time += CMTimeGetSeconds(sourceVideoTrack.timeRange.duration);
}
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
instruction.timeRange = compositionVideoTrack.timeRange;
videoComposition.instructions = [NSArray arrayWithObject:instruction];
I have taken the code from here because it works for me every time. All you have to do is adjust the resulting transform to your requirements.
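For what it's worth, here is a rough, untested sketch of how you could wrap that idea in a helper that builds an orientation-correcting transform for a fixed render size. The 480x320 size, the aspect-fit/centering behaviour and the helper name are my own assumptions, not a drop-in fix:

// Hypothetical helper: applies the track's preferredTransform and then
// scales/centers the clip inside renderSize.
// Assumes preferredTransform already maps the clip into positive coordinates,
// which is the case for ordinary camera recordings.
- (CGAffineTransform)uprightTransformForTrack:(AVAssetTrack *)track renderSize:(CGSize)renderSize
{
    CGAffineTransform preferred = track.preferredTransform;

    // Size of the clip as displayed after the preferred transform (a rotation swaps width/height).
    CGSize displaySize = CGSizeApplyAffineTransform(track.naturalSize, preferred);
    displaySize = CGSizeMake(fabs(displaySize.width), fabs(displaySize.height));

    // Uniform scale so the rotated clip fits inside the render rect.
    CGFloat scale = MIN(renderSize.width / displaySize.width, renderSize.height / displaySize.height);

    // Offsets that center the scaled clip in the render rect.
    CGFloat tx = (renderSize.width - displaySize.width * scale) / 2.0;
    CGFloat ty = (renderSize.height - displaySize.height * scale) / 2.0;

    CGAffineTransform scaled = CGAffineTransformConcat(preferred, CGAffineTransformMakeScale(scale, scale));
    return CGAffineTransformConcat(scaled, CGAffineTransformMakeTranslation(tx, ty));
}

In your loop that would replace the plain preferredTransform call, e.g.:

[passThroughLayer setTransform:[self uprightTransformForTrack:clipVideoTrack renderSize:CGSizeMake(480, 320)] atTime:insertPoint];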