In my iOS app, I am trying to record a video of the contents of an EAGLView (there is no camera involved). Recording works fine. After recording, I need to add a few audio tracks to the video and then share it to YouTube and Facebook. The problem is that the video plays correctly on the iPhone and on a Mac, but when I upload it to YouTube (using the YouTube Data API v3), the video comes out vertically inverted (upside down).
I guess I need to flip the video frames before uploading, but I don't know how to do that.
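For what it's worth, the recorded track can be inspected like this (a quick diagnostic sketch, separate from my actual pipeline; videoPath is simply wherever the recording was saved):

#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h> // for NSStringFromCGAffineTransform / NSStringFromCGSize

// Log the recorded track's transform and size. A non-identity preferredTransform
// means local players (iPhone, QuickTime) compensate at playback time, and that
// compensation may be lost or ignored once YouTube re-encodes the file.
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:videoPath] options:nil];
AVAssetTrack *track = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
NSLog(@"preferredTransform: %@", NSStringFromCGAffineTransform(track.preferredTransform));
NSLog(@"naturalSize: %@", NSStringFromCGSize(track.naturalSize));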
Any help will be highly appreciated.
The code I am using to add audio tracks to the video is below:
- (void)prepareVideoForPath:(NSString *)videoPath usingAudio:(NSArray *)audioArray andOutputPath:(NSString *)exportPath {
    NSDictionary *optionsDictionary = @{ AVURLAssetPreferPreciseDurationAndTimingKey : @YES };
    NSURL *videoUrl = [NSURL fileURLWithPath:videoPath];
    AVURLAsset *videoAsset = [AVURLAsset URLAssetWithURL:videoUrl options:optionsDictionary];
    AVAssetTrack *firstAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    CGAffineTransform firstTransform = firstAssetTrack.preferredTransform;

    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    // Video track
    AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:firstAssetTrack atTime:kCMTimeZero error:nil];
    [compositionVideoTrack setPreferredTransform:firstTransform];

    // Audio tracks, appended back to back on a single composition track
    AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    CMTime audioStartTime = kCMTimeZero;
    for (NSURL *audioURL in audioArray) {
        AVURLAsset *audioAsset = [AVURLAsset URLAssetWithURL:audioURL options:optionsDictionary];
        [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset.duration) ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:audioStartTime error:nil];
        audioStartTime = CMTimeAdd(audioStartTime, audioAsset.duration);
    }

    AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetPassthrough];
    assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    NSLog(@"file type %@", assetExport.outputFileType);
    assetExport.outputURL = [NSURL fileURLWithPath:exportPath];
    assetExport.shouldOptimizeForNetworkUse = YES;
    [assetExport exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            // Check the export status instead of assuming success
            if (assetExport.status == AVAssetExportSessionStatusCompleted) {
                NSLog(@"Mixing complete");
            } else {
                NSLog(@"Mixing failed: %@", assetExport.error);
            }
        });
    }];
}
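A call site looks roughly like this (the paths and audio file names are made up for illustration):

NSString *docs = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
NSString *recordedPath = [docs stringByAppendingPathComponent:@"recording.mov"]; // hypothetical recorded EAGLView video
NSString *mixedPath = [docs stringByAppendingPathComponent:@"mixed.mov"];        // hypothetical output
NSArray *audioURLs = @[ [[NSBundle mainBundle] URLForResource:@"track1" withExtension:@"m4a"] ]; // hypothetical audio file
// AVAssetExportSession fails if a file already exists at the output path
[[NSFileManager defaultManager] removeItemAtPath:mixedPath error:nil];
[self prepareVideoForPath:recordedPath usingAudio:audioURLs andOutputPath:mixedPath];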
OK, I found out how to do it: the video needs to be flipped vertically with a video composition before exporting.
Here is the code:
- (void)fixVideoOrientationForURL:(NSURL *)videoURL andOutputPath:(NSString *)exportPath {
    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:videoURL options:@{ AVURLAssetPreferPreciseDurationAndTimingKey : @YES }];

    AVAssetTrack *assetVideoTrack = nil;
    AVAssetTrack *assetAudioTrack = nil;
    // Check whether the asset contains video and audio tracks
    if ([[asset tracksWithMediaType:AVMediaTypeVideo] count] != 0) {
        assetVideoTrack = [asset tracksWithMediaType:AVMediaTypeVideo][0];
    }
    if ([[asset tracksWithMediaType:AVMediaTypeAudio] count] != 0) {
        assetAudioTrack = [asset tracksWithMediaType:AVMediaTypeAudio][0];
    }

    CMTime insertionPoint = kCMTimeZero;
    NSError *error = nil;

    // Step 1
    // Create a composition and insert the audio and video tracks from the asset into it
    AVMutableComposition *mutableComposition = [AVMutableComposition composition];
    if (assetVideoTrack != nil) {
        AVMutableCompositionTrack *compositionVideoTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration]) ofTrack:assetVideoTrack atTime:insertionPoint error:&error];
    }
    if (assetAudioTrack != nil) {
        AVMutableCompositionTrack *compositionAudioTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration]) ofTrack:assetAudioTrack atTime:insertionPoint error:&error];
    }

    // Step 2
    // Build the flip transform: scale Y by -1 and translate down by the track height
    // so the flipped frame stays inside the render rectangle
    CGAffineTransform transform = CGAffineTransformMake(1, 0, 0, -1, 0, assetVideoTrack.naturalSize.height);

    // Step 3
    // Create a video composition with the appropriate render size and frame rate
    AVMutableVideoComposition *mutableVideoComposition = [AVMutableVideoComposition videoComposition];
    mutableVideoComposition.renderSize = CGSizeMake(assetVideoTrack.naturalSize.width, assetVideoTrack.naturalSize.height);
    mutableVideoComposition.frameDuration = CMTimeMake(1, 30);

    // The flip transform is set on a layer instruction
    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mutableComposition duration]);
    AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:(mutableComposition.tracks)[0]];
    [layerInstruction setTransform:transform atTime:kCMTimeZero];

    // Step 4
    // Add the transform instructions to the video composition
    instruction.layerInstructions = @[layerInstruction];
    mutableVideoComposition.instructions = @[instruction];

    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:[mutableComposition copy] presetName:AVAssetExportPresetHighestQuality];
    exportSession.videoComposition = mutableVideoComposition;
    exportSession.outputURL = [NSURL fileURLWithPath:exportPath];
    exportSession.outputFileType = AVFileTypeQuickTimeMovie;
    exportSession.shouldOptimizeForNetworkUse = YES;
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            switch (exportSession.status) {
                case AVAssetExportSessionStatusCompleted:
                    NSLog(@"writing complete");
                    break;
                case AVAssetExportSessionStatusFailed:
                    NSLog(@"Failed: %@", exportSession.error);
                    break;
                case AVAssetExportSessionStatusCancelled:
                    NSLog(@"Canceled: %@", exportSession.error);
                    break;
                default:
                    break;
            }
        });
    }];
}
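For completeness, this is roughly how it gets kicked off (paths are illustrative; since the export runs asynchronously, any follow-up step such as the audio mixing above has to be started from the export's completion handler, not right after this call):

NSString *docs = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
NSURL *recordedURL = [NSURL fileURLWithPath:[docs stringByAppendingPathComponent:@"recording.mov"]]; // hypothetical input
NSString *flippedPath = [docs stringByAppendingPathComponent:@"flipped.mov"];                        // hypothetical output
[[NSFileManager defaultManager] removeItemAtPath:flippedPath error:nil]; // export fails on an existing file
[self fixVideoOrientationForURL:recordedURL andOutputPath:flippedPath];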