I am working with the AVFoundation framework, trying to play two videos simultaneously: one with a filter applied and the other the normal video.
I have the whole task done; I am just stuck on setting the orientation at the end, while saving the final video to the gallery. I have already tried a lot but could not get anything fruitful. Please help me set the orientations.
//Save action
@IBAction func saveAction(_ sender: UIButton) {
HelperClass.shared().applyFilter(self.firstAsset, andSecondAsset: self.secondAsset, onviewController: self, andcompos: self.composition, completion: { (value, error, url) in
if let url = url {
print("url", url)
}
})
MBProgressHUD.showAdded(to: self.view, animated: true)
}
#pragma mark - Overlay task happens here; the filtered video returned by the applyFilter method is overlaid on the foreground video
-(void)saveVideosToPhotoAlbum:(AVURLAsset*)filteredasset andOriginalAsset:(AVURLAsset*)origAsset onViewController:(UIViewController*)vc{
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];
//----first track---//
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, origAsset.duration) ofTrack:[[origAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
//----second track---//
AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, filteredasset.duration) ofTrack:[[filteredasset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionInstruction * MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, origAsset.duration);
//----FirstlayerInstruction---//
AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
CGAffineTransform Scale = CGAffineTransformMakeScale(0.8f,1.0f);
CGAffineTransform Move = CGAffineTransformMakeTranslation(40,0);
[FirstlayerInstruction setTransform:CGAffineTransformConcat(Scale,Move) atTime:kCMTimeZero];
//----SecondLayerInstruction---//
AVMutableVideoCompositionLayerInstruction *SecondlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack];
CGAffineTransform SecondScale = CGAffineTransformMakeScale(1.5f,1.5f);
CGAffineTransform SecondMove = CGAffineTransformMakeTranslation(0,0);
[SecondlayerInstruction setTransform:CGAffineTransformConcat(SecondScale,SecondMove) atTime:kCMTimeZero];
MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction,SecondlayerInstruction,nil];
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 50);
MainCompositionInst.renderSize = CGSizeMake(firstTrack.naturalSize.width, firstTrack.naturalSize.height);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:@"overlapVideotest.mov"];
if([[NSFileManager defaultManager] fileExistsAtPath:myPathDocs])
{
[[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:nil];
}
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPreset1280x720];
exporter.outputURL=url;
[exporter setVideoComposition:MainCompositionInst];
exporter.outputFileType = AVFileTypeQuickTimeMovie;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exporter onViewController:vc];
});
}];
}
#pragma mark - Saving the final video to the photo gallery
- (void)exportDidFinish:(AVAssetExportSession*)session onViewController:(UIViewController*)vc {
if(session.status == AVAssetExportSessionStatusCompleted) {
NSURL *outputURL = session.outputURL;
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL]) {
[library writeVideoAtPathToSavedPhotosAlbum:outputURL
completionBlock:^(NSURL *assetURL, NSError *error) {
dispatch_async(dispatch_get_main_queue(), ^{
[MBProgressHUD hideHUDForView:vc.view animated:true];
if (error) {
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Video Saving Failed" delegate:nil cancelButtonTitle:@"Ok" otherButtonTitles:nil];
[alert show];
} else {
UIAlertView *alert = [[UIAlertView alloc]
initWithTitle:@"Video Saved" message:@"Saved To Photo Album" delegate:self cancelButtonTitle:@"Ok" otherButtonTitles: nil];
[alert show];
// [self loadMoviePlayer:outputURL];
}
});
}];
}
} else {
[MBProgressHUD hideHUDForView:vc.view animated:true];
NSLog(@"found an issue %@", session.error);
}
}
You will have to play with the transformations of the input video tracks. First, get each input video track's orientation, then work out the corresponding transform for its AVMutableVideoCompositionLayerInstruction. Here is the proper way to do it; I mimicked your saveVideo method with my own code base. I also changed the render size to 720 x 720 so that videos of all different resolutions look better.
Declare the orientation and zoomOrientation variables in the .h file so they can be used globally in the .m file: AVCaptureVideoOrientation orientation; and AVCaptureVideoOrientation zoomOrientation;.
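For reference, a minimal sketch of those declarations; the class name here is hypothetical, so put the ivars in whichever class owns saveVideosToPhotoAlbum:
#import <AVFoundation/AVFoundation.h>
// Hypothetical class name - the ivars just need to be visible to the methods below
@interface VideoMergeHelper : NSObject
{
AVCaptureVideoOrientation orientation;     // orientation of the original asset
AVCaptureVideoOrientation zoomOrientation; // orientation of the filtered asset
}
@end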
-(void)saveVideosToPhotoAlbum:(AVURLAsset*)filteredasset andOriginalAsset:(AVURLAsset*)origAsset onViewController:(UIViewController*)vc{
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];
//----first track---//
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, origAsset.duration) ofTrack:[[origAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
//----second track---//
AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, filteredasset.duration) ofTrack:[[filteredasset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionInstruction * MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, origAsset.duration);
//----FirstlayerInstruction---//
AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
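// Read each asset's capture orientation off its preferredTransform (see the videoOrientation: helper at the bottom)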
orientation = [self videoOrientation:origAsset];
zoomOrientation = [self videoOrientation:filteredasset];
BOOL isPortrait = NO;
switch (orientation) {
case AVCaptureVideoOrientationLandscapeRight:
isPortrait = NO;
break;
case AVCaptureVideoOrientationLandscapeLeft:
isPortrait = NO;
break;
case AVCaptureVideoOrientationPortrait:
isPortrait = YES;
break;
case AVCaptureVideoOrientationPortraitUpsideDown:
isPortrait = YES;
break;
default:
break;
}
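// A portrait track still reports its naturalSize in landscape terms, so swap width and height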
CGSize naturalSize = firstTrack.naturalSize;
if(isPortrait){
naturalSize = CGSizeMake(naturalSize.height,naturalSize.width);
}
BOOL isZoomPortrait = NO;
switch (zoomOrientation) {
case AVCaptureVideoOrientationLandscapeRight:
isZoomPortrait = NO;
break;
case AVCaptureVideoOrientationLandscapeLeft:
isZoomPortrait = NO;
break;
case AVCaptureVideoOrientationPortrait:
isZoomPortrait = YES;
break;
case AVCaptureVideoOrientationPortraitUpsideDown:
isZoomPortrait = YES;
break;
default:
break;
}
CGSize zoomNaturalSize = secondTrack.naturalSize;
if(isZoomPortrait){
zoomNaturalSize = CGSizeMake(zoomNaturalSize.height,zoomNaturalSize.width);
}
CGFloat aspectWidth = 720/naturalSize.width;
CGFloat aspectheight = 720/naturalSize.height;
CGFloat zoomAspectWidth = 720/zoomNaturalSize.width;
CGFloat zoomAspectheight = 720/zoomNaturalSize.height;
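// MIN aspect-fits the original track inside the 720 x 720 render square; MAX aspect-fills the filtered track over it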
CGFloat scale = MIN(aspectWidth, aspectheight);
CGFloat zoomScale = MAX(zoomAspectWidth, zoomAspectheight);
CGAffineTransform transform = [self transformFromOrientationWithVideoSizeWithAspect:naturalSize scale1:scale];
CGAffineTransform zoomTransform = [self zoomTransformFromOrientationWithVideoSizeWithAspect:zoomNaturalSize scale1:zoomScale];
[FirstlayerInstruction setTransform:transform atTime:kCMTimeZero];
//----SecondLayerInstruction---//
AVMutableVideoCompositionLayerInstruction *SecondlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack];
[SecondlayerInstruction setTransform:zoomTransform atTime:kCMTimeZero];
MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction,SecondlayerInstruction,nil];
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 30);
MainCompositionInst.renderSize = CGSizeMake(720, 720);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:@"overlapVideotest.mov"];
if([[NSFileManager defaultManager] fileExistsAtPath:myPathDocs])
{
[[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:nil];
}
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
[exporter setVideoComposition:MainCompositionInst];
exporter.outputFileType = AVFileTypeQuickTimeMovie;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exporter onViewController:vc];
});
}];
}
- (CGAffineTransform)transformFromOrientationWithVideoSizeWithAspect:(CGSize)naturalSize scale1:(float)scale
{
CGAffineTransform transform = CGAffineTransformIdentity;
transform = CGAffineTransformScale(transform,scale,scale);
CGSize size = CGSizeMake(720.0, 720.0);
switch (orientation) {
case AVCaptureVideoOrientationLandscapeRight:
transform = CGAffineTransformTranslate(transform, (size.width/scale) / 2.0 - naturalSize.width/2.0, (size.height/scale) / 2.0 - naturalSize.height/2.0);
break;
case AVCaptureVideoOrientationLandscapeLeft:
transform = CGAffineTransformTranslate(transform,
naturalSize.width,
naturalSize.height);
transform = CGAffineTransformRotate(transform, M_PI);
if (naturalSize.width > naturalSize.height){
transform = CGAffineTransformTranslate(transform, 0, -((size.height/scale) / 2.0 - naturalSize.height/2.0));
}else{
transform = CGAffineTransformTranslate(transform, -((size.width/scale) / 2.0 - naturalSize.width/2.0), 0);
}
break;
case AVCaptureVideoOrientationPortrait:
transform = CGAffineTransformTranslate(transform, naturalSize.width, 0);
transform = CGAffineTransformRotate(transform, M_PI_2);
transform = CGAffineTransformTranslate(transform, 0, -((size.width/scale) / 2.0 - naturalSize.width/2.0));
break;
case AVCaptureVideoOrientationPortraitUpsideDown:
transform = CGAffineTransformTranslate(transform, 0, naturalSize.height);
transform = CGAffineTransformRotate(transform, -M_PI_2);
break;
default:
break;
}
return transform;
}
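// Same logic as the method above, but driven by zoomOrientation (the filtered asset's orientation)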
- (CGAffineTransform)zoomTransformFromOrientationWithVideoSizeWithAspect:(CGSize)naturalSize scale1:(float)scale
{
CGAffineTransform transform = CGAffineTransformIdentity;
transform = CGAffineTransformScale(transform,scale,scale);
CGSize size = CGSizeMake(720.0, 720.0);
switch (zoomOrientation) {
case AVCaptureVideoOrientationLandscapeRight:
transform = CGAffineTransformTranslate(transform, (size.width/scale) / 2.0 - naturalSize.width/2.0, (size.height/scale) / 2.0 - naturalSize.height/2.0);
break;
case AVCaptureVideoOrientationLandscapeLeft:
transform = CGAffineTransformTranslate(transform,
naturalSize.width,
naturalSize.height);
transform = CGAffineTransformRotate(transform, M_PI);
if (naturalSize.width > naturalSize.height){
transform = CGAffineTransformTranslate(transform, 0, -((size.height/scale) / 2.0 - naturalSize.height/2.0));
}else{
transform = CGAffineTransformTranslate(transform, -((size.width/scale) / 2.0 - naturalSize.width/2.0), 0);
}
break;
case AVCaptureVideoOrientationPortrait:
transform = CGAffineTransformTranslate(transform, naturalSize.width, 0);
transform = CGAffineTransformRotate(transform, M_PI_2);
transform = CGAffineTransformTranslate(transform, 0, -((size.width/scale) / 2.0 - naturalSize.width/2.0));
break;
case AVCaptureVideoOrientationPortraitUpsideDown:
transform = CGAffineTransformTranslate(transform, 0, naturalSize.height);
transform = CGAffineTransformRotate(transform, -M_PI_2);
break;
default:
break;
}
return transform;
}
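// Maps a track's preferredTransform onto a capture orientation; the four checks are the standard rotation matrices iOS records for each camera orientation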
-(AVCaptureVideoOrientation)videoOrientation:(AVAsset *)asset
{
AVCaptureVideoOrientation result = 0; // stays 0 (treated as "unknown") if the preferred transform matches none of the four cases below
NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
if([tracks count] > 0) {
AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
CGAffineTransform t = videoTrack.preferredTransform;
// Portrait
if(t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0)
{
result = AVCaptureVideoOrientationPortrait;
}
// PortraitUpsideDown
if(t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
result = AVCaptureVideoOrientationPortraitUpsideDown;
}
// LandscapeRight
if(t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0)
{
result = AVCaptureVideoOrientationLandscapeRight;
}
// LandscapeLeft
if(t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0)
{
result = AVCaptureVideoOrientationLandscapeLeft;
}
}
return result;
}
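Nothing changes at the call site. Purely as an illustration (the two URLs are placeholders for your real assets):
AVURLAsset *filtered = [AVURLAsset URLAssetWithURL:filteredVideoURL options:nil];
AVURLAsset *original = [AVURLAsset URLAssetWithURL:originalVideoURL options:nil];
[self saveVideosToPhotoAlbum:filtered andOriginalAsset:original onViewController:self];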