I know this question has been asked a few times in the past, and I have read the responses, but nothing works the way I want. I have multiple videos, all added to the queue of an AVQueuePlayer.
I have tried adding them in two ways, as mentioned in other answers:
AVPlayerItem *item1 = [AVPlayerItem playerItemWithURL:url1];
AVPlayerItem *item2 = [AVPlayerItem playerItemWithURL:url2];
NSArray *playerItems = [[NSArray alloc] initWithObjects:item1, item2, nil];
avPlayer = [[AVQueuePlayer alloc] initWithItems:playerItems];
And this way:
avPlayer = [[AVQueuePlayer alloc] init];
NSArray *keys = [NSArray arrayWithObject:@"playable"];

// Load each asset's "playable" key asynchronously, then enqueue on the main queue.
// Note: items are enqueued in completion order, so the second item can land before the first.
AVURLAsset *asset1 = [[AVURLAsset alloc] initWithURL:url1 options:nil];
[asset1 loadValuesAsynchronouslyForKeys:keys completionHandler:^()
{
    dispatch_async(dispatch_get_main_queue(), ^
    {
        AVPlayerItem *playerItem = [[AVPlayerItem alloc] initWithAsset:asset1];
        [avPlayer insertItem:playerItem afterItem:nil];
    });
}];

AVURLAsset *asset2 = [[AVURLAsset alloc] initWithURL:url2 options:nil];
[asset2 loadValuesAsynchronouslyForKeys:keys completionHandler:^()
{
    dispatch_async(dispatch_get_main_queue(), ^
    {
        AVPlayerItem *playerItem = [[AVPlayerItem alloc] initWithAsset:asset2];
        [avPlayer insertItem:playerItem afterItem:nil];
    });
}];
But neither approach removes the black screen when advancing to the next item. There is a gap of around one second before the next item starts playing. How can I remove this gap?
UPDATE: I also tried AVMutableComposition. The gaps are reduced significantly, but they are still noticeable. Is there ANY way to remove these gaps completely?

Here is the AVMutableComposition code:
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];
NSMutableArray *arrayInstruction = [[NSMutableArray alloc] init];
AVMutableVideoCompositionInstruction * MainInstruction =
[AVMutableVideoCompositionInstruction videoCompositionInstruction];
AVMutableCompositionTrack *audioTrack;
audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime duration = kCMTimeZero;
for(int i = 0; i <= 5; i++)
{
AVAsset *currentAsset = [self currentAsset:i]; // fetch the i-th asset to append
AVMutableCompositionTrack *currentTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[currentTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:duration error:nil];
[audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:duration error:nil];
AVMutableVideoCompositionLayerInstruction *currentAssetLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:currentTrack];
AVAssetTrack *currentAssetTrack = [[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
ALAssetOrientation currentAssetOrientation = ALAssetOrientationUp;
BOOL isCurrentAssetPortrait = NO;
CGAffineTransform currentTransform = currentAssetTrack.preferredTransform;
// Derive the capture orientation from the track's preferred transform.
if (currentTransform.a == 0 && currentTransform.b == 1.0 && currentTransform.c == -1.0 && currentTransform.d == 0) {
    currentAssetOrientation = ALAssetOrientationRight;
    isCurrentAssetPortrait = YES;
}
if (currentTransform.a == 0 && currentTransform.b == -1.0 && currentTransform.c == 1.0 && currentTransform.d == 0) {
    currentAssetOrientation = ALAssetOrientationLeft;
    isCurrentAssetPortrait = YES;
}
if (currentTransform.a == 1.0 && currentTransform.b == 0 && currentTransform.c == 0 && currentTransform.d == 1.0) {
    currentAssetOrientation = ALAssetOrientationUp;
}
if (currentTransform.a == -1.0 && currentTransform.b == 0 && currentTransform.c == 0 && currentTransform.d == -1.0) {
    currentAssetOrientation = ALAssetOrientationDown;
}
CGFloat currentAssetScaleToFitRatio = 640.0 / 640.0; // source and render size are both 640x640, so no scaling
CGAffineTransform currentAssetScaleFactor = CGAffineTransformMakeScale(currentAssetScaleToFitRatio, currentAssetScaleToFitRatio);
if (isCurrentAssetPortrait) {
    [currentAssetLayerInstruction setTransform:CGAffineTransformConcat(currentAssetTrack.preferredTransform, currentAssetScaleFactor) atTime:duration];
} else {
    [currentAssetLayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(currentAssetTrack.preferredTransform, currentAssetScaleFactor), CGAffineTransformMakeTranslation(0, 0)) atTime:duration];
}
duration=CMTimeAdd(duration, currentAsset.duration);
[arrayInstruction addObject:currentAssetLayerInstruction];
}
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, duration);
MainInstruction.layerInstructions = arrayInstruction;
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 30);
MainCompositionInst.renderSize = CGSizeMake(640.0, 640.0);
NSString *filename = @"mergedVideo.mp4";
pathForFile = [NSTemporaryDirectory() stringByAppendingPathComponent:filename];
NSFileManager *fileManager = [NSFileManager defaultManager];
BOOL deleted = [fileManager removeItemAtPath:pathForFile error:NULL];
NSLog(@"Deleted old output file? %d", deleted);
NSURL *url = [NSURL fileURLWithPath:pathForFile];
NSLog(@"Output url: %@", url);
NSError *err;
if ([url checkResourceIsReachableAndReturnError:&err] == NO)
    NSLog(@"Output path is clear"); // nothing left at the path, safe to export
else
    NSLog(@"ERROR: a file still exists at the output path");
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeMPEG4; // match the .mp4 extension on the output path
exporter.videoComposition = MainCompositionInst;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
switch (exporter.status)
{
case AVAssetExportSessionStatusCompleted:
{
NSURL *outputURL = exporter.outputURL;
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL]) {
    [library writeVideoAtPathToSavedPhotosAlbum:outputURL completionBlock:^(NSURL *assetURL, NSError *error)
    {
        NSLog(@"Asset URL: %@", assetURL);
        if (error)
        {
            NSLog(@"Error: %@", error);
        } else {
            NSLog(@"Video saved");
        }
    }];
    NSLog(@"Video merge successful");
    currentFile++;
}
}
break;
case AVAssetExportSessionStatusFailed:
NSLog(@"Failed:%@", exporter.error.description);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"Canceled:%@", exporter.error);
break;
case AVAssetExportSessionStatusExporting:
NSLog(@"Exporting!");
break;
case AVAssetExportSessionStatusWaiting:
NSLog(@"Waiting");
break;
default:
break;
}
}];
For Ultravisual we used AVMutableComposition, and as long as we built up the composition first and then built a player to play it, we got flawless gap-free playback everywhere except when looping.
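For reference, that "composition first, then player" flow might look something like the minimal sketch below. It reuses mixComposition and MainCompositionInst from the question's code; the player and layer names are made up for illustration.

// Minimal sketch: play the composition directly instead of exporting it first.
// mixComposition and MainCompositionInst come from the question's code;
// compositionItem, compositionPlayer, and playerLayer are hypothetical names.
AVPlayerItem *compositionItem = [AVPlayerItem playerItemWithAsset:mixComposition];
compositionItem.videoComposition = MainCompositionInst;
AVPlayer *compositionPlayer = [AVPlayer playerWithPlayerItem:compositionItem];

AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:compositionPlayer];
playerLayer.frame = self.view.bounds;
[self.view.layer addSublayer:playerLayer];
[compositionPlayer play];

Because there is only one player item, there is no item boundary for AVQueuePlayer to stumble over.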
Can you walk through all the tracks in your AVMutableComposition and verify there are no gaps? Don't forget the audio tracks. Sometimes audio and video have different timestamps; you may need to add another track to your AVMutableComposition to get around this.
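As a starting point for that check, here is a rough sketch against the mixComposition from the question. It walks each track's segments and logs empty segments and any segment that does not start exactly where the previous one ended.

// Rough gap check: for every track in the composition, verify that each
// segment begins exactly where the previous one ended, and flag empty
// segments, which play back as black video or silent audio.
for (AVCompositionTrack *track in mixComposition.tracks) {
    CMTime cursor = kCMTimeZero;
    for (AVCompositionTrackSegment *segment in track.segments) {
        CMTimeRange target = segment.timeMapping.target;
        if (segment.isEmpty) {
            NSLog(@"Track %d has an empty segment at %f",
                  track.trackID, CMTimeGetSeconds(target.start));
        }
        if (CMTimeCompare(target.start, cursor) != 0) {
            NSLog(@"Track %d has a gap before %f",
                  track.trackID, CMTimeGetSeconds(target.start));
        }
        cursor = CMTimeRangeGetEnd(target);
    }
}

If the audio and video cursors end up at different final times, the shorter track leaves a hole at the next insertion point; trimming both insertions to the shorter of the two track durations is one way to keep the composition contiguous.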