I am recording short video clips (roughly one second each, with both the front and rear camera, so the orientations may differ). I then try to merge them using AVAssetExportSession. I basically build a composition and a video composition with the appropriate transforms and with both the audio and video tracks.
The problem is that on iOS 5 the export fails as soon as there are more than 4 video clips, and on iOS 6 the limit appears to be 16 clips.
This seems really puzzling to me. Is AVAssetExportSession doing something strange, or is there some undocumented limit on the number of clips that can be passed to it? Here is an excerpt from my code:
-(void)exportVideo
{
    AVMutableComposition *composition = video.composition;
    AVMutableVideoComposition *videoComposition = video.videoComposition;

    NSString *presetName = AVAssetExportPresetMediumQuality;
    AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:composition presetName:presetName];
    self.exportSession = _assetExport;

    videoComposition.renderSize = CGSizeMake(640, 480);
    _assetExport.videoComposition = videoComposition;

    NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"export.mov"];
    NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];

    // Delete the previously exported file if it exists
    if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
        [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];

    _assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    _assetExport.outputURL = exportUrl;
    _assetExport.shouldOptimizeForNetworkUse = YES;

    [_assetExport exportAsynchronouslyWithCompletionHandler:^{
        switch (_assetExport.status)
        {
            case AVAssetExportSessionStatusCompleted:
                NSLog(@"Completed exporting!");
                break;
            case AVAssetExportSessionStatusFailed:
                NSLog(@"Failed: %@", _assetExport.error.description);
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"Canceled: %@", _assetExport.error);
                break;
            default:
                break;
        }
    }];
}
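When the export fails, the switch above only prints the error's description. In case it helps with diagnosing this, the extra logging I could add to the failed branch would look roughly like the sketch below (it assumes AVFoundation attaches an underlying error under NSUnderlyingErrorKey, which may not always be the case):

case AVAssetExportSessionStatusFailed: {
    NSError *error = _assetExport.error;
    // Log the top-level error and its code
    NSLog(@"Failed: %@ (code %ld)", error.localizedDescription, (long)error.code);
    // Log any underlying error AVFoundation may have attached (may be nil)
    NSError *underlying = [error.userInfo objectForKey:NSUnderlyingErrorKey];
    if (underlying)
        NSLog(@"Underlying error: %@", underlying);
    break;
}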
And this is how the composition is built:
-(void)setVideoAndExport
{
    video = nil;
    video = [[VideoComposition alloc] initVideoTracks];

    CMTime localTimeline = kCMTimeZero;

    // Build the composition from all of the video files
    for (NSURL *url in outputFileUrlArray) {
        AVAsset *asset = [[AVURLAsset alloc] initWithURL:url options:nil];
        [video setVideo:url at:localTimeline];
        localTimeline = CMTimeAdd(localTimeline, asset.duration); // Advance the timeline
    }

    [self exportVideo];
}
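To sanity-check the insertion points, the accumulated timeline can be dumped for each clip before exporting; a purely diagnostic sketch of that loop (using CMTimeShow from CoreMedia) might look like this:

// Purely diagnostic: print where each clip would start on the composition timeline.
CMTime cursor = kCMTimeZero;
for (NSURL *url in outputFileUrlArray) {
    AVAsset *asset = [[AVURLAsset alloc] initWithURL:url options:nil];
    NSLog(@"Clip %@ starts at:", url.lastPathComponent);
    CMTimeShow(cursor); // prints the CMTime value to the console
    cursor = CMTimeAdd(cursor, asset.duration);
}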
And here is the gist of the VideoComposition class:
-(id)initVideoTracks
{
    if ((self = [super init]))
    {
        composition = [[AVMutableComposition alloc] init];
        [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];

        mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        instructions = [[NSMutableArray alloc] init];
        videoComposition = [AVMutableVideoComposition videoComposition];
    }
    return self;
}
-(void)setVideo:(NSURL *)url at:(CMTime)to
{
    asset = [[AVURLAsset alloc] initWithURL:url options:nil];
    AVAssetTrack *assetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

    // Each clip gets its own video and audio composition track
    AVMutableCompositionTrack *compositionTrackVideo = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionTrackVideo insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:assetTrack atTime:to error:nil];

    AVMutableCompositionTrack *compositionTrackAudio = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionTrackAudio insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:[[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:to error:nil];

    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(to, asset.duration));

    // Apply the clip's original orientation and hide it once its time range has passed
    AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionTrackVideo];
    [layerInstruction setTransform:assetTrack.preferredTransform atTime:kCMTimeZero];
    [layerInstruction setOpacity:0.0 atTime:CMTimeAdd(to, asset.duration)];

    [instructions addObject:layerInstruction];
    mainInstruction.layerInstructions = instructions;

    videoComposition.instructions = [NSArray arrayWithObject:mainInstruction];
    videoComposition.frameDuration = CMTimeMake(1, 30);
}
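Note that setVideo:at: adds a brand-new video track and audio track to the composition for every clip, so the number of composition tracks grows linearly with the number of clips. A quick diagnostic I can run right before exporting, just standard tracksWithMediaType: counting, to confirm how many tracks the export session actually has to handle:

// Purely diagnostic: count the tracks the composition has accumulated before exporting.
NSUInteger videoTrackCount = [[composition tracksWithMediaType:AVMediaTypeVideo] count];
NSUInteger audioTrackCount = [[composition tracksWithMediaType:AVMediaTypeAudio] count];
NSLog(@"Composition has %lu video and %lu audio tracks",
      (unsigned long)videoTrackCount, (unsigned long)audioTrackCount);

With 16 clips that is already more than 30 tracks, so I am also wondering whether the limit I am hitting is tied to the track count rather than the clip count itself.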