私はビデオマッシュアップアプリを構築しています。ユーザーは1つ目のトラックにメインのビデオクリップを並べて設定でき、2つ目のトラックはBロールトラックとして機能し、時々メイントラックに関連するコンテンツを重ねて表示します。1つ目のトラックは実装済みで、タイムライン上のクリップはAVMutableVideoCompositionInstructionを使ってコンポジションにまとめ、クリップ間はディップ・トゥ・ブラック(黒へのフェード)で遷移するようにしています。しかし、独立して制御されるBロールトラックをどう組み込めばよいのかが分からず、何日も悩んでいます。以下が1つ目のトラック分のコンテンツを構築するコードです。ヒントをいただけるAVFoundationに詳しい方はいませんか?
// Builds track A of the composition from videoLine.items: clips are butted
// end-to-end on compositionVideoTrack[0]/compositionAudioTrack[0], and a
// dip-to-black transition is produced by fading track A's opacity out over
// the tail of each clip and back in over the head of the next one.
// Audio volume is ramped between per-clip levels across the same window.
CMTime nextClipStartTime = kCMTimeZero;
NSInteger i;
// Transition length: 1 second, expressed at a timescale of 30.
CMTime transitionDuration = CMTimeMakeWithSeconds(1, 30);
AVMutableCompositionTrack *compositionVideoTrack[2];
AVMutableCompositionTrack *compositionAudioTrack[2];
// Slot 0 = main (A) track, slot 1 = reserved for the b-roll (B) track.
compositionVideoTrack[0] = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
compositionAudioTrack[0] = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
compositionAudioTrack[1] = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
compositionVideoTrack[1] = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
// Background music bed, independent of the clip audio tracks.
AVMutableCompositionTrack *bedMusicTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
i = 0;
// NOTE(review): audioInputParams are never added to allAudioParams in this
// snippet — presumably that happens later when the AVMutableAudioMix is
// assembled; confirm, otherwise the volume ramps below have no effect.
NSMutableArray *allAudioParams = [NSMutableArray array];
AVMutableAudioMixInputParameters *audioInputParams[2];
audioInputParams[0] = [AVMutableAudioMixInputParameters audioMixInputParameters];
audioInputParams[1] = [AVMutableAudioMixInputParameters audioMixInputParameters];
[audioInputParams[0] setTrackID:compositionAudioTrack[0].trackID];
[audioInputParams[1] setTrackID:compositionAudioTrack[1].trackID];
float lastVol = 0;
NSMutableArray *instructions = [NSMutableArray array];
for (ClipInfo *info in videoLine.items) {
    AVAsset *asset = [AVAsset assetWithURL:info.url];
    CMTimeRange timeRangeInAsset = CMTimeRangeMake(info.inTime, info.duration);

    // firstObject is nil-safe; objectAtIndex:0 would throw on an asset
    // with no video track (e.g. an audio-only or unreadable file).
    AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (!clipVideoTrack) {
        NSLog(@"Skipping clip with no video track: %@", info.url);
        i++;
        continue;
    }
    NSError *insertError = nil;
    if (![compositionVideoTrack[0] insertTimeRange:timeRangeInAsset ofTrack:clipVideoTrack atTime:nextClipStartTime error:&insertError]) {
        NSLog(@"Failed to insert video for %@: %@", info.url, insertError);
    }

    // A clip without audio is legal — just leave that stretch silent.
    AVAssetTrack *clipAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    if (clipAudioTrack) {
        insertError = nil;
        if (![compositionAudioTrack[0] insertTimeRange:timeRangeInAsset ofTrack:clipAudioTrack atTime:nextClipStartTime error:&insertError]) {
            NSLog(@"Failed to insert audio for %@: %@", info.url, insertError);
        }
    }

    // Ramp from the previous clip's volume starting one transition before
    // this clip begins, landing on this clip's volume at its start.
    if (i != 0) {
        [audioInputParams[0] setVolume:lastVol atTime:CMTimeSubtract(nextClipStartTime, transitionDuration)];
    }
    [audioInputParams[0] setVolume:info.volume atTime:nextClipStartTime];
    lastVol = info.volume;

    // The pass-through (fully opaque) span excludes the fade-in window at
    // the head (except for the first clip) and the fade-out window at the
    // tail (except for the last clip).
    CMTime clipStartTime = (i == 0) ? nextClipStartTime : CMTimeAdd(nextClipStartTime, transitionDuration);
    CMTime clipDuration = (i == 0 || i == (videoLine.items.count - 1))
        ? CMTimeSubtract(timeRangeInAsset.duration, transitionDuration)
        : CMTimeSubtract(timeRangeInAsset.duration, CMTimeMultiply(transitionDuration, 2));
    if ([videoLine.items count] == 1) {
        // A lone clip has neither fade window; show it in full.
        clipDuration = timeRangeInAsset.duration;
    }

    if (i != 0) {
        // Fade in from black over the head of this clip.
        AVMutableVideoCompositionInstruction *inInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        inInstruction.timeRange = CMTimeRangeMake(nextClipStartTime, transitionDuration);
        AVMutableVideoCompositionLayerInstruction *fadeIn = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack[0]];
        [fadeIn setOpacityRampFromStartOpacity:0 toEndOpacity:1 timeRange:CMTimeRangeMake(nextClipStartTime, transitionDuration)];
        inInstruction.layerInstructions = [NSArray arrayWithObject:fadeIn];
        [instructions addObject:inInstruction];
    }

    // Fully-visible middle of the clip.
    AVMutableVideoCompositionInstruction *passThroughInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    passThroughInstruction.timeRange = CMTimeRangeMake(clipStartTime, clipDuration);
    AVMutableVideoCompositionLayerInstruction *passThroughLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack[0]];
    passThroughInstruction.layerInstructions = [NSArray arrayWithObject:passThroughLayer];
    [instructions addObject:passThroughInstruction];

    if (i < (videoLine.items.count - 1)) {
        // Fade out to black over the tail of this clip.
        AVMutableVideoCompositionInstruction *outInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        outInstruction.timeRange = CMTimeRangeMake(CMTimeAdd(clipStartTime, clipDuration), transitionDuration);
        AVMutableVideoCompositionLayerInstruction *fadeOut = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack[0]];
        [fadeOut setOpacityRampFromStartOpacity:1.0 toEndOpacity:0 timeRange:CMTimeRangeMake(CMTimeAdd(clipStartTime, clipDuration), transitionDuration)];
        outInstruction.layerInstructions = [NSArray arrayWithObject:fadeOut];
        [instructions addObject:outInstruction];
    }

    // Clips are butted end-to-end: the next clip starts exactly where this
    // one's inserted range ends.
    nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration);

    if (i == ([videoLine.items count] - 1)) {
        // Hold the final clip's volume through the end of the timeline.
        [audioInputParams[0] setVolume:info.volume atTime:nextClipStartTime];
    }
    i++;
}