I want to record multiple video clips using UIImagePickerController and stitch them together into a single video, much like the Vine app. Can anyone point me in the right direction on how to combine the video clips into one video?
2 Answers
There is a tutorial on Ray Wenderlich's site that covers this: http://www.raywenderlich.com/13418/how-to-play-record-edit-videos-in-ios
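For context, that tutorial (and the answer below) assumes the picker has already been presented configured for video capture. A minimal sketch of doing that could look like the following; the method name startRecordingClip and the clip-length cap are my own assumptions, and self is assumed to adopt UIImagePickerControllerDelegate and UINavigationControllerDelegate:

#import <MobileCoreServices/MobileCoreServices.h>   // for kUTTypeMovie

// Hypothetical helper: presents the camera configured to record a movie clip.
- (void)startRecordingClip
{
    if (![UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera]) {
        return; // no camera available (e.g. the simulator)
    }
    UIImagePickerController *picker = [[UIImagePickerController alloc] init];
    picker.sourceType = UIImagePickerControllerSourceTypeCamera;
    picker.mediaTypes = @[(NSString *)kUTTypeMovie];   // capture video rather than stills
    picker.videoMaximumDuration = 10.0;                // optional Vine-style clip-length cap (assumption)
    picker.delegate = self;
    [self presentViewController:picker animated:YES completion:nil];
}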
answered Sep 3, 2013 at 10:11
Here is an example of how to stitch the videos together, based in part on the Ray Wenderlich blog:
- (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info
{
    [appDelegate.playerLayer removeFromSuperlayer];

    // Store the newly recorded clip as either the first or the second asset.
    if (self.firstAsset)
    {
        self.secondAssetURL = [info objectForKey:UIImagePickerControllerMediaURL];
        self.secondAsset = [AVAsset assetWithURL:self.secondAssetURL];
    }
    else
    {
        self.firstAssetURL = [info objectForKey:UIImagePickerControllerMediaURL];
        self.firstAsset = [AVAsset assetWithURL:self.firstAssetURL];
    }

    if (self.secondAsset)
    {
        CMTime recordTime = CMTimeMakeWithSeconds(self.recordTime, NSEC_PER_SEC);

        // 1 - Create a composition to hold both clips
        AVMutableComposition *mixComposition = [AVMutableComposition new];

        // 2 - Video track: append the second clip right after the first one
        AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                             preferredTrackID:kCMPersistentTrackID_Invalid];
        [videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, self.firstAsset.duration)
                            ofTrack:[[self.firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                             atTime:kCMTimeZero
                              error:nil];
        [videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, self.secondAsset.duration)
                            ofTrack:[[self.secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                             atTime:recordTime
                              error:nil];

        // 3 - Audio tracks: same ranges, with the second clip offset by recordTime
        AVURLAsset *firstAudioAsset = [[AVURLAsset alloc] initWithURL:self.firstAssetURL options:nil];
        AVURLAsset *secondAudioAsset = [[AVURLAsset alloc] initWithURL:self.secondAssetURL options:nil];
        AVAssetTrack *firstAudioTrack = [[firstAudioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
        AVAssetTrack *secondAudioTrack = [[secondAudioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
        AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                        preferredTrackID:kCMPersistentTrackID_Invalid];
        [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAudioAsset.duration)
                                       ofTrack:firstAudioTrack
                                        atTime:kCMTimeZero
                                         error:nil];
        [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAudioAsset.duration)
                                       ofTrack:secondAudioTrack
                                        atTime:recordTime
                                         error:nil];

        // 4 - Build a unique output path in the Documents directory
        NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
        NSString *documentsDirectory = [paths objectAtIndex:0];
        NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
                                [NSString stringWithFormat:@"mergeVideo-%d.mov", arc4random() % 1000]];
        NSURL *url = [NSURL fileURLWithPath:myPathDocs];

        // 5 - Create exporter and write the merged movie to disk
        AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                          presetName:AVAssetExportPresetHighestQuality];
        exporter.outputURL = url;
        exporter.outputFileType = AVFileTypeQuickTimeMovie;
        exporter.shouldOptimizeForNetworkUse = YES;
        [exporter exportAsynchronouslyWithCompletionHandler:^{
            NSLog(@"we are now exporting");
            AVAssetExportSessionStatus exportStatus = exporter.status;
            switch (exportStatus) {
                case AVAssetExportSessionStatusFailed: {
                    NSError *exportError = exporter.error;
                    NSLog(@"AVAssetExportSessionStatusFailed: %@", exportError);
                    break;
                }
                case AVAssetExportSessionStatusCompleted: {
                    NSLog(@"AVAssetExportSessionStatusCompleted");
                    // Treat the merged movie as the new "first" asset so the next clip is appended to it.
                    self.firstAssetURL = exporter.outputURL;
                    self.firstAsset = [AVAsset assetWithURL:self.firstAssetURL];
                    dispatch_async(dispatch_get_main_queue(), ^{
                        [self layoutVideo:exporter.outputURL];
                    });
                    break;
                }
                default: {
                    NSLog(@"didn't get export status");
                    break;
                }
            }
        }];
    }
    else
    {
        // Only one clip recorded so far - just show it.
        [self layoutVideo:[info objectForKey:UIImagePickerControllerMediaURL]];
    }
}
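Note that layoutVideo: and appDelegate.playerLayer above are the poster's own helpers and are not shown. A minimal sketch of what such a helper might do, assuming the app delegate exposes a retained AVPlayerLayer property named playerLayer that is used for the preview:

// Hypothetical helper: previews the movie at the given URL.
- (void)layoutVideo:(NSURL *)videoURL
{
    AVPlayer *player = [AVPlayer playerWithURL:videoURL];
    AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:player];
    playerLayer.frame = self.view.bounds;
    playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;

    [appDelegate.playerLayer removeFromSuperlayer];   // drop any previous preview layer
    appDelegate.playerLayer = playerLayer;
    [self.view.layer addSublayer:playerLayer];
    [player play];
}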
answered Apr 3, 2015 at 8:14