
I want to merge audio and video in my application using AVMutableComposition, and I used the following code in my application to get the merged output data:

-(void) playerFunction
{
    NSString  *fileNamePath = @"mexicanDance.mp3";//myaudio
    NSArray   *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,NSUserDomainMask, YES);
    NSString  *documentsDirectory = [paths  objectAtIndex:0];
    NSString  *oldappSettingsPath = [documentsDirectory stringByAppendingPathComponent:fileNamePath];
    NSURL *audioUrl = [NSURL fileURLWithPath:oldappSettingsPath];
    NSString  *fileNamePath1 = @"Egg_break.mov";//my video
    NSArray   *paths1 = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,NSUserDomainMask, YES);
    NSString  *documentsDirectory1 = [paths1  objectAtIndex:0];
    NSString  *oldappSettingsPath1 = [documentsDirectory1 stringByAppendingPathComponent:fileNamePath1];
    NSLog(@"oldpath=%@",oldappSettingsPath);
    NSURL *videoUrl = [NSURL fileURLWithPath:oldappSettingsPath1];
//    if (avPlayer.duration >0.00000)
//    {
        NSLog(@"SOMEDATA     IS THERE ");
        AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audioUrl options:nil];
        AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:videoUrl options:nil];

        AVMutableComposition* mixComposition = [AVMutableComposition composition];

        NSLog(@"audio =%@",[audioAsset tracksWithMediaType:AVMediaTypeAudio]);
    NSLog(@"Video =%@",[videoAsset tracksWithMediaType:AVMediaTypeVideo]);

        AVMutableCompositionTrack *compositionCommentaryTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    NSLog(@"%@",compositionCommentaryTrack);
        [compositionCommentaryTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset.duration) ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];

        AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];


        AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetPassthrough];

        NSString* videoName = @"output.mp4";//outputdata

        NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:videoName];
        NSURL    *exportUrl = [NSURL fileURLWithPath:exportPath];

        if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
        {
            [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
        }

        _assetExport.outputFileType = @"com.apple.quicktime-movie";
        NSLog(@"file type %@",_assetExport.outputFileType);
        _assetExport.outputURL = exportUrl;
        _assetExport.shouldOptimizeForNetworkUse = YES;



        [_assetExport exportAsynchronouslyWithCompletionHandler:
         ^(void )
         {

             NSString  *fileNamePath = @"sound_record.mov";
             NSArray   *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,NSUserDomainMask, YES);
             NSString  *documentsDirectory = [paths  objectAtIndex:0];
             NSString  *oldappSettingsPath = [documentsDirectory stringByAppendingPathComponent:fileNamePath];


             //             if ([[NSFileManager defaultManager] fileExistsAtPath:oldappSettingsPath]) {
             //
             //                 NSFileManager *fileManager = [NSFileManager defaultManager];
             //                 [fileManager removeItemAtPath: oldappSettingsPath error:NULL];
             //
             //             }
             NSURL *documentDirectoryURL = [NSURL fileURLWithPath:oldappSettingsPath];
             [[NSFileManager defaultManager] copyItemAtURL:exportUrl toURL:documentDirectoryURL error:nil];
             [audioAsset release];
             [videoAsset release];
             [_assetExport release];
         }       
         ];

//}
    video = [[MPMoviePlayerController alloc] init];
    if (UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPad)
        video.view.frame = CGRectMake(0, 0, 768, 1004);
    else
        video.view.frame = CGRectMake(0, 0, 320, 460);

    NSString * audioPath=[[NSBundle mainBundle] pathForResource:@"output" ofType:@"mp4"];
    [video setContentURL:[NSURL fileURLWithPath:audioPath]];

    [video prepareToPlay];
    [video play];
    [self.view addSubview:video.view];
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(stopAudio)
                                                 name:MPMoviePlayerPlaybackDidFinishNotification object:video];

    [video setFullscreen:YES];
}

However, this code does not work because both AVURLAssets are returning empty track sets. I can't figure out what the error is. Can someone help me solve this problem?

Thanks in advance.


2 Answers


Make sure that both files (the audio and the video) have the proper extensions and that their durations are not empty. You can check this before using them, like this:

if ([[_videoAsset tracksWithMediaType:AVMediaTypeVideo] count] != 0)
{
    // Write code here to add the video to the composition.
    // You can do the same check for the audio asset too.
}
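
As a rough sketch of that idea, assuming the audioAsset, videoAsset, and mixComposition variables from the question, the guard could be applied to both tracks before they are inserted. Passing a real NSError to insertTimeRange:ofTrack:atTime:error: (instead of nil) also makes any failure visible:

// Minimal sketch, assuming `audioAsset` and `videoAsset` are the AVURLAssets
// from the question and `mixComposition` is the AVMutableComposition.
NSArray *audioTracks = [audioAsset tracksWithMediaType:AVMediaTypeAudio];
NSArray *videoTracks = [videoAsset tracksWithMediaType:AVMediaTypeVideo];

if ([audioTracks count] != 0 && [videoTracks count] != 0)
{
    NSError *error = nil;

    // Add the audio track to the composition.
    AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                        preferredTrackID:kCMPersistentTrackID_Invalid];
    [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
                        ofTrack:[audioTracks objectAtIndex:0]
                         atTime:kCMTimeZero
                          error:&error];

    // Add the video track to the composition.
    AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                        preferredTrackID:kCMPersistentTrackID_Invalid];
    [videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                        ofTrack:[videoTracks objectAtIndex:0]
                         atTime:kCMTimeZero
                          error:&error];

    if (error)
    {
        NSLog(@"Failed to build composition: %@", error);
    }
}
else
{
    // One of the source files could not be read (wrong path or wrong extension),
    // which is exactly the "empty set" described in the question.
    NSLog(@"Missing tracks - audio: %lu, video: %lu",
          (unsigned long)[audioTracks count], (unsigned long)[videoTracks count]);
}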