4

サーバーから受信した h.264 ストリームを動画ファイルに書き出したいのですが、assetWriter の finishWriting を使用すると、Xcode は次のように報告します。

Video /var/mobile/Applications/DE4196F1-BB77-4B7D-8C20-7A5D6223C64D/Documents/test.mov cannot be saved to the saved photos album: Error Domain=NSOSStatusErrorDomain Code=-12847 "This movie format is not supported." UserInfo=0x5334830 {NSLocalizedDescription=This movie format is not supported.}"

以下は私のコードです。データは 1 つの h.264 フレームで、I フレームまたは P フレームの可能性があります。

(void)_encodeVideoFrame2:(NSData *) data time:(double)tm 
{
  CMBlockBufferRef videoBlockBuffer=NULL;
  CMFormatDescriptionRef videoFormat=NULL;
  CMSampleBufferRef videoSampleBuffer=NULL;
  CMItemCount numberOfSampleTimeEntries=1;
  CMItemCount numberOfSamples=1;
  CMVideoFormatDescriptionCreate(kCFAllocatorDefault, kCMVideoCodecType_H264, 320, 240, NULL, &videoFormat);
  OSStatus result;
  result=CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault, NULL, data.length, kCFAllocatorDefault, NULL, 0, data.length, kCMBlockBufferAssureMemoryNowFlag, &videoBlockBuffer);
  result=CMBlockBufferReplaceDataBytes(data.bytes, videoBlockBuffer, 0, data.length);
  CMSampleTimingInfo videoSampleTimingInformation={CMTimeMake(tm*600, 600)};
  size_t sampleSizeArray[1];
  sampleSizeArray[0]=data.length;
  result=CMSampleBufferCreate(kCFAllocatorDefault, videoBlockBuffer, TRUE, NULL, NULL, videoFormat, numberOfSamples, numberOfSampleTimeEntries, &videoSampleTimingInformation, 1, sampleSizeArray, &videoSampleBuffer);
  result = CMSampleBufferMakeDataReady(videoSampleBuffer);
  [assetWriterInput appendSampleBuffer:videoSampleBuffer]; 
}

おそらく CMSampleBufferCreate の引数が間違っているのでしょうか?よろしくお願いします。

4

2 に答える 2

3

このコードを試してください

  • (IBAction)createVideo:(id)sender {

    ///////////// setup OR function def if we move this to a separate function //////////// // this should be moved to its own function, that can take an imageArray, videoOutputPath, etc... // - (void)exportImages:(NSMutableArray *)imageArray // asVideoToPath:(NSString *)videoOutputPath // withFrameSize:(CGSize)imageSize // framesPerSecond:(NSUInteger)fps {

    NSError *error = nil;

    // set up file manager, and file videoOutputPath, remove "test_output.mp4" if it exists... //NSString *videoOutputPath = @"/Users/someuser/Desktop/test_output.mp4"; NSFileManager *fileMgr = [NSFileManager defaultManager]; NSString *documentsDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"]; NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mp4"]; //NSLog(@"-->videoOutputPath= %@", videoOutputPath); // get rid of existing mp4 if exists... if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES) NSLog(@"Unable to delete file: %@", [error localizedDescription]);

    CGSize imageSize = CGSizeMake(400, 200); NSUInteger fps = 30;

    //NSMutableArray *imageArray; //imageArray = [[NSMutableArray alloc] initWithObjects:@"download.jpeg", @"download2.jpeg", nil]; NSMutableArray imageArray; NSArray imagePaths = [[NSBundle mainBundle] pathsForResourcesOfType:@"jpg" inDirectory:nil]; imageArray = [[NSMutableArray alloc] initWithCapacity:imagePaths.count]; NSLog(@"-->imageArray.count= %i", imageArray.count); for (NSString* path in imagePaths) { [imageArray addObject:[UIImage imageWithContentsOfFile:path]]; //NSLog(@"-->image path= %@", path); }

    ////////////// end setup ///////////////////////////////////

    NSLog(@"Start building video from defined frames.");

    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL: [NSURL fileURLWithPath:videoOutputPath] fileType:AVFileTypeQuickTimeMovie error:&error]; NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys: AVVideoCodecH264, AVVideoCodecKey, [NSNumber numberWithInt:imageSize.width], AVVideoWidthKey, [NSNumber numberWithInt:imageSize.height], AVVideoHeightKey, nil];

    AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput sourcePixelBufferAttributes:nil];

    NSParameterAssert(videoWriterInput); NSParameterAssert([videoWriter canAddInput:videoWriterInput]); videoWriterInput.expectsMediaDataInRealTime = YES; [videoWriter addInput:videoWriterInput];

    //Start a session: [videoWriter startWriting]; [videoWriter startSessionAtSourceTime:kCMTimeZero];

    CVPixelBufferRef buffer = NULL;

    //convert uiimage to CGImage. int frameCount = 0; double numberOfSecondsPerFrame = 6; double frameDuration = fps * numberOfSecondsPerFrame;

    //for(VideoFrame * frm in imageArray) NSLog(@"****************************"); for(UIImage * img in imageArray) { //UIImage * img = frm._imageFrame; buffer = [self pixelBufferFromCGImage:[img CGImage]];

    BOOL append_ok = NO;
    int j = 0;
    while (!append_ok && j < 30) {
        if (adaptor.assetWriterInput.readyForMoreMediaData)  {
            //print out status:
            NSLog(@"Processing video frame (%d,%d)",frameCount,[imageArray count]);
    
            CMTime frameTime = CMTimeMake(frameCount*frameDuration,(int32_t) fps);
            append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
            if(!append_ok){
                NSError *error = videoWriter.error;
                if(error!=nil) {
                    NSLog(@"Unresolved error %@,%@.", error, [error userInfo]);
                }
            }
        }
        else {
            printf("adaptor not ready %d, %d\n", frameCount, j);
            [NSThread sleepForTimeInterval:0.1];
        }
        j++;
    }
    if (!append_ok) {
        printf("error appending image %d times %d\n, with error.", frameCount, j);
    }
    frameCount++;
    

    } NSLog(@"****************************");

    //Finish the session: [videoWriterInput markAsFinished]; [videoWriter finishWriting]; NSLog(@"Write Ended");

    //////////////////////////////////////////////////////////////////////////// ////////////// OK now add an audio file to move file ///////////////////// AVMutableComposition* mixComposition = [AVMutableComposition composition];

    NSString *bundleDirectory = [[NSBundle mainBundle] bundlePath]; // audio input file... NSString *audio_inputFilePath = [bundleDirectory stringByAppendingPathComponent:@"30secs.mp3"]; NSURL *audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];

    // this is the video file that was just written above, full path to file is in --> videoOutputPath NSURL *video_inputFileUrl = [NSURL fileURLWithPath:videoOutputPath];

    // create the final video output file as MOV file - may need to be MP4, but this works so far... NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:@"final_video.mp4"]; NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];

    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath]) [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];

    CMTime nextClipStartTime = kCMTimeZero;

    AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil]; CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration); AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid]; [a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];

    //nextClipStartTime = CMTimeAdd(nextClipStartTime, a_timeRange.duration);

    AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil]; CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration); AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid]; [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];

    //AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality]; __block AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetPassthrough];

    NSLog(@"support file types= %@", [_assetExport supportedFileTypes]); _assetExport.outputFileType = @"com.apple.quicktime-movie"; NSLog(@"support file types= %@", [_assetExport supportedFileTypes]); _assetExport.outputURL = outputFileUrl;

    [_assetExport exportAsynchronouslyWithCompletionHandler:^{ switch (_assetExport.status) { case AVAssetExportSessionStatusCompleted: // Custom method to import the Exported Video NSLog(@"completed!!!"); break; case AVAssetExportSessionStatusFailed: // NSLog(@"Failed:%@",_assetExport.error); break; case AVAssetExportSessionStatusCancelled: // NSLog(@"Canceled:%@",_assetExport.error); break; default: break; } }];

    ///// THAT IS IT DONE... the final video file will be written here... NSLog(@"DONE.....outputFilePath--->%@", outputFilePath);

    // the final video file will be located somewhere like here: // /Users/caferrara/Library/Application Support/iPhone Simulator/6.0/Applications/D4B12FEE-E09C-4B12-B772-7F1BD6011BE1/Documents/outputFile.mov

}

于 2012-11-26T07:17:52.893 に答える
2

すでに圧縮されている h264 バッファを AVAssetWriter で多重化するには、ドキュメントに記載されている内容に反して、outputSettings に nil を指定する必要があります。

だから、代わりに

videoInput = [[AVAssetWriterInput alloc] 
    initWithMediaType:AVMediaTypeVideo outputSettings:@{
        AVVideoCodecKey: AVVideoCodecH264,
    }
    sourceFormatHint:myVideoFormat
];

あなたがするべきです

videoInput = [[AVAssetWriterInput alloc] 
    initWithMediaType:AVMediaTypeVideo
    outputSettings:nil
    sourceFormatHint:myVideoFormat
];

これにより、AVAssetWriterInput が何かをエンコード/トランスコードしようとすることなく、ビデオまたはオーディオ データのパススルーを行うことができます。

于 2015-07-20T05:01:12.920 に答える