I'm trying to capture a movie using AVAssetWriter. On the iPhone 5 everything works fine: the movie is captured and saved without a hitch.

On the iPhone 4, however, the sample buffer skips several frames and the resulting movie is no good.

Here is my code:

- (void) initCaptureSession{
//  open the session and set the quality preset
    session                             = [[AVCaptureSession alloc] init];
    if([session canSetSessionPreset:AVCaptureSessionPreset640x480]) session.sessionPreset = AVCaptureSessionPreset640x480;

//  get devices for audio and video
    deviceVideo                         = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    deviceAudio                         = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];

    NSError *error                      = nil;

//  create input of audio and video
    inputVideo                          = [AVCaptureDeviceInput deviceInputWithDevice:deviceVideo error:&error];
    if (!inputVideo)    NSLog(@"ERROR: trying to open camera: %@", error);

    inputAudio                          = [AVCaptureDeviceInput deviceInputWithDevice:deviceAudio error:&error];
    if (!inputAudio)    NSLog(@"ERROR: trying to open audio: %@", error);

//    CMTime maxDuration                  = CMTimeMake(60, 1);

//  create output audio and video
    outputVideo                         = [[AVCaptureVideoDataOutput alloc] init];
    outputVideo.alwaysDiscardsLateVideoFrames = NO;
    outputVideo.videoSettings           = [NSDictionary dictionaryWithObject: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];

    outputAudio                         = [[AVCaptureAudioDataOutput alloc] init];   

//  add inputs and outputs in the current session
    [session beginConfiguration];
    if ([session canAddInput:inputVideo])[session addInput:inputVideo];
    if ([session canAddInput:inputAudio])[session addInput:inputAudio];
    if ([session canAddOutput:outputVideo]) [session addOutput:outputVideo];
    if ([session canAddOutput:outputAudio]) [session addOutput:outputAudio];
    [session commitConfiguration];

//  turn off the torch
    [deviceVideo lockForConfiguration:&error];
    if([deviceVideo hasTorch] && [deviceVideo isTorchModeSupported:AVCaptureTorchModeOff]) [deviceVideo setTorchMode:AVCaptureTorchModeOff];
    [deviceVideo unlockForConfiguration];

    [self configDevice];

//  create the preview view to show the video
    captureVideoPreviewLayer            = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    [captureVideoPreviewLayer setBackgroundColor:[[UIColor blackColor] CGColor]];
    [captureVideoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];

    captureVideoPreviewLayer.frame      = viewPreview.bounds;
    [viewPreview.layer addSublayer:captureVideoPreviewLayer];

    CALayer *viewLayer                  = viewPreview.layer;

    [viewLayer setMasksToBounds:YES];
    [captureVideoPreviewLayer setFrame:[viewLayer bounds]];
    [viewLayer addSublayer:captureVideoPreviewLayer];

//  dispatch outputs to delegate in a queue
    dispatch_queue_t queue = dispatch_queue_create("MyQueue", NULL);
    [outputVideo setSampleBufferDelegate:self queue:queue];
    [outputAudio setSampleBufferDelegate:self queue:queue];
//    dispatch_release(queue);

    [session startRunning];
}

-(BOOL) setupWriter{    
    urlOutput           = [self tempFileURL];
    NSError *error      = nil;

    videoWriter         = [[AVAssetWriter alloc] initWithURL:urlOutput fileType:AVFileTypeMPEG4 error:&error];
    NSParameterAssert(videoWriter);

//  Add metadata  
    NSArray *existingMetadataArray      = videoWriter.metadata;
    NSMutableArray *newMetadataArray    = nil;
    if (existingMetadataArray) {
        newMetadataArray = [existingMetadataArray mutableCopy];
    } else {
        newMetadataArray = [[NSMutableArray alloc] init];
    }

    AVMutableMetadataItem *mutableItemLocation  = [[AVMutableMetadataItem alloc] init];
    mutableItemLocation.keySpace                = AVMetadataKeySpaceCommon;
    mutableItemLocation.key                     = AVMetadataCommonKeyLocation;
    mutableItemLocation.value                   = [NSString stringWithFormat:@"%+08.4lf%+09.4lf/", location.latitude, location.longitude];

    AVMutableMetadataItem *mutableItemModel     = [[AVMutableMetadataItem alloc] init];
    mutableItemModel.keySpace                   = AVMetadataKeySpaceCommon;
    mutableItemModel.key                        = AVMetadataCommonKeyModel;
    mutableItemModel.value                      = [[UIDevice currentDevice] model];

    [newMetadataArray addObject:mutableItemLocation];
    [newMetadataArray addObject:mutableItemModel];

    videoWriter.metadata = newMetadataArray;

//  video Configuration
    NSDictionary *videoCleanApertureSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                                [NSNumber numberWithInt:640], AVVideoCleanApertureWidthKey,
                                                [NSNumber numberWithInt:360], AVVideoCleanApertureHeightKey,
                                                [NSNumber numberWithInt:2], AVVideoCleanApertureHorizontalOffsetKey,
                                                [NSNumber numberWithInt:2], AVVideoCleanApertureVerticalOffsetKey,
                                                nil];


    NSDictionary *videoAspectRatioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                              [NSNumber numberWithInt:1], AVVideoPixelAspectRatioHorizontalSpacingKey,
                                              [NSNumber numberWithInt:1],AVVideoPixelAspectRatioVerticalSpacingKey,
                                              nil];

    NSDictionary *codecSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:1024000], AVVideoAverageBitRateKey,
                                   [NSNumber numberWithInt:90],AVVideoMaxKeyFrameIntervalKey,
                                   videoCleanApertureSettings, AVVideoCleanApertureKey,
                                   videoAspectRatioSettings, AVVideoPixelAspectRatioKey,
                                   AVVideoProfileLevelH264Main30, AVVideoProfileLevelKey,
                                   nil];

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   codecSettings,AVVideoCompressionPropertiesKey,
                                   [NSNumber numberWithInt:640], AVVideoWidthKey,
                                   [NSNumber numberWithInt:360], AVVideoHeightKey,
                                   nil];

    videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];

    NSParameterAssert(videoWriterInput);
    videoWriterInput.expectsMediaDataInRealTime = YES;

    // Add the audio input
    AudioChannelLayout acl;
    bzero( &acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;


    NSDictionary* audioOutputSettings = nil;
    // Both types of audio settings cause the output video file to be corrupted.
//    if( NO ) {
        // should work from the iPhone 3GS and the iPod touch 3rd generation onward
        audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               [ NSNumber numberWithInt: kAudioFormatMPEG4AAC ], AVFormatIDKey,
                               [ NSNumber numberWithInt: 2 ], AVNumberOfChannelsKey,
                               [ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
                               [ NSNumber numberWithInt: 64000 ], AVEncoderBitRateKey,
                               [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
                               nil];
//    } else {
//        // should work on any device but requires more space
//        audioOutputSettings = [ NSDictionary dictionaryWithObjectsAndKeys:
//                               [ NSNumber numberWithInt: kAudioFormatAppleLossless ], AVFormatIDKey,
//                               [ NSNumber numberWithInt: 16 ], AVEncoderBitDepthHintKey,
//                               [ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
//                               [ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
//                               [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
//                               nil ];
//    }

    audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType: AVMediaTypeAudio outputSettings: audioOutputSettings];

    audioWriterInput.expectsMediaDataInRealTime = YES;  


    // add input
    [videoWriter addInput:videoWriterInput];
    [videoWriter addInput:audioWriterInput];

    return YES;
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
    if( !CMSampleBufferDataIsReady(sampleBuffer) ){
        NSLog( @"sample buffer is not ready. Skipping sample" );
        return;
    }

    if(isRecording == YES ){
        lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        if(videoWriter.status != AVAssetWriterStatusWriting  ){
            [videoWriter startWriting];
            [videoWriter startSessionAtSourceTime:lastSampleTime];
        }

        if( captureOutput == outputVideo ){
            [self newVideoSample:sampleBuffer];
        } else if( captureOutput == outputAudio) {
            [self newAudioSample:sampleBuffer];
        }
    }
}

-(void) newVideoSample:(CMSampleBufferRef)sampleBuffer{
    if( isRecording ){
        if( videoWriter.status > AVAssetWriterStatusWriting ) {
            NSLog(@"Warning: writer status is %d", videoWriter.status);
            if( videoWriter.status == AVAssetWriterStatusFailed )
                NSLog(@"Error: %@", videoWriter.error);
            return;
        }        


        while (!videoWriterInput.readyForMoreMediaData) {
            NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.1];
            [[NSRunLoop currentRunLoop] runUntilDate:maxDate];
        }

        if( ![videoWriterInput appendSampleBuffer:sampleBuffer] )
            NSLog(@"Unable to write to video input");
    }
}

-(void) newAudioSample:(CMSampleBufferRef)sampleBuffer{
    if( isRecording ){
        if( videoWriter.status > AVAssetWriterStatusWriting ) {
            NSLog(@"Warning: writer status is %d", videoWriter.status);
            if( videoWriter.status == AVAssetWriterStatusFailed )
                NSLog(@"Error: %@", videoWriter.error);
            return;
        }

        while (!audioWriterInput.readyForMoreMediaData) {
             NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.1];
            [[NSRunLoop currentRunLoop] runUntilDate:maxDate];
        }

        if( ![audioWriterInput appendSampleBuffer:sampleBuffer] )
            NSLog(@"Unable to write to audio input");
    }
}

-(void) startVideoRecording {
    if( !isRecording ){
        NSLog(@"start video recording...");
        if( ![self setupWriter] ) {
            NSLog(@"Setup Writer Failed") ;

            return;
        }

        isRecording = YES;
        recorded    = NO;
    }
}

-(void) stopVideoRecording {
    if( isRecording ) {
        isRecording                 = NO;
        btRecord.hidden             = NO;
        btRecording.hidden          = YES;
        [timerToRecord invalidate];
        timerToRecord               = nil;

//        [session stopRunning];

        [videoWriter finishWritingWithCompletionHandler:^{
            if (videoWriter.status == AVAssetWriterStatusCompleted) {
                videoWriterInput    = nil;
                audioWriterInput    = nil;
                videoWriter         = nil;

                NSLog(@"finishWriting returned successfully");

                recorded    = YES;
            } else {
                NSLog(@"finishWriting returned unsuccessfully");
            }
            }
        }];    

        NSLog(@"video recording stopped");

        [self performSelector:@selector(openPlayer) withObject:nil afterDelay:0.5];
    }
} 

If I remove these lines:

        while (!audioWriterInput.readyForMoreMediaData) {
             NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.1];
            [[NSRunLoop currentRunLoop] runUntilDate:maxDate];
        }

I get this error:

*** Terminating app due to uncaught exception 'NSInternalInconsistencyException', reason: '*** -[AVAssetWriterInput appendSampleBuffer:] A sample buffer cannot be appended when readyForMoreMediaData is NO.'

On the iPhone 5 I never need this loop at all.
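One thing I'm considering (just a sketch, I haven't verified it fixes the iPhone 4) is to drop the sample instead of spinning the run loop when the input isn't ready, so the capture queue is never blocked; setting outputVideo.alwaysDiscardsLateVideoFrames to YES goes in the same direction. Rewriting newVideoSample: that way would look roughly like this:

-(void) newVideoSample:(CMSampleBufferRef)sampleBuffer{
    if( isRecording ){
        if( videoWriter.status > AVAssetWriterStatusWriting ) {
            if( videoWriter.status == AVAssetWriterStatusFailed )
                NSLog(@"Error: %@", videoWriter.error);
            return;
        }

        //  check readyForMoreMediaData *before* appending, so the exception
        //  above can never be thrown; the cost is a dropped frame instead of
        //  a blocked capture queue
        if( !videoWriterInput.readyForMoreMediaData ){
            NSLog(@"video input not ready, dropping frame");
            return;
        }

        if( ![videoWriterInput appendSampleBuffer:sampleBuffer] )
            NSLog(@"Unable to write to video input");
    }
}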

I've read some examples here, but I couldn't figure out how to get a smooth movie on the iPhone 4.

If anyone has a suggestion, or a complete example of making movies with AVAssetWriter that works on the iPhone 3GS, iPhone 4, iPhone 4S and iPhone 5, I'd be very grateful.

Thanks


2 Answers


After a week of fighting with AVFoundation, I arrived at a good solution.

After watching WWDC 2012 - Session 520, I put the following approach together.

First, I record the movie with AVCaptureMovieFileOutput using the session preset AVCaptureSessionPreset640x480; a rough sketch of that step is just below.
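This step isn't part of the export code further down, so here is a minimal sketch of it (movieFileOutput is an assumed ivar; tempFileURL: and media_video are the helper and constant defined at the end):

-(void) startRecordingMovie{
    //  sketch only: movieFileOutput is an assumed AVCaptureMovieFileOutput ivar
    movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];

    if ([session canSetSessionPreset:AVCaptureSessionPreset640x480])
        session.sessionPreset = AVCaptureSessionPreset640x480;

    if ([session canAddOutput:movieFileOutput])
        [session addOutput:movieFileOutput];

    //  AVCaptureMovieFileOutput writes the file itself, so there is no
    //  AVAssetWriter and no readyForMoreMediaData loop during recording
    [movieFileOutput startRecordingToOutputFileURL:[self tempFileURL:media_video]
                                 recordingDelegate:self];
}

The recording delegate only needs captureOutput:didFinishRecordingToOutputFileURL:fromConnections:error: to know when the file is complete.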

That way, once the user decides whether to save and share, all I have to do is keep or delete the movie.

If the user chooses to save (or save and share), I compress the recording separately.

I compress the video first, then the audio, and finally merge the two tracks.

Take a look at my code:

-(void)exportMediaWithURL:(NSURL *)url location:(CLLocationCoordinate2D)location mirror:(BOOL)mirror{
    urlMedia                        = url;
    locationMedia                   = location;

    videoRecorded                   = NO;
    audioRecorded                   = NO;

    asset                           = [AVAsset assetWithURL:urlMedia];

    progressVideo                   = 0.0;
    progressAudio                   = 0.0;
    progressMarge                   = 0.0;
    progressFactor                  = 3.0;

    mirrored                        = mirror;

    limitTime                       = CMTimeMake(1000*60, 1000);

    [asset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"] completionHandler:^() {
        NSError *error;
        AVKeyValueStatus stats      = [asset statusOfValueForKey:@"tracks" error:&error];

        if(stats == AVKeyValueStatusLoaded){
            if([[asset tracksWithMediaType:AVMediaTypeVideo] count] > 0) video_track = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
            if([[asset tracksWithMediaType:AVMediaTypeAudio] count] > 0) audio_track = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];

            if(!audio_track) progressFactor = 1.0;            
            if(video_track){
                if (CMTimeCompare(asset.duration, limitTime) > 0) {
                    totalTime = limitTime;
                }else{
                    totalTime = asset.duration;
                }
                [self exportVideo];
            }
        }
    }];
}

-(void)exportVideo{
    NSError *error;
    AVAssetReader *assetReader          = [AVAssetReader assetReaderWithAsset:asset error:&error];

    NSDictionary* videoSettings         = [NSDictionary dictionaryWithObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];

    AVAssetReaderOutput *videoOutput    = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:video_track outputSettings:videoSettings];

    [assetReader addOutput:videoOutput];

    assetReader.timeRange               = CMTimeRangeMake(kCMTimeZero, totalTime);

//  start session to make a movie
    if (assetVideoWriter.status == AVAssetWriterStatusUnknown) {
        if ([self setupWriterVideo]) {
            if ([assetVideoWriter startWriting]) {
                [assetVideoWriter startSessionAtSourceTime:kCMTimeZero];
            }
        }
    }            

    if([assetReader startReading]){                
        BOOL videoDone = NO;

        CMSampleBufferRef bufferVideo = NULL;

        while (!videoDone) {
            bufferVideo = NULL;
            if ([assetReader status] == AVAssetReaderStatusReading) bufferVideo = [videoOutput copyNextSampleBuffer];

            if(bufferVideo){
                [self newVideoSample:bufferVideo];
                CFRelease(bufferVideo);
            }else{
                videoDone = YES;
            }
        }

//      finish
        [videoWriterInput markAsFinished];
        [assetVideoWriter finishWritingWithCompletionHandler:^{}];

        //  workaround ("gambiarra") for the dealloc problem when using a block as the completion handler
        while (!videoRecorded) {
            if (assetVideoWriter.status == AVAssetWriterStatusCompleted) {
                videoWriterInput    = nil;
                assetVideoWriter    = nil;

                videoRecorded       = YES;

                if (audio_track) {
                    [self exportAudio];
                }else{
                    NSMutableDictionary *infoToSend = [NSMutableDictionary new];
                    [infoToSend setValue:urlOutputVideo forKey:@"url_media"];
                    [[NSNotificationCenter defaultCenter] postNotificationName:EXPORT_STATUS_DONE object:self userInfo:infoToSend];
                }
            }
        }
    }
}

-(void)exportAudio{            
    NSError *error;    
    AVAssetReader *assetReader          = [AVAssetReader assetReaderWithAsset:asset error:&error];

    NSDictionary* audioSettings         = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey, nil];

    AVAssetReaderOutput *audioOutput    = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audio_track outputSettings:audioSettings];

    [assetReader addOutput:audioOutput];

    assetReader.timeRange               = CMTimeRangeMake(kCMTimeZero, totalTime);

//   start session to make a movie
    if (assetAudioWriter.status == AVAssetWriterStatusUnknown) {
        if ([self setupWriterAudio]) {
            if ([assetAudioWriter startWriting]) {
                [assetAudioWriter startSessionAtSourceTime:kCMTimeZero];
            }
        }
    }

    if([assetReader startReading]){                
        BOOL audioDone = NO;

        CMSampleBufferRef bufferAudio = NULL;

        while (!audioDone) {
            bufferAudio = NULL;
            if ([assetReader status] == AVAssetReaderStatusReading) bufferAudio = [audioOutput copyNextSampleBuffer];

            if(bufferAudio){                     
                [self newAudioSample:bufferAudio];
                CFRelease(bufferAudio);
            }else{
                audioDone = YES;
            }
        }

//      finish
        [audioWriterInput markAsFinished];
        [assetAudioWriter finishWritingWithCompletionHandler:^{}];

//      workaround ("gambiarra") for the dealloc problem when using a block as the completion handler
        while (!audioRecorded) {
            if (assetAudioWriter.status == AVAssetWriterStatusCompleted) {
                audioWriterInput    = nil;
                assetAudioWriter    = nil;

                audioRecorded       = YES;

                [self margeFile];
            }
        }                
    }
}

-(void)margeFile{    
    AVURLAsset *assetVideo                              = [AVURLAsset assetWithURL:urlOutputVideo];
    AVAssetTrack *video_track_marge                     = [[assetVideo tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

    AVURLAsset *assetAudio                              = [AVURLAsset assetWithURL:urlOutputAudio];
    AVAssetTrack *audio_track_marge                     = [[assetAudio tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];

    CMTime startTime                                    = CMTimeMake(1, 1);
    CMTimeRange timeRangeVideo                          = CMTimeRangeMake(kCMTimeZero, assetVideo.duration);
    CMTimeRange timeRangeAudio                          = CMTimeRangeMake(kCMTimeZero, assetAudio.duration);

    AVMutableComposition * composition                  = [AVMutableComposition composition];
    AVMutableCompositionTrack *compositionVideoTrack    = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    if(mirrored) compositionVideoTrack.preferredTransform = CGAffineTransformMakeRotation(M_PI);
    AVMutableCompositionTrack *compositionAudioTrack    = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

    NSError *error;

    [compositionVideoTrack insertTimeRange:timeRangeVideo ofTrack:video_track_marge atTime:startTime error:&error];
    [compositionAudioTrack insertTimeRange:timeRangeAudio ofTrack:audio_track_marge atTime:startTime error:&error];

    AVAssetExportSession *exportSession                 = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetPassthrough];
    exportSession.outputFileType                        = AVFileTypeAppleM4V;
    exportSession.outputURL                             = [self tempFileURL:media_mixed];
    exportSession.shouldOptimizeForNetworkUse           = YES;
    exportSession.metadata                              = newMetadataArray;

    exportSession.timeRange                             = CMTimeRangeMake(CMTimeMakeWithSeconds(1.0, 600), totalTime);

    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        NSMutableDictionary *infoToSend = [NSMutableDictionary new];

        switch (exportSession.status) {
            case AVAssetExportSessionStatusCompleted:
                [infoToSend setValue:exportSession.outputURL forKey:@"url_media"];                
                [[NSNotificationCenter defaultCenter] postNotificationName:EXPORT_STATUS_DONE object:self userInfo:infoToSend];
                break;

            case AVAssetExportSessionStatusExporting:
                [[NSNotificationCenter defaultCenter] postNotificationName:EXPORT_STATUS_EXPORTING object:self];
                break;

            case AVAssetExportSessionStatusFailed:
                NSLog(@"failed");
                break;
        }

    }];

    while (exportSession.status == AVAssetExportSessionStatusExporting) {        
        progressMarge = exportSession.progress;

        [self postProgress];
    }
}

-(BOOL) setupWriterVideo{
    urlOutputVideo                              = [self tempFileURL:media_video];
    NSError *error                              = nil;

    assetVideoWriter                            = [[AVAssetWriter alloc] initWithURL:urlOutputVideo fileType:AVFileTypeMPEG4 error:&error];
    NSParameterAssert(assetVideoWriter);

    //  Add metadata
    NSArray *existingMetadataArray              = assetVideoWriter.metadata;
    if (existingMetadataArray) {
        newMetadataArray = [existingMetadataArray mutableCopy];
    } else {
        newMetadataArray = [[NSMutableArray alloc] init];
    }

    AVMutableMetadataItem *mutableItemLocation  = [[AVMutableMetadataItem alloc] init];
    mutableItemLocation.keySpace                = AVMetadataKeySpaceCommon;
    mutableItemLocation.key                     = AVMetadataCommonKeyLocation;
    mutableItemLocation.value                   = [NSString stringWithFormat:@"%+08.4lf%+09.4lf/", locationMedia.latitude, locationMedia.longitude];

    AVMutableMetadataItem *mutableItemModel     = [[AVMutableMetadataItem alloc] init];
    mutableItemModel.keySpace                   = AVMetadataKeySpaceCommon;
    mutableItemModel.key                        = AVMetadataCommonKeyModel;
    mutableItemModel.value                      = [[UIDevice currentDevice] model];

    [newMetadataArray addObject:mutableItemLocation];
    [newMetadataArray addObject:mutableItemModel];

    assetVideoWriter.metadata                    = newMetadataArray;
    assetVideoWriter.shouldOptimizeForNetworkUse = YES;

    videoWriterInput                            = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:[self videoConfiguration]];
    NSParameterAssert(videoWriterInput);
    videoWriterInput.expectsMediaDataInRealTime = NO;

//  add input
    [assetVideoWriter addInput:videoWriterInput];

    return YES;
}

-(BOOL) setupWriterAudio{
    urlOutputAudio                              = [self tempFileURL:media_audio];
    NSError *error                              = nil;

    assetAudioWriter                            = [[AVAssetWriter alloc] initWithURL:urlOutputAudio fileType:AVFileTypeAppleM4A error:&error];
    NSParameterAssert(assetAudioWriter);

    audioWriterInput                            = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:[self audioConfiguration]];
    audioWriterInput.expectsMediaDataInRealTime = NO;

    // add input
    [assetAudioWriter addInput:audioWriterInput];

    return YES;
}

- (NSDictionary *)videoConfiguration{
    //  video Configuration
    //    float bitsPerPixel;
    //    int numPixels = 640.0 * 360.0;
    //  int bitsPerSecond;
    //
    //  // Assume that lower-than-SD resolutions are intended for streaming, and use a lower bitrate
    //  if ( numPixels < (640 * 360.0) )
    //      bitsPerPixel = 4.05; // This bitrate matches the quality produced by AVCaptureSessionPresetMedium or Low.
    //  else
    //      bitsPerPixel = 11.4; // This bitrate matches the quality produced by AVCaptureSessionPresetHigh.
    //
    //  bitsPerSecond = numPixels * bitsPerPixel;

    NSDictionary *videoCleanApertureSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                                [NSNumber numberWithInt:640], AVVideoCleanApertureWidthKey,
                                                [NSNumber numberWithInt:360], AVVideoCleanApertureHeightKey,
                                                [NSNumber numberWithInt:2], AVVideoCleanApertureHorizontalOffsetKey,
                                                [NSNumber numberWithInt:2], AVVideoCleanApertureVerticalOffsetKey,
                                                nil];


    NSDictionary *videoAspectRatioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                              [NSNumber numberWithInt:1],AVVideoPixelAspectRatioHorizontalSpacingKey,
                                              [NSNumber numberWithInt:1],AVVideoPixelAspectRatioVerticalSpacingKey,
                                              nil];

    NSDictionary *codecSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:1024000], AVVideoAverageBitRateKey,
                                   [NSNumber numberWithInt:90],AVVideoMaxKeyFrameIntervalKey,
                                   videoCleanApertureSettings, AVVideoCleanApertureKey,
                                   videoAspectRatioSettings, AVVideoPixelAspectRatioKey,
                                   AVVideoProfileLevelH264Main30, AVVideoProfileLevelKey,
                                   nil];

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   AVVideoScalingModeResizeAspectFill, AVVideoScalingModeKey,
                                   codecSettings,AVVideoCompressionPropertiesKey,
                                   [NSNumber numberWithInt:640], AVVideoWidthKey,
                                   [NSNumber numberWithInt:360], AVVideoHeightKey,
                                   nil];

    return videoSettings;
}

-(NSDictionary *)audioConfiguration{
    // Add the audio input
    AudioChannelLayout acl;
    bzero( &acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;


    NSDictionary* audioOutputSettings = nil;
    // Both types of audio settings cause the output video file to be corrupted.
    //    if( NO ) {
    // should work from the iPhone 3GS and the iPod touch 3rd generation onward
    audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                           [ NSNumber numberWithInt: kAudioFormatMPEG4AAC ], AVFormatIDKey,
                           [ NSNumber numberWithInt: 2 ], AVNumberOfChannelsKey,
                           [ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
                           [ NSNumber numberWithInt: 128000 ], AVEncoderBitRateKey,
                           [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
                           nil];
    //    } else {
    //        // should work on any device but requires more space
//        audioOutputSettings = [ NSDictionary dictionaryWithObjectsAndKeys:
//                               [ NSNumber numberWithInt: kAudioFormatAppleLossless ], AVFormatIDKey,
//                               [ NSNumber numberWithInt: 16 ], AVEncoderBitDepthHintKey,
//                               [ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
//                               [ NSNumber numberWithInt: 2 ], AVNumberOfChannelsKey,
//                               [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
//                               nil ];
    //    }

    return audioOutputSettings;
}

-(void) newVideoSample:(CMSampleBufferRef)sampleBuffer{
    if( assetVideoWriter.status > AVAssetWriterStatusWriting ) {
        if( assetVideoWriter.status == AVAssetWriterStatusFailed )
            NSLog(@"Error: %@", assetVideoWriter.error);
        return;
    }

    if (assetVideoWriter.status == AVAssetWriterStatusWriting ) {
        while (!videoWriterInput.readyForMoreMediaData) NSLog(@"waitting video");

        if (videoWriterInput.readyForMoreMediaData) {
            CMTime presTime     = CMSampleBufferGetPresentationTimeStamp( sampleBuffer );
            float valueLoading  = CMTimeGetSeconds(presTime);
            float valueTotal    = CMTimeGetSeconds(totalTime);

            progressVideo       =  valueLoading / valueTotal;

            [self postProgress];

            if (![videoWriterInput appendSampleBuffer:sampleBuffer]) NSLog(@"Unable to write to video input");
        }
    }    
}

-(void) newAudioSample:(CMSampleBufferRef)sampleBuffer{
    if( assetAudioWriter.status > AVAssetWriterStatusWriting ) {
        if( assetAudioWriter.status == AVAssetWriterStatusFailed )
            NSLog(@"Error: %@", assetAudioWriter.error);
        return;
    }

    if (assetAudioWriter.status == AVAssetWriterStatusWriting ) {
        while (!audioWriterInput.readyForMoreMediaData) NSLog(@"waitting audio");

        if (audioWriterInput.readyForMoreMediaData) {
            CMTime presTime     = CMSampleBufferGetPresentationTimeStamp( sampleBuffer );
            float valueLoading  = CMTimeGetSeconds(presTime);
            float valueTotal    = CMTimeGetSeconds(totalTime);

            progressAudio       =  valueLoading / valueTotal;

            [self postProgress];

            if (![audioWriterInput appendSampleBuffer:sampleBuffer]) {
                NSLog(@"Unable to write to audio input");
            }
        }
    }
}

- (void)postProgress{    
    float totalProgress = (progressVideo + progressAudio + progressMarge) / progressFactor;

    NSMutableDictionary *infoToSend = [NSMutableDictionary new];
    [infoToSend setValue:[NSNumber numberWithFloat:totalProgress] forKey:@"progress"];

    [[NSNotificationCenter defaultCenter] postNotificationName:EXPORT_STATUS_EXPORTING object:self userInfo:infoToSend];
}


- (NSURL *)tempFileURL:(int)typeMedia {
    NSString *outputPath = nil;

    switch (typeMedia) {
        case media_video:
            outputPath          = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output_export.mp4"];
            break;

        case media_audio:
            outputPath          = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output_export.m4a"];
            break;

        case media_mixed:
            outputPath          = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"mixed.mp4"];
            break;
    }

    NSURL *outputURL            = [[NSURL alloc] initFileURLWithPath:outputPath];
    NSFileManager *fileManager  = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:outputPath]) [[NSFileManager defaultManager] removeItemAtPath:outputPath error:nil];
    return outputURL;
}

- (void) dealloc {
    NSLog(@"dealloc video exporter");
    [[NSNotificationCenter defaultCenter] removeObserver:self];

    assetVideoWriter        = nil;
    assetAudioWriter        = nil;

    videoWriterInput        = nil;
    audioWriterInput        = nil;

    urlMedia                = nil;
    urlOutputVideo          = nil;
    urlOutputAudio          = nil;
    urlOutputFinal          = nil;
}

@end
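
For context, a hypothetical call site (MovieExporter, recordedMovieURL, latitude and longitude are placeholder names, not from my project) would look something like this:

//  hypothetical call site for the exporter class above
MovieExporter *exporter = [[MovieExporter alloc] init];

[[NSNotificationCenter defaultCenter] addObserver:self
                                         selector:@selector(exportDidFinish:)
                                             name:EXPORT_STATUS_DONE
                                           object:exporter];

[exporter exportMediaWithURL:recordedMovieURL
                    location:CLLocationCoordinate2DMake(latitude, longitude)
                      mirror:NO];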

If anyone has anything to add, please post it here!

Answered 2013-04-05T01:31:25

Set AVAssetWriterInput.outputSettings[AVVideoCompressionPropertiesKey][AVVideoAllowFrameReorderingKey] = @(NO).
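outputSettings is read-only once the AVAssetWriterInput exists, so the flag has to go into the compression properties up front; a minimal sketch, reusing the 640x360 settings from the question (AVVideoAllowFrameReorderingKey requires iOS 7 or later):

//  disable frame reordering (B-frames) so the encoder never holds frames
//  back waiting to reorder them, which lowers appendSampleBuffer: latency
NSDictionary *codecSettings = @{ AVVideoAverageBitRateKey       : @1024000,
                                 AVVideoMaxKeyFrameIntervalKey  : @90,
                                 AVVideoAllowFrameReorderingKey : @NO };

NSDictionary *videoSettings = @{ AVVideoCodecKey                 : AVVideoCodecH264,
                                 AVVideoCompressionPropertiesKey : codecSettings,
                                 AVVideoWidthKey                 : @640,
                                 AVVideoHeightKey                : @360 };

videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                      outputSettings:videoSettings];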

Answered 2018-05-14T08:31:03