6

「AVFoundation」を使ってつるのような動画アプリを作ってみました。これで、ビデオを保存して再生AVCaptureVideoDataOutputできるようになりましたが、どういうわけかオーディオが機能せず、理由がわかりません。iOSアプリ初心者なので分かりにくいかもしれません。私が言おうとしていることを理解し、いくつかのヒントを教えていただければ幸いです。

これは私が使用しているコードです。

セットアップAVCaptureVideoDataOutputAVCaptureAudioDataOutput:

AVCaptureVideoDataOutput* videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
[CaptureSession addOutput:videoDataOutput];

videoDataOutput.videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                 [NSNumber numberWithInt:kCVPixelFormatType_32BGRA],kCVPixelBufferPixelFormatTypeKey,
                                 nil];

dispatch_queue_t videoQueue = dispatch_queue_create("VideoQueue", NULL);
[videoDataOutput setSampleBufferDelegate:self queue:videoQueue];

AVCaptureAudioDataOutput *audioDataOutput = [[AVCaptureAudioDataOutput alloc] init];
[CaptureSession addOutput:audioDataOutput];

dispatch_queue_t audioQueue = dispatch_queue_create("AudioQueue", NULL);
[audioDataOutput setSampleBufferDelegate:self queue:audioQueue];

セットアップAVAssetWriteAVAssetWriterInput:

- (void)makeWriter{
pathString = [NSHomeDirectory()stringByAppendingPathComponent:@"Documents/capture.mov"];
exportURL = [NSURL fileURLWithPath:pathString];

if ([[NSFileManager defaultManager] fileExistsAtPath:exportURL.path])
{
    [[NSFileManager defaultManager] removeItemAtPath:exportURL.path error:nil];
}
NSError* error;
writer = [[AVAssetWriter alloc] initWithURL:exportURL
                                   fileType:AVFileTypeQuickTimeMovie
                                      error:&error];
NSDictionary* videoSetting = [NSDictionary dictionaryWithObjectsAndKeys:
                              AVVideoCodecH264, AVVideoCodecKey,
                              [NSNumber numberWithInt:1280], AVVideoWidthKey, 
                              [NSNumber numberWithInt:720], AVVideoHeightKey,
                              nil];

videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                      outputSettings:videoSetting];


videoWriterInput.expectsMediaDataInRealTime = YES;

// Add the audio input
AudioChannelLayout acl;
bzero( &acl, sizeof(acl));
acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;

NSDictionary* audioOutputSettings = nil;
// Both type of audio inputs causes output video file to be corrupted.
if( NO ) {
    // should work from iphone 3GS on and from ipod 3rd generation
    audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                           [ NSNumber numberWithInt: kAudioFormatMPEG4AAC ], AVFormatIDKey,
                           [ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
                           [ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
                           [ NSNumber numberWithInt: 64000 ], AVEncoderBitRateKey,
                           [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
                           nil];
} else {
    // should work on any device requires more space
    audioOutputSettings = [ NSDictionary dictionaryWithObjectsAndKeys:
                           [ NSNumber numberWithInt: kAudioFormatAppleLossless ], AVFormatIDKey,
                           [ NSNumber numberWithInt: 16 ], AVEncoderBitDepthHintKey,
                           [ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
                           [ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
                           [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
                           nil ];
}

audioWriterInput = [AVAssetWriterInput
                      assetWriterInputWithMediaType: AVMediaTypeAudio
                     outputSettings: audioOutputSettings ];

audioWriterInput.expectsMediaDataInRealTime = YES;    
// add input
[writer addInput:videoWriterInput];
[writer addInput:audioWriterInput];

}

そして最後にCaptureOutputコード:

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
if ((isPause) && (isRecording)) { return; }
if( !CMSampleBufferDataIsReady(sampleBuffer) ){return;}
if( isRecording == YES ) {
    isWritting = YES;
    if( writer.status != AVAssetWriterStatusWriting  ) {
        [writer startWriting];

        [writer startSessionAtSourceTime:kCMTimeZero];
    }

    if( [videoWriterInput isReadyForMoreMediaData] ) {
        CFRetain(sampleBuffer);
        CMSampleBufferRef newSampleBuffer = [self offsetTimmingWithSampleBufferForVideo:sampleBuffer];
        [videoWriterInput appendSampleBuffer:newSampleBuffer];

        CFRelease(sampleBuffer);
        CFRelease(newSampleBuffer);
    }
    writeFrames++;

}
}

- (CMSampleBufferRef)offsetTimmingWithSampleBufferForVideo:(CMSampleBufferRef)sampleBuffer
{
CMSampleBufferRef newSampleBuffer;
CMSampleTimingInfo sampleTimingInfo;
sampleTimingInfo.duration = CMTimeMake(1, 30);
sampleTimingInfo.presentationTimeStamp = CMTimeMake(writeFrames, 30);
sampleTimingInfo.decodeTimeStamp = kCMTimeInvalid;

CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault,
                                      sampleBuffer,
                                      1,
                                      &sampleTimingInfo,
                                      &newSampleBuffer);


return newSampleBuffer;
}
4

2 に答える 2

0

少なくとも 1 つの問題は、すべてのサンプル バッファーを videowriter 入力に入れることです。audiobuffer からのサンプルを audiowriterinput に入れる必要があります。

このSOの質問と回答をチェックしてください!

avcapturevideodataoutput および avcaptureaudiodataout を使用する場合のパフォーマンスの問題

于 2014-12-16T14:35:37.953 に答える