I am trying to get CMSampleBufferRef from both AVCaptureVideoDataOutput and AVCaptureAudioDataOutput.
AVCamRecorder.h
#import <AVFoundation/AVFoundation.h>
@interface AVCamRecorder : NSObject

// Capture session and the two data outputs created in -initWithSession:
@property (nonatomic, retain) AVCaptureSession *session;
@property (nonatomic, retain) AVCaptureVideoDataOutput *videoDataOutput;
@property (nonatomic, retain) AVCaptureAudioDataOutput *audioDataOutput;

- (id)initWithSession:(AVCaptureSession *)aSession;

@end
AVCamRecorder.m
#import "AVCamRecorder.h"
#import <AVFoundation/AVFoundation.h>
@interface AVCamRecorder (VideoDataOutputDelegate) <AVCaptureVideoDataOutputSampleBufferDelegate>
@end
@interface AVCamRecorder (AudioDataOutputDelegate) <AVCaptureAudioDataOutputSampleBufferDelegate>
@end
@implementation AVCamRecorder

- (id)initWithSession:(AVCaptureSession *)aSession
{
    self = [super init];
    if (self != nil) {
        // Audio data output
        AVCaptureAudioDataOutput *aAudioDataOutput = [[AVCaptureAudioDataOutput alloc] init];
        // Video data output
        AVCaptureVideoDataOutput *aMovieDataOutput = [[AVCaptureVideoDataOutput alloc] init];

        if ([aSession canAddOutput:aAudioDataOutput]) {
            [aSession addOutput:aAudioDataOutput];
        }
        if ([aSession canAddOutput:aMovieDataOutput]) {
            [aSession addOutput:aMovieDataOutput];
        }

        // Both outputs deliver their sample buffers to self on the main queue
        [aAudioDataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
        [aMovieDataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];

        [self setAudioDataOutput:aAudioDataOutput];
        [self setVideoDataOutput:aMovieDataOutput];
        [self setSession:aSession];
    }
    return self;
}

@end
@implementation AVCamRecorder (VideoDataOutputDelegate)

- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    NSLog(@"VideoDataOutputDelegate = %@", captureOutput);
}

@end

@implementation AVCamRecorder (AudioDataOutputDelegate)

- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    NSLog(@"AudioDataOutputDelegate = %@", captureOutput);
}

@end
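For completeness, the recorder is created and started roughly like this (a simplified sketch: device selection and error handling are omitted, and the exact calling code is not the issue here).

// Simplified sketch of how the recorder is created and started.
// Device selection and error handling are left out.
AVCaptureSession *session = [[AVCaptureSession alloc] init];

AVCaptureDevice *camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDevice *microphone = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];

[session addInput:[AVCaptureDeviceInput deviceInputWithDevice:camera error:nil]];
[session addInput:[AVCaptureDeviceInput deviceInputWithDevice:microphone error:nil]];

AVCamRecorder *recorder = [[AVCamRecorder alloc] initWithSession:session];
[session startRunning];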
Strangely, I get video data in "@implementation AVCamRecorder (AudioDataOutputDelegate)":

AudioDataOutputDelegate = <AVCaptureVideoDataOutput: 0x208a7df0>

If I swap the order of "@implementation AVCamRecorder (AudioDataOutputDelegate)" and "@implementation AVCamRecorder (VideoDataOutputDelegate)", I get:

VideoDataOutputDelegate = <AVCaptureVideoDataOutput: 0x208a7df0>

It seems I cannot set up two "captureOutput:didOutputSampleBuffer:fromConnection:" methods; either way, the data only ever arrives in one of them. Or did I set up "@implementation AVCamRecorder (VideoDataOutputDelegate)" and "@implementation AVCamRecorder (AudioDataOutputDelegate)" incorrectly?

I suppose I don't need to separate the two callbacks, but I would like to know what is going wrong.
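For reference, this is roughly the kind of single combined callback I have in mind: a minimal, untested sketch that just branches on the captureOutput argument using the two properties declared in the header.

// Untested sketch: one shared delegate method that tells the outputs apart
// by comparing the captureOutput argument with the stored output properties.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    if (captureOutput == [self videoDataOutput]) {
        // video CMSampleBufferRef arrives here
        NSLog(@"video sample buffer: %@", captureOutput);
    } else if (captureOutput == [self audioDataOutput]) {
        // audio CMSampleBufferRef arrives here
        NSLog(@"audio sample buffer: %@", captureOutput);
    }
}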
Thanks in advance.