I am building an application that uses the delegate method

- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection { }

but that method is never called. To explain further, the application is a video recording app built from the code in this tutorial. When I run the tutorial's code in Xcode, the method above does get called, but after copying the code into my application, without changing anything at all, it is never called.
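To illustrate, a minimal implementation of the handler would look like the sketch below (just a log statement in the body); this is the callback that never fires in my application:

- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    // Sketch only: a log line to confirm whether the delegate callback is reached.
    NSLog(@"captureOutput:didOutputSampleBuffer:fromConnection: was called");
}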
Here is the code I am using:
- (void)viewDidLoad
{
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.
    NSError *error = nil;
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPhone) {
        [session setSessionPreset:AVCaptureSessionPreset640x480];
    } else {
        [session setSessionPreset:AVCaptureSessionPresetPhoto];
    }

    // Select a video device, make an input
    AVCaptureDevice *device;
    AVCaptureDevicePosition desiredPosition = AVCaptureDevicePositionFront;

    // find the front facing camera
    for (AVCaptureDevice *d in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if ([d position] == desiredPosition) {
            device = d;
            isUsingFrontFacingCamera = YES;
            break;
        }
    }

    // fall back to the default camera.
    if (nil == device) {
        isUsingFrontFacingCamera = NO;
        device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    }

    // get the input device
    AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!error) {
        // add the input to the session
        if ([session canAddInput:deviceInput]) {
            [session addInput:deviceInput];
        }

        previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
        previewLayer.backgroundColor = [[UIColor blackColor] CGColor];
        previewLayer.videoGravity = AVLayerVideoGravityResizeAspect;

        CALayer *rootLayer = [previewView layer];
        [rootLayer setMasksToBounds:YES];
        [previewLayer setFrame:[rootLayer bounds]];
        [rootLayer addSublayer:previewLayer];

        [session startRunning];
    }
    session = nil;

    if (error) {
        UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:
                                  [NSString stringWithFormat:@"Failed with error %d", (int)[error code]]
                                                            message:[error localizedDescription]
                                                           delegate:nil
                                                  cancelButtonTitle:@"Dismiss"
                                                  otherButtonTitles:nil];
        [alertView show];
        [self teardownAVCapture];
    }

    NSDictionary *detectorOptions = [[NSDictionary alloc] initWithObjectsAndKeys:CIDetectorAccuracyLow, CIDetectorAccuracy, nil];
    faceDetector = [CIDetector detectorOfType:CIDetectorTypeFace context:nil options:detectorOptions];

    // Make a video data output
    videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];

    // we want BGRA, both CoreGraphics and OpenGL work well with 'BGRA'
    NSDictionary *rgbOutputSettings = [NSDictionary dictionaryWithObject:
                                       [NSNumber numberWithInt:kCMPixelFormat_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    [videoDataOutput setVideoSettings:rgbOutputSettings];
    [videoDataOutput setAlwaysDiscardsLateVideoFrames:YES]; // discard if the data output queue is blocked

    // create a serial dispatch queue used for the sample buffer delegate
    // a serial dispatch queue must be used to guarantee that video frames will be delivered in order
    // see the header doc for setSampleBufferDelegate:queue: for more information
    videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL);
    [videoDataOutput setSampleBufferDelegate:self queue:videoDataOutputQueue];

    if ([session canAddOutput:videoDataOutput]) {
        [session addOutput:videoDataOutput];
    }

    // get the output for doing face detection.
    [[videoDataOutput connectionWithMediaType:AVMediaTypeVideo] setEnabled:YES];

    //[self setupCaptureSession];
}
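For context, the instance variables referenced above are declared roughly along these lines (the class name is a placeholder and the exact declarations in my project may differ slightly):

// Sketch of the declarations the snippet above relies on; "ViewController"
// is a placeholder name, and the exact property attributes may differ.
@interface ViewController () <AVCaptureVideoDataOutputSampleBufferDelegate>
{
    BOOL isUsingFrontFacingCamera;
    AVCaptureVideoPreviewLayer *previewLayer;
    AVCaptureVideoDataOutput *videoDataOutput;
    dispatch_queue_t videoDataOutputQueue;
    CIDetector *faceDetector;
}
@property (weak, nonatomic) IBOutlet UIView *previewView;
@end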