In my app, I open the video preview layer with the following code:
AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput
deviceInputWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]
error:nil];
/* We set up the output */
AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
captureOutput.alwaysDiscardsLateVideoFrames = YES;
dispatch_queue_t queue;
queue = dispatch_queue_create("cameraQueue", NULL);
[captureOutput setSampleBufferDelegate:self queue:queue];
dispatch_release(queue);
NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
[captureOutput setVideoSettings:videoSettings];
self.captureSession = [[AVCaptureSession alloc] init];
[self.captureSession addInput:captureInput];
[self.captureSession addOutput:captureOutput];
/* We use medium quality; on the iPhone 4 this demo would lag too much, because the conversion to UIImage and CGImage demands too many resources at 720p resolution. */
[self.captureSession setSessionPreset:AVCaptureSessionPresetMedium];
CGRect Vframe = self.viewNo2.frame;
/*We add the Custom Layer (We need to change the orientation of the layer so that the video is displayed correctly)*/
self.customLayer = [CALayer layer];
self.customLayer.frame = Vframe;
self.customLayer.contentsGravity = kCAGravityResizeAspect;
[self.view.layer addSublayer:self.customLayer];
CGRect VFrame1 = self.viewNo3.frame;
/*We add the Custom Layer (We need to change the orientation of the layer so that the video is displayed correctly)*/
self.customLayer1 = [CALayer layer];
self.customLayer1.frame = VFrame1;
self.customLayer1.contentsGravity = kCAGravityResizeAspect;
[self.view.layer addSublayer:self.customLayer1];
///*We add the imageView*/
//self.imageView = [[UIImageView alloc] init];
//self.imageView.frame = CGRectMake(9, 9, 137, 441);
//[self.view addSubview:self.imageView];
/*We add the preview layer*/
CGRect VFrame2 = self.viewNo1.frame;
self.prevLayer = [AVCaptureVideoPreviewLayer layerWithSession: self.captureSession];
self.prevLayer.frame = VFrame2;
self.prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[self.view.layer addSublayer: self.prevLayer];
/*We start the capture*/
[self.captureSession startRunning];
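For context, [captureOutput setSampleBufferDelegate:self queue:queue] above means this class also receives every frame in captureOutput:didOutputSampleBuffer:fromConnection:. A minimal sketch of what that callback looks like, assuming the 32BGRA pixel buffer is wrapped in a CGImage and pushed into customLayer / customLayer1 on the main thread (the actual callback code is not shown here), is:
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    /* Get the 32BGRA pixel buffer delivered by the video data output */
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    /* Wrap the raw BGRA pixels in a CGImage */
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow,
                                                 colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(context);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    /* Hand the frame to the custom layers on the main thread
       (assumption: both layers display the same camera image) */
    dispatch_async(dispatch_get_main_queue(), ^{
        self.customLayer.contents = (id)newImage;
        self.customLayer1.contents = (id)newImage;
        CGImageRelease(newImage);
    });
}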
When I try to capture the screen with this method:
- (IBAction)Photo {
    CGRect rect = [self.view bounds];
    UIGraphicsBeginImageContextWithOptions(rect.size, YES, 0.0f);
    CGContextRef context = UIGraphicsGetCurrentContext();
    [self.view.layer renderInContext:context];
    UIImage *viewImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    UIImageWriteToSavedPhotosAlbum(viewImage, nil, nil, nil);
}
the prevLayer is not captured in the saved image. Am I missing something?