次のメソッドを使用して、AVCaptureStillImageOutputからフレームをキャプチャしています
// Asynchronously captures a still frame from the photo output.
// The completion handler receives either a decoded UIImage (error nil)
// or an NSError describing why the capture failed (image nil).
- (void)captureImage:(void(^)(NSError*, UIImage*))completionHandler {
    AVCaptureConnection *connection = [_photoOutput connectionWithMediaType:AVMediaTypeVideo];
    if (connection == nil) {
        // No live video connection — the session is not running or photo capture is off.
        if (completionHandler != nil) {
            completionHandler([SCRecorder createError:@"Camera session not started or Photo disabled"], nil);
        }
        return;
    }

    [_photoOutput captureStillImageAsynchronouslyFromConnection:connection
                                              completionHandler:^(CMSampleBufferRef sampleBuffer, NSError *captureError) {
        if (completionHandler == nil) {
            return;
        }
        if (sampleBuffer == nil || captureError != nil) {
            // Capture failed — forward the framework's error as-is.
            completionHandler(captureError, nil);
            return;
        }
        NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:sampleBuffer];
        if (jpegData == nil) {
            completionHandler([SCRecorder createError:@"Failed to create jpeg data"], nil);
            return;
        }
        completionHandler(nil, [UIImage imageWithData:jpegData]);
    }];
}
次のように接続に縦向き（Portrait）を設定しても、
// Request portrait orientation on the capture connection before capturing.
// NOTE(review): this sets orientation metadata on the connection only; it is
// honored solely when -isVideoOrientationSupported returns YES for this
// connection — confirm it does for the still-image connection in use.
if ([captureConnection isVideoOrientationSupported]) {
captureConnection.videoOrientation = AVCaptureVideoOrientationPortrait;
}
結果の画像をバッファ内で回転できません。
バッファから直接画像を取得しようとしましたが、回転しています
// Create a UIImage from an *uncompressed* (BGRA pixel-buffer) sample buffer,
// e.g. a video frame. Returns nil when the sample buffer carries no
// CVPixelBuffer — which is the case for AVCaptureStillImageOutput's default
// JPEG-compressed buffers (configure outputSettings with
// kCVPixelBufferPixelFormatTypeKey to receive pixel buffers instead).
- (UIImage *) imageFromSampleBuffer:(CMSampleBufferRef) sampleBuffer {
// Get a CMSampleBuffer's Core Video image buffer for the media data.
// This is NULL for compressed (e.g. JPEG) sample buffers, whose payload
// lives in a CMBlockBuffer instead — guard before locking, or we crash.
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if (imageBuffer == NULL) {
return nil;
}
// Lock the base address of the pixel buffer
CVPixelBufferLockBaseAddress(imageBuffer, 0);
// Get the base address of the pixel buffer
void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
// Get the number of bytes per row for the pixel buffer
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
// Get the pixel buffer width and height
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
// Create a device-dependent RGB color space
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
// Create a bitmap graphics context with the sample buffer data.
// NOTE(review): byte-order/alpha flags assume a BGRA pixel buffer — confirm
// the output's videoSettings request kCVPixelFormatType_32BGRA.
CGContextRef context1 = CGBitmapContextCreate(baseAddress, width, height, 8,
bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
if (context1 == NULL) {
// Context creation failed (unexpected buffer layout); clean up and bail.
CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
CGColorSpaceRelease(colorSpace);
return nil;
}
// Create a Quartz image from the pixel data in the bitmap graphics context
CGImageRef quartzImage = CGBitmapContextCreateImage(context1);
// Unlock the pixel buffer
CVPixelBufferUnlockBaseAddress(imageBuffer,0);
// Free up the context and color space
CGContextRelease(context1);
CGColorSpaceRelease(colorSpace);
// Wrap the Quartz image with an explicit orientation so UIKit draws the
// frame rotated to portrait (raw buffers are landscape on iOS cameras).
UIImage *image = [UIImage imageWithCGImage:quartzImage scale:1.0 orientation:UIImageOrientationRight];
// Release the Quartz image
CGImageRelease(quartzImage);
return (image);
}
しかし、 captureStillImageAsynchronouslyFromConnection:connectionによって渡されたCMSampleBufferRefから取得する必要がある結果のCVImageBufferRefは nil です。
[_photoOutput captureStillImageAsynchronouslyFromConnection:connection completionHandler:
^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
// NOTE(review): AVCaptureStillImageOutput delivers JPEG-compressed sample
// buffers by default; their payload is a CMBlockBuffer, not a CVPixelBuffer,
// so CMSampleBufferGetImageBuffer returns NULL here. Request an uncompressed
// pixel format via outputSettings (kCVPixelBufferPixelFormatTypeKey) to get
// a non-NULL image buffer — confirm against the output's configuration.
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(imageDataSampleBuffer);
// The image buffer ref is nil.
次のように、出力データバッファーから直接取得した画像表現を回転させようとすると:
NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
if (jpegData) {
// Decode the JPEG first, then re-wrap its backing CGImage with an explicit
// orientation. (The original code redeclared `image` in the same scope and
// referenced `quartzImage`, which is undefined here — it could not compile.)
UIImage *decoded = [UIImage imageWithData:jpegData];
UIImage *image = [UIImage imageWithCGImage:decoded.CGImage
scale:decoded.scale
orientation:UIImageOrientationRight];
}
結果として得られる UIImage は、Xcode の QuickLook と po で確認すると、正しい向きになっていません。