I have an application using an AVCaptureSession that works correctly on earlier iOS versions, but on a device running iOS 8 the app crashes sporadically and I have not been able to resolve it. An exception is thrown at [session addInput:input];. Please check the code below; the error occurs at [session addInput:input]. Any advice on how to fix this would be appreciated.
Printing the error description: Error Domain=AVFoundationErrorDomain Code=-11852 "Cannot use Back Camera" UserInfo=0x17c076e0 {NSLocalizedDescription=Cannot use Back Camera, AVErrorDeviceKey=, NSLocalizedFailureReason=This app is not authorized to use Back Camera.}
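For context, -11852 is AVErrorApplicationIsNotAuthorizedToUseDevice, and the failure reason says the app has no permission to use the Back Camera: iOS 8 exposes a per-app camera switch under Settings > Privacy > Camera, so addInput: can fail once the user has declined the prompt. Below is a minimal sketch of checking and requesting authorization before building the session; the checkCameraAccess method name is my own invention, while the AVCaptureDevice class methods are standard AVFoundation API (available since iOS 7):

#import <AVFoundation/AVFoundation.h>

// Hypothetical helper; call before configuring the AVCaptureSession.
- (void)checkCameraAccess
{
    AVAuthorizationStatus status =
        [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    if (status == AVAuthorizationStatusNotDetermined) {
        // First use: this call shows the system permission prompt.
        [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
                                 completionHandler:^(BOOL granted) {
            NSLog(@"camera access %@", granted ? @"granted" : @"denied");
        }];
    } else if (status == AVAuthorizationStatusDenied ||
               status == AVAuthorizationStatusRestricted) {
        // In this state -addInput: fails with -11852; access has to be
        // re-enabled by the user in Settings > Privacy > Camera.
        NSLog(@"camera access denied or restricted");
    }
}

Once the status is AVAuthorizationStatusDenied the app cannot re-prompt; the user has to flip the switch in Settings.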
#import "CameraViewController.h"
#import "MAImagePickerControllerAdjustViewController.h"
#import "PopupViewController.h"
#import "MAImagePickerFinalViewController.h"
@implementation CameraViewController
@synthesize vImagePreview;
@synthesize vImage;
@synthesize stillImageOutput;
@synthesize lFrameCount;
@synthesize session;
@synthesize device;
@synthesize oneOff;
@synthesize captureManager = _captureManager;
@synthesize flashButton = _flashButton;
@synthesize vImage1;
@synthesize vImage2;
@synthesize vImage3;
@synthesize vImage4;
@synthesize vImage5;
@synthesize vImage6;
/////////////////////////////////////////////////////////////////////
#pragma mark - UI Actions
/////////////////////////////////////////////////////////////////////
-(IBAction) captureNow
{
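// Find the connection that carries video; that is the connection the
// still image capture is requested from.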
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in stillImageOutput.connections)
{
for (AVCaptureInputPort *port in [connection inputPorts])
{
if ([[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
break;
}
}
if (videoConnection) { break; }
}
NSLog(@"about to request a capture from: %@", stillImageOutput);
[stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
CFDictionaryRef exifAttachments = CMGetAttachment( imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
if (exifAttachments)
{
// Do something with the attachments.
NSLog(@"attachements: %@", exifAttachments);
}
else
NSLog(@"no attachments");
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
NSUserDefaults *standardUserDefaults = [NSUserDefaults standardUserDefaults];
NSString *val1 = nil;
if (standardUserDefaults)
{
val1 = [standardUserDefaults objectForKey:@"clickTypeTwo"];
}
if([val1 isEqualToString:@"cameraType"])
{
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
[session stopRunning];
});
FinalViewController *finalView = [[FinalViewController alloc] initWithNibName:IS_IPAD()?@"FinalViewController_iPad":@"FinalViewController" bundle:nil];
finalView.sourceImage = image;
//finalView.imageFrameEdited = YES;
CATransition* transition = [CATransition animation];
transition.duration = 0.4;
transition.type = kCATransitionFade;
transition.subtype = kCATransitionFromBottom;
[self.navigationController.view.layer addAnimation:transition forKey:kCATransition];
[self.navigationController pushViewController:finalView animated:NO];
}
else
{
[session stopRunning];
AdjustViewController *adjustViewController = [[AdjustViewController alloc] initWithNibName:IS_IPAD()?@"AdjustViewController_iPad":@"AdjustViewController" bundle:nil];
adjustViewController.sourceImage = image;
CATransition* transition = [CATransition animation];
transition.duration = 0.4;
transition.type = kCATransitionFade;
transition.subtype = kCATransitionFromBottom;
[self.navigationController.view.layer addAnimation:transition forKey:kCATransition];
[self.navigationController pushViewController:adjustViewController animated:NO];
}
}];
}
-(void)cropImageViewControllerDidFinished:(UIImage *)image{
MAImagePickerFinalViewController *finalView = [[MAImagePickerFinalViewController alloc] initWithNibName:IS_IPAD()?@"FinalViewController_iPad":@"FinalViewController" bundle:nil];
finalView.sourceImage = image;
//finalView.imageFrameEdited = YES;
CATransition* transition = [CATransition animation];
transition.duration = 0.4;
transition.type = kCATransitionFade;
transition.subtype = kCATransitionFromBottom;
[self.navigationController.view.layer addAnimation:transition forKey:kCATransition];
[self.navigationController pushViewController:finalView animated:NO];
}
/////////////////////////////////////////////////////////////////////
#pragma mark - Video Frame Delegate
/////////////////////////////////////////////////////////////////////
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
//NSLog(@"got frame");
iFrameCount++;
// Update Display
// We are running in the context of the capture session's queue. To update the UI in real time, we have to do this on the main thread.
NSString * frameCountString = [[NSString alloc] initWithFormat:@"%4.4d", iFrameCount];
[lFrameCount performSelectorOnMainThread: @selector(setText:) withObject:frameCountString waitUntilDone:YES];
//NSLog(@"frame count %d", iFrameCount);
}
- (IBAction)showLeftSideBar
{
//[self dismissModalViewControllerAnimated:YES];
if ([[SidebarViewController share] respondsToSelector:@selector(showSideBarControllerWithDirection:)]) {
[[SidebarViewController share] showSideBarControllerWithDirection:SideBarShowDirectionLeft];
}
}
- (IBAction)showRightSideBar:(id)sender
{
}
BOOL isUsingFrontFacingCamera;
- (IBAction)flipCamera:(id)sender
{
AVCaptureDevicePosition desiredPosition;
if (isUsingFrontFacingCamera)
desiredPosition = AVCaptureDevicePositionBack;
else
desiredPosition = AVCaptureDevicePositionFront;
for (AVCaptureDevice *d in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
if ([d position] == desiredPosition) {
[[self session] beginConfiguration];
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:d error:nil];
for (AVCaptureInput *oldInput in [[self session] inputs]) {
[[self session] removeInput:oldInput];
}
[[self session] addInput:input];
[[self session] commitConfiguration];
break;
}
}
isUsingFrontFacingCamera = !isUsingFrontFacingCamera;
}
/////////////////////////////////////////////////////////////////////
#pragma mark - Guts
/////////////////////////////////////////////////////////////////////
- (void)didReceiveMemoryWarning
{
[super didReceiveMemoryWarning];
// Release any cached data, images, etc that aren't in use.
}
/////////////////////////////////////////////////////////////////////
#pragma mark - View lifecycle
/////////////////////////////////////////////////////////////////////
- (void)viewDidLoad
{
[super viewDidLoad];
}
- (void)viewDidUnload
{
[super viewDidUnload];
// Release any retained subviews of the main view.
// e.g. self.myOutlet = nil;
}
- (void)viewWillAppear:(BOOL)animated
{
[super viewWillAppear:animated];
}
- (void)viewDidAppear:(BOOL)animated
{
[super viewDidAppear:animated];
flashIsOn=YES;
/////////////////////////////////////////////////////////////////////////////
// Create a preview layer that has a capture session attached to it.
// Stick this preview layer into our UIView.
/////////////////////////////////////////////////////////////////////////////
session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPreset640x480;
CALayer *viewLayer = self.vImagePreview.layer;
NSLog(@"viewLayer = %@", viewLayer);
// viewLayer.frame = CGRectMake(-70, 150, 480, 336);
// UIGraphicsBeginImageContextWithOptions(CGSizeMake(400, 400), NO, 1);
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
// Setting the frame also sets bounds and position, so the frame assignment is sufficient.
captureVideoPreviewLayer.frame = self.vImagePreview.bounds;
[self.vImagePreview.layer addSublayer:captureVideoPreviewLayer];
//[self addVideoInputFrontCamera:YES]; // set to YES for Front Camera, No for Back camera
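// defaultDeviceWithMediaType: returns the back camera here, i.e. the
// device named in the "Cannot use Back Camera" error above.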
device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
// Handle the error appropriately.
NSLog(@"ERROR: trying to open camera: %@", error);
}
// Guard the attach with -canAddInput: so a denied camera permission
// fails gracefully instead of raising an exception.
if (input && [session canAddInput:input]) {
[session addInput:input];
}
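// (Assumption, not part of the original flow:) if authorization was
// denied, iOS 8 can send the user straight to this app's Settings page:
//   [[UIApplication sharedApplication] openURL:
//       [NSURL URLWithString:UIApplicationOpenSettingsURLString]];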
/////////////////////////////////////////////////////////////
// OUTPUT #1: Still Image
/////////////////////////////////////////////////////////////
// Add an output object to our session so we can get a still image
// We retain a handle to the still image output and use this when we capture an image.
stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[stillImageOutput setOutputSettings:outputSettings];
[session addOutput:stillImageOutput];
/////////////////////////////////////////////////////////////
// OUTPUT #2: Video Frames
/////////////////////////////////////////////////////////////
// Create Video Frame Outlet that will send each frame to our delegate
AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
captureOutput.alwaysDiscardsLateVideoFrames = YES;
//captureOutput.minFrameDuration = CMTimeMake(1, 3); // deprecated in iOS 5
// We need to create a queue to funnel the frames to our delegate
dispatch_queue_t queue;
queue = dispatch_queue_create("cameraQueue", NULL);
[captureOutput setSampleBufferDelegate:self queue:queue];
dispatch_release(queue);
// Set the video output to store frame in BGRA (It is supposed to be faster)
NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
// let's try some different keys,
NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
[captureOutput setVideoSettings:videoSettings];
[session addOutput:captureOutput];
/////////////////////////////////////////////////////////////
// start the capture session
[session startRunning];
/////////////////////////////////////////////////////////////////////////////
// initialize frame counter
iFrameCount = 0;
}
- (void)viewWillDisappear:(BOOL)animated
{
[super viewWillDisappear:animated];
}
- (void)viewDidDisappear:(BOOL)animated
{
[super viewDidDisappear:animated];
[session stopRunning];
}
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
// Return YES for supported orientations
if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPhone) {
return (interfaceOrientation != UIInterfaceOrientationPortraitUpsideDown);
} else {
return YES;
}
}
- (IBAction)cancelButton:(id)sender{
}
- (IBAction)flashOn:(id)sender{
Class captureDeviceClass = NSClassFromString(@"AVCaptureDevice");
if (captureDeviceClass != nil) {
if ([device hasTorch] && [device hasFlash]){
[device lockForConfiguration:nil];
if (flashIsOn) {
[device setTorchMode:AVCaptureTorchModeOn];
[device setFlashMode:AVCaptureFlashModeOn];
oneOff.text=@"On";
[_flashButton setImage:[UIImage imageNamed:@"flash-on-button"]];
_flashButton.accessibilityLabel = @"Disable Camera Flash";
flashIsOn = NO; //define as a variable/property if you need to know status
} else {
[_flashButton setImage:[UIImage imageNamed:@"flash-off-button"]];
_flashButton.accessibilityLabel = @"Enable Camera Flash";
oneOff.text=@"Off";
[device setTorchMode:AVCaptureTorchModeOff];
[device setFlashMode:AVCaptureFlashModeOff];
flashIsOn = YES;
}
[device unlockForConfiguration];
}
}
}
- (void)dealloc {
[[self session] stopRunning];
[super dealloc];
}
- (void)storeFlashSettingWithBool:(BOOL)flashSetting
{
[[NSUserDefaults standardUserDefaults] setBool:flashSetting forKey:kCameraFlashDefaultsKey];
[[NSUserDefaults standardUserDefaults] synchronize];
}
@end