Hi, I'm building a gallery app that stores photos under different genres such as fruit, meat, and fish. I've written code that lets the user take a square photo, but I don't know how to save that photo into a genre of the user's choosing (rather than into the photo library) so that another view controller can later load those photos and display them like a gallery. I'd also like the user to pick which genre the photo gets saved to. How can I achieve this? My code is below, and at the very end I've added a rough sketch of the direction I was considering.
- (void)initializeCamera {
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    session.sessionPreset = AVCaptureSessionPresetPhoto;

    AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    [captureVideoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
    captureVideoPreviewLayer.frame = self.imagePreview.bounds;
    [self.imagePreview.layer addSublayer:captureVideoPreviewLayer];

    UIView *view = [self imagePreview];
    CALayer *viewLayer = [view layer];
    [viewLayer setMasksToBounds:YES];
    CGRect bounds = [view bounds];
    [captureVideoPreviewLayer setFrame:bounds];

    NSArray *devices = [AVCaptureDevice devices];
    AVCaptureDevice *frontCamera;
    AVCaptureDevice *backCamera;
    for (AVCaptureDevice *device in devices) {
        NSLog(@"Device name: %@", [device localizedName]);
        if ([device hasMediaType:AVMediaTypeVideo]) {
            if ([device position] == AVCaptureDevicePositionBack) {
                NSLog(@"Device position : back");
                backCamera = device;
            }
            else {
                NSLog(@"Device position : front");
                frontCamera = device;
            }
        }
    }
    // Use the front camera if one was found, otherwise fall back to the back camera
    if (!frontCamera) {
        NSError *error = nil;
        AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:backCamera error:&error];
        if (!input) {
            NSLog(@"ERROR: trying to open camera: %@", error);
        }
        [session addInput:input];
    }
    if (frontCamera) {
        NSError *error = nil;
        AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:frontCamera error:&error];
        if (!input) {
            NSLog(@"ERROR: trying to open camera: %@", error);
        }
        [session addInput:input];
    }
    stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
    [stillImageOutput setOutputSettings:outputSettings];
    [session addOutput:stillImageOutput];
    [session startRunning];
}
- (IBAction)snapImage:(id)sender {
    if (!haveImage) {
        captureImage.image = nil; //remove old image from view
        captureImage.hidden = NO; //show the captured image view
        imagePreview.hidden = YES; //hide the live video feed
        [self capImage];
    }
    else {
        captureImage.hidden = YES;
        imagePreview.hidden = NO;
        haveImage = NO;
    }
}
- (void)capImage { //method to capture image from AVCaptureSession video feed
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in stillImageOutput.connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) {
            break;
        }
    }

    NSLog(@"about to request a capture from: %@", stillImageOutput);
    [stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
        if (imageSampleBuffer != NULL) {
            NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
            [self processImage:[UIImage imageWithData:imageData]];
        }
    }];
}
- (void)processImage:(UIImage *)image { //process captured image, crop, resize and rotate
    haveImage = YES;

    if ([UIDevice currentDevice].userInterfaceIdiom == UIUserInterfaceIdiomPad) { //Device is iPad
        // Resize image
        UIGraphicsBeginImageContext(CGSizeMake(768, 1022));
        [image drawInRect:CGRectMake(0, 0, 768, 1022)];
        UIImage *smallImage = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();

        // Crop to a 768x768 square
        CGRect cropRect = CGRectMake(0, 130, 768, 768);
        CGImageRef imageRef = CGImageCreateWithImageInRect([smallImage CGImage], cropRect);
        //or use the UIImage wherever you like
        [captureImage setImage:[UIImage imageWithCGImage:imageRef]];
        CGImageRelease(imageRef);
        captureImage.hidden = NO;
    }
    else { //Device is iPhone
        // Resize image
        UIGraphicsBeginImageContext(CGSizeMake(320, 426));
        [image drawInRect:CGRectMake(0, 0, 320, 426)];
        UIImage *smallImage = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();

        // Crop to a 320x320 square
        CGRect cropRect = CGRectMake(0, 55, 320, 320);
        CGImageRef imageRef = CGImageCreateWithImageInRect([smallImage CGImage], cropRect);
        [captureImage setImage:[UIImage imageWithCGImage:imageRef]];
        CGImageRelease(imageRef);
    }

    //adjust image orientation based on device orientation
    if ([[UIDevice currentDevice] orientation] == UIDeviceOrientationLandscapeLeft) {
        NSLog(@"landscape left image");
        [UIView beginAnimations:@"rotate" context:nil];
        [UIView setAnimationDuration:0.5];
        captureImage.transform = CGAffineTransformMakeRotation(DegreesToRadians(-90));
        [UIView commitAnimations];
    }
    if ([[UIDevice currentDevice] orientation] == UIDeviceOrientationLandscapeRight) {
        NSLog(@"landscape right");
        [UIView beginAnimations:@"rotate" context:nil];
        [UIView setAnimationDuration:0.5];
        captureImage.transform = CGAffineTransformMakeRotation(DegreesToRadians(90));
        [UIView commitAnimations];
    }
    if ([[UIDevice currentDevice] orientation] == UIDeviceOrientationPortraitUpsideDown) {
        NSLog(@"upside down");
        [UIView beginAnimations:@"rotate" context:nil];
        [UIView setAnimationDuration:0.5];
        captureImage.transform = CGAffineTransformMakeRotation(DegreesToRadians(180));
        [UIView commitAnimations];
    }
    if ([[UIDevice currentDevice] orientation] == UIDeviceOrientationPortrait) {
        NSLog(@"portrait upright");
        [UIView beginAnimations:@"rotate" context:nil];
        [UIView setAnimationDuration:0.5];
        captureImage.transform = CGAffineTransformMakeRotation(DegreesToRadians(0));
        [UIView commitAnimations];
    }
}
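
For reference, this is roughly the direction I was considering for the saving part. The genre string, the file naming, and the two method names (saveImage:toGenre: and imagesForGenre:) are just placeholders I made up; I haven't got this working yet, and I don't know whether writing JPEG files into per-genre folders under the app's Documents directory like this is the right approach, or whether I should use Core Data or something else instead:

- (void)saveImage:(UIImage *)image toGenre:(NSString *)genre {
    // Build <Documents>/<genre>/ and create the folder if it does not exist yet
    NSString *documentsPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
    NSString *genrePath = [documentsPath stringByAppendingPathComponent:genre];
    [[NSFileManager defaultManager] createDirectoryAtPath:genrePath
                              withIntermediateDirectories:YES
                                               attributes:nil
                                                    error:nil];

    // Use a timestamp as the file name so photos never overwrite each other
    NSString *fileName = [NSString stringWithFormat:@"%.0f.jpg", [[NSDate date] timeIntervalSince1970] * 1000];
    NSString *filePath = [genrePath stringByAppendingPathComponent:fileName];
    [UIImageJPEGRepresentation(image, 0.9) writeToFile:filePath atomically:YES];
}

- (NSArray *)imagesForGenre:(NSString *)genre {
    // Read every saved photo back from <Documents>/<genre>/ for the gallery screen
    NSString *documentsPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
    NSString *genrePath = [documentsPath stringByAppendingPathComponent:genre];
    NSArray *fileNames = [[NSFileManager defaultManager] contentsOfDirectoryAtPath:genrePath error:nil];

    NSMutableArray *images = [NSMutableArray array];
    for (NSString *fileName in fileNames) {
        UIImage *saved = [UIImage imageWithContentsOfFile:[genrePath stringByAppendingPathComponent:fileName]];
        if (saved) {
            [images addObject:saved];
        }
    }
    return images;
}

The idea was to call something like [self saveImage:captureImage.image toGenre:@"Fruit"] at the end of processImage: once the user has picked a genre (maybe from a UIActionSheet or a picker), and then call imagesForGenre: in the gallery view controller to fill something like a UICollectionView. Is that a reasonable way to do it, or is there a better / more standard approach?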