0

I'm trying to build a non-realtime face detection application.

Following this article: http://maniacdev.com/2011/11/tutorial-easy-face-detection-with-core-image-in-ios-5/ I can load in a jpg and detect faces.

I would like to automatically take a picture every 20 seconds, then display the image in a UIImageView* and then run the existing detect face function on it.

My question is two fold.

  1. Is there an easy way to take a sample picture from the camera and load it into a UIImageView* without saving it?

  2. How can I automate this to happen every 20 seconds with no user interaction?

Thanks!

4

1 に答える 1

0

AVFoundationプログラミングガイドを見てください

AVFoundation プログラミング ガイド

このガイドでは、AVFoundation を使用してメディアをキャプチャする方法について説明します。

CATransform3D を介して出力を回転するまで、カメラは生の出力のみを表示するため、デバイスの回転を考慮する必要がありますが、それは必要以上に深くなります。

知っているだけで逃げられるかもしれません。元の点から最終的な回転位置まで 90° ずつ回転します。

これが私の小さなカメラ テスト ユーティリティのコードです。

UIView を構築し、IBOutlets と IBActions を接続します

ViewController.h

#import <UIKit/UIKit.h>

/// Demo view controller for experimenting with AVFoundation camera capture.
/// Wire the outlets and actions up to a storyboard/xib UI.
@interface ViewController : UIViewController
// Host view for the live camera preview layer (outlets are weak; the view
// hierarchy owns the views).
@property (weak, nonatomic) IBOutlet UIView *previewViewContainer;
// Host view intended for playback of recorded output.
@property (weak, nonatomic) IBOutlet UIView *playerViewContainer;
// Step 1: attach camera + mic inputs to the capture session.
- (IBAction)button1Pressed:(id)sender;
// Step 2: install the preview layer into previewViewContainer.
- (IBAction)button2Pressed:(id)sender;
// Unimplemented test hooks.
- (IBAction)button3Pressed:(id)sender;
- (IBAction)button4Pressed:(id)sender;
// Start/stop the AVCaptureSession.
- (IBAction)startPressed:(id)sender;
- (IBAction)stopPressed:(id)sender;
// Toggle between the front and back cameras while running.
- (IBAction)swapInputsPressed:(id)sender;
// Unimplemented recording hook.
- (IBAction)recordPressed:(id)sender;

@end

ViewController.m

#import "ViewController.h"

#import <AVFoundation/AVFoundation.h>


// Private state: the capture session, its preview layer, and lazily-resolved
// device inputs. The *IsSet flags record that a device lookup has already been
// attempted, so a missing device (e.g. no front camera on older hardware or
// the simulator) is not re-scanned on every access.
@interface ViewController ()

// Coordinates data flow from the device inputs to any outputs.
@property (nonatomic, strong) AVCaptureSession *captureSession;
// Layer that renders the live camera feed.
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *capturePreviewLayer;

@property (nonatomic, strong) AVCaptureDeviceInput *frontCam;
@property (nonatomic, readonly) BOOL frontCamIsSet;   // lookup attempted?
@property (nonatomic, readonly) BOOL hasFrontCam;     // lookup succeeded?
@property (nonatomic, readonly) BOOL isUsingFrontCam; // currently attached?

@property (nonatomic, strong) AVCaptureDeviceInput *backCam;
@property (nonatomic, readonly) BOOL backCamIsSet;
@property (nonatomic, readonly) BOOL hasBackCam;
@property (nonatomic, readonly) BOOL isUsingBackCam;

@property (nonatomic, strong) AVCaptureDeviceInput *mic;
@property (nonatomic, readonly) BOOL micIsSet;
@property (nonatomic, readonly) BOOL hasMic;

@end

/// Converts an angle expressed in degrees to radians.
/// @param degrees Angle in degrees.
/// @return The equivalent angle in radians.
CGFloat DegreesToRadians(CGFloat degrees)
{
    return degrees * M_PI / 180;
}

/// Converts an angle expressed in radians to degrees.
/// @param radians Angle in radians.
/// @return The equivalent angle in degrees.
CGFloat RadiansToDegrees(CGFloat radians)
{
    return radians * 180 / M_PI;
}

@implementation ViewController

#pragma mark - Helper Methods

// Every capture device attached to the machine (cameras and microphones).
- (NSArray *) inputDevices{
    return [AVCaptureDevice devices];
}
// Capture devices that can supply video, i.e. the front and back cameras.
- (NSArray *) videoInputDevices{
    return [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
}
// Capture devices that can supply audio, i.e. the microphone(s).
- (NSArray *) audioInputDevices{
    return [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
}


#pragma mark - Properties

@synthesize captureSession = _captureSession;
// Lazily created session coordinating the flow from inputs to outputs.
- (AVCaptureSession *)captureSession{
    if (_captureSession == nil){
        _captureSession = [[AVCaptureSession alloc] init];
    }
    return _captureSession;
}
@synthesize capturePreviewLayer = _capturePreviewLayer;
// Lazily created layer that renders the session's live camera feed.
- (AVCaptureVideoPreviewLayer *)capturePreviewLayer{
    if (_capturePreviewLayer == nil){
        _capturePreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
    }
    return _capturePreviewLayer;
}


@synthesize frontCam = _frontCam;
// Lazily resolves the front-facing camera input. _frontCamIsSet records that
// a lookup was attempted so a missing device is not re-scanned on every call.
- (AVCaptureDeviceInput *)frontCam{
    if (_frontCam == nil && !self.frontCamIsSet){
        _frontCamIsSet = YES;
        NSArray *videoDevices = [self videoInputDevices];
        for (AVCaptureDevice *inputDevice in videoDevices) {
            if ([inputDevice position] == AVCaptureDevicePositionFront){
                NSError *error = nil;
                _frontCam = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
                if (!_frontCam){
                    NSLog(@"Error Attaching Front Cam %@",error);
                }
            }
        }
    }
    return _frontCam;
}
- (BOOL)hasFrontCam{
    return self.frontCam != nil;
}
@synthesize isUsingFrontCam = _isUsingFrontCam;

@synthesize backCam = _backCam;
// Lazily resolves the back-facing camera input; same caching scheme as frontCam.
- (AVCaptureDeviceInput *)backCam{
    if (_backCam == nil && !self.backCamIsSet){
        _backCamIsSet = YES;
        NSArray *videoDevices = [self videoInputDevices];
        for (AVCaptureDevice *inputDevice in videoDevices) {
            if ([inputDevice position] == AVCaptureDevicePositionBack){
                NSError *error = nil;
                _backCam = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
                if (!_backCam){
                    NSLog(@"Error Attaching Back Cam %@",error);
                }
            }
        }
    }
    return _backCam;
}
- (BOOL)hasBackCam{
    return self.backCam != nil;
}

@synthesize mic = _mic;
// Lazily resolves a microphone input. If several audio devices exist the last
// one enumerated wins.
- (AVCaptureDeviceInput *)mic{
    if (_mic == nil && !self.micIsSet){
        _micIsSet = YES;
        NSArray *audioDevices = [self audioInputDevices];
        for (AVCaptureDevice *inputDevice in audioDevices) {
            NSError *error = nil;
            _mic = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
            if (!_mic){
                NSLog(@"Error Attaching Mic %@",error);
            }
        }
    }
    return _mic;
}
- (BOOL)hasMic{
    return self.mic != nil;
}
- (BOOL)isUsingBackCam{
    return !self.isUsingFrontCam;
}

// Attaches the preferred camera (front if present, otherwise back) plus the
// microphone to the capture session.
// FIX: the original condition read `if (NO && self.hasFrontCam && ...)` —
// debug leftover that made the front-camera branch unreachable. The dead
// `NO &&` has been removed.
- (IBAction)button1Pressed:(id)sender {
    if (self.hasFrontCam && [self.captureSession canAddInput:self.frontCam]){
        _isUsingFrontCam = YES;
        [self.captureSession addInput:self.frontCam];
    }
    else if(self.hasBackCam && [self.captureSession canAddInput:self.backCam]){
        _isUsingFrontCam = NO;
        [self.captureSession addInput:self.backCam];
    }
    if (self.hasMic && [self.captureSession canAddInput:self.mic]) {
        [self.captureSession addInput:self.mic];
    }
}
// Installs the preview layer inside the container view, sized to fill it.
- (IBAction)button2Pressed:(id)sender {
    self.capturePreviewLayer.frame = self.previewViewContainer.layer.bounds;

    [self.previewViewContainer.layer addSublayer:self.capturePreviewLayer];

}

// Rotates the preview layer to compensate for device orientation. The camera
// delivers its raw output in a fixed orientation, so the layer itself is
// counter-rotated here.
- (void) orientationChanged:(NSNotification*) notification{
    NSLog(@"Notification Of Orientation Change\n\n%@",notification.userInfo);
    if (_capturePreviewLayer != nil){


        CGFloat rotate90 = DegreesToRadians(90);

        CGFloat rotateFinish = 0;

        UIDeviceOrientation orientation = [UIDevice currentDevice].orientation;
        // Deliberate fall-through: each case below accumulates one more 90°
        // step, so LandscapeLeft = 270°, UpsideDown = 180°, LandscapeRight =
        // 90°, Portrait = 0°. Do NOT add break statements.
        switch (orientation) {
            case UIDeviceOrientationLandscapeLeft:
                rotateFinish += rotate90;
            case UIDeviceOrientationPortraitUpsideDown:
                rotateFinish += rotate90;
            case UIDeviceOrientationLandscapeRight:
                rotateFinish += rotate90;
            case UIDeviceOrientationPortrait:
            default:
                break;
        }

        _capturePreviewLayer.transform = CATransform3DMakeRotation(rotateFinish, 0.0, 0.0, 1.0);
    }
}

// Unimplemented test hooks.
- (IBAction)button3Pressed:(id)sender {

}
- (IBAction)button4Pressed:(id)sender {
}

// Begin/end the flow of data through the capture session.
- (IBAction)startPressed:(id)sender {
    [self.captureSession startRunning];
}
- (IBAction)stopPressed:(id)sender {
    [self.captureSession stopRunning];
}

// Swaps the active camera input on the running session.
- (IBAction)swapInputsPressed:(id)sender {
    if (!self.isUsingFrontCam){
        _isUsingFrontCam = YES;
        [self.captureSession removeInput:self.backCam];
        [self.captureSession addInput:self.frontCam];
    }
    else {
        _isUsingFrontCam = NO;
        [self.captureSession removeInput:self.frontCam];
        [self.captureSession addInput:self.backCam];
    }
}

// Unimplemented recording hook.
- (IBAction)recordPressed:(id)sender {

}
// Path of the app's Documents directory, or nil if it cannot be located.
- (NSString *) applicationDocumentsDirectory{
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *basePath = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
    return basePath;
}



- (void)viewDidLoad{
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.
    // Subscribe to orientation changes so the preview layer can be
    // counter-rotated in orientationChanged:.
    [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(orientationChanged:)
                                                 name:UIDeviceOrientationDidChangeNotification
                                               object:nil];

}

// Balance the notification registration from viewDidLoad.
- (void) dealloc{
    [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
    [[NSNotificationCenter defaultCenter] removeObserver:self
                                                    name:UIDeviceOrientationDidChangeNotification
                                                  object:nil];
}
- (void)didReceiveMemoryWarning{
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}


@end

幸いなことに、写真を取得するためのこのテスト アプリを作成しました。

あ、忘れる前に。CALayer をグラフィックにレンダリングするのは、次のように簡単です。

/// Renders the given view's layer hierarchy into a UIImage snapshot.
/// @param srcView The view to capture; its current contents are drawn.
/// @return A new image containing the rendered view.
+ (UIImage *) captureImageOfView:(UIView *)srcView{
    // Scale 0.0 makes the context match the device's screen scale; the plain
    // UIGraphicsBeginImageContext renders at 1x and yields a blurry snapshot
    // on Retina displays.
    UIGraphicsBeginImageContextWithOptions(srcView.bounds.size, NO, 0.0);
    [srcView.layer renderInContext:UIGraphicsGetCurrentContext()];
    UIImage *anImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return anImage;
}

ただし、AVFoundation プログラミング ガイドを参照して、実際にどのようにキャプチャされているかを確認することをお勧めします。これは私自身のデモアプリであり、前述のとおり完全ではありません。

于 2012-12-27T21:53:13.097 に答える