0

CIFaceFeature を使用して顔の検出自体は正しくできているのですが、人が笑っているかどうかは検出できませんでした。以下のコードで笑顔を検出しようとしています。

// Wrap the source UIImage in a CIImage so Core Image can analyze it.
CIImage* image = [CIImage imageWithCGImage:facePicture.CGImage];

// Create a face detector.
// NOTE(review): the original comment claimed "high accuracy" while the code
// actually requests CIDetectorAccuracyLow (faster; the usual trade-off for
// per-frame processing).  If detection quality is the problem, switch to
// CIDetectorAccuracyHigh, or (iOS 7+) pass CIDetectorSmile:@YES to
// -featuresInImage:options: and read faceFeature.hasSmile instead of the
// histogram heuristic below.
CIDetector* detector = [CIDetector detectorOfType:CIDetectorTypeFace 
                                          context:nil options:[NSDictionary dictionaryWithObject:CIDetectorAccuracyLow forKey:CIDetectorAccuracy]];

// Run the detector: returns one CIFaceFeature per detected face.
NSArray* features = [detector featuresInImage:image];

// Iterate over every detected face.  CIFaceFeature provides the bounds of
// the face, the coordinates of each eye and the mouth, plus BOOLs telling
// whether each landmark was actually found.

//    NSLog(@"Finding: %d",[features count]);

// Hide the overlay unless at least one face is found this frame.
HUDView.hidden = YES;
for(CIFaceFeature* faceFeature in features)
{
    HUDView.hidden = NO;
    // Face width drives all the landmark-relative rect sizes below.
    CGFloat faceWidth = faceFeature.bounds.size.width;

    // Position the red outline view over the detected face.
    //faceView = [[UIView alloc] initWithFrame:faceFeature.bounds];
    faceView.frame = faceFeature.bounds;
    faceView.backgroundColor = [UIColor clearColor];
    faceView.layer.borderWidth = 1;
    faceView.layer.borderColor = [[UIColor redColor] CGColor];

    // Only attempt smile analysis when the mouth landmark was detected.
    if(faceFeature.hasMouthPosition)
    {
        NSLog(@"%f",faceFeature.rightEyePosition.x);
        NSLog(@"%f",faceFeature.leftEyePosition.x);
        NSLog(@"%f",faceFeature.mouthPosition.x);
        NSLog(@"%f %f %f %f",faceFeature.mouthPosition.x-faceWidth*0.2,faceFeature.mouthPosition.y-faceWidth*0.2, faceWidth*0.4, faceWidth*0.4);

        // Rect centered on the mouth, sized relative to the face.
        CGRect mouthRect = CGRectMake(faceFeature.mouthPosition.x-faceWidth*0.2, faceFeature.mouthPosition.y-faceWidth*0.2, faceWidth*0.4, faceFeature.bounds.size.height*0.4);

        // Small 3x3 marker rect at the mouth position.
        CGRect pointRect = CGRectMake(faceFeature.mouthPosition.x-faceWidth*0.2, faceFeature.mouthPosition.y-faceWidth*0.2, 3, 3);

        pointView.frame = pointRect;
        [pointView setBackgroundColor:[[UIColor blueColor] colorWithAlphaComponent:0.5]];
        // Center the marker on the mouth landmark.
        [pointView setCenter:faceFeature.mouthPosition];
        pointView.layer.cornerRadius = faceWidth*0.2;

        //float topSpace = faceFeature.bounds.origin.y;
        // NOTE(review): 460 is a hard-coded 3.5-inch screen height
        // (480 - 20pt status bar).  This breaks on any other screen size —
        // TODO: derive from the image/view height instead.
        float bottomSpace = 460- (faceFeature.bounds.origin.y+faceFeature.bounds.size.height);

        // Crop rect in the CGImage's (top-left-origin) coordinate space,
        // offset below the face box to land on the mouth region.
        CGRect cutRect = CGRectMake(faceFeature.mouthPosition.x-faceWidth*0.2, bottomSpace+(faceWidth*0.75)+faceWidth*0.02, 
                                    faceWidth*0.4, faceWidth*0.2);

        // Shift mouthRect into UIKit coordinates for the overlay view.
        mouthRect.origin.y += ((faceFeature.bounds.origin.y)+faceWidth*0.4);
        //mouthRect.origin.y += facePicture.bounds.size.height - (faceFeature.mouthPosition.y + faceWidth*0.2);
        //            CGImageRef imageRef = CGImageCreateWithImageInRect([facePicture CGImage], mouthRect);
        CGImageRef imageRef = CGImageCreateWithImageInRect([facePicture CGImage], cutRect);

        //////////////////////////////////////////////////////////////
        // Histogram-based smile heuristic: compare the grayscale mouth
        // crop's histogram against the previous frame's.
        //////////////////////////////////////////////////////////////

        UIImage *smileData = [UIImage imageWithCGImage:imageRef];
        smileView.image = [[SDetector detector] convertImageToGrayColor:smileData];
        // imageWithCGImage: retains the CGImage, so releasing here is safe.
        CGImageRelease(imageRef);

        NSMutableArray *histoData = [[SDetector detector] getHistogramArray:smileData];

        // NOTE(review): this write is dead — scoreLB.text is overwritten
        // with newDiffSUM further below on every pass through this branch.
        int value = [[SDetector detector] smileProcess:histoData];
        scoreLB.text = [NSString stringWithFormat:@"%d",value];

        //histogramImageView.image = [[SDetector detector] drawHistogram:histoData];

        //////////////////////////////////////////////////////////////

        // Slide the frame-to-frame histogram window forward.
        oldHistoArr = newHistoArr;
        newHistoArr = histoData;

        int oldSum=0,oldAVG=0;
        int newSum=0,newAVG=0;

        oldDiffSUM = newDiffSUM;
        newDiffSUM = 0;

        // FIX: -count returns NSUInteger; %d is the wrong specifier on
        // 64-bit.  Use %lu with an explicit cast.
        NSLog(@"[oldHistoArr count]:>>>>>>>>>>>>>%lu",(unsigned long)[oldHistoArr count]);
        NSLog(@"[newHistoArr count]:>>>>>>>>>>>>>%lu",(unsigned long)[newHistoArr count]);
        if ([oldHistoArr count]>0) {
            // Accumulate sums and the weighted absolute inter-frame
            // difference over all 256 histogram bins.
            for (int i=0; i<256; i++) 
            {
                oldSum += [[oldHistoArr objectAtIndex:i] intValue];
                newSum += [[newHistoArr objectAtIndex:i] intValue];

                newDiffSUM += 0.7*abs([[oldHistoArr objectAtIndex:i] intValue] - [[newHistoArr objectAtIndex:i] intValue]);
            }

            oldAVG = oldSum/256;
            newAVG = newSum/256;            

            // Scale factor compensating for face distance (smaller faces
            // produce smaller absolute histogram changes).
            float multi = ((float)(250-faceWidth)/250.0) /1.9 +1.0;

            // FIX: on the second processed frame oldDiffSUM is still 0, so
            // the unguarded division yielded inf — corrupting the progress
            // view and spuriously firing the auto-capture below.  Treat a
            // zero baseline as "no change yet".
            float ratio = (oldDiffSUM > 0) ? ((float)newDiffSUM/(float)oldDiffSUM) : 0.0f;

            if (ratio>2) {
                ratio = ratio*multi;
            }
            NSLog(@"FW: %f[%f]   [DIFF: %d]\t%f\t%f",faceWidth,multi,newDiffSUM,ratio,ratio/6.0);

            [progressView setProgress:ratio/6.0];

            scoreLB.text = [NSString stringWithFormat:@"%d",newDiffSUM];

            // Auto-capture when the change ratio crosses the threshold.
            if (ratio >= currentLevel && autoON) {
                NSLog(@"Capture At %f > %f", ratio,currentLevel);
                [self captureStillImage:nil];
            }
        }
        ////////////////////////////////////////////

        // Green translucent overlay marking the mouth region.
        mouth.frame = mouthRect;
        [mouth setBackgroundColor:[[UIColor greenColor] colorWithAlphaComponent:0.3]];
        [mouth setCenter:faceFeature.mouthPosition];
        mouth.layer.cornerRadius = faceWidth*0.2;
        NSLog(@"%f",faceFeature.bounds.size.height);
    }

    //NSLog(@"Lx%.2f Ly%.2f Rx%.2f Ry%.2f Mx%.2f My%.2f", faceFeature.leftEyePosition.x, faceFeature.leftEyePosition.y, faceFeature.rightEyePosition.x, faceFeature.rightEyePosition.y, faceFeature.mouthPosition.x, faceFeature.mouthPosition.y);

}
4

回答 2 件