
(Please ignore the large blocks of code below; they're only there for reference/resources in case anyone else wants to play along.)

Since Core Image's face detection routines naturally run faster on smaller images, my plan is to generate the face data using the aspectRatioThumbnail representation and then scale it up for drawing on the fullScreenImage. The reason I'm doing this is that I may have 20-30 images to process, and I want to shorten the task time.
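For reference, this is the point-mapping I have in mind, as a minimal sketch (ScalePointBetweenSizes is an illustrative helper name, not code from my project; it assumes both representations share the same aspect ratio):

    // Scale a point detected in thumbnail space up to full-screen space.
    // A single factor covers both axes because the aspect ratios match.
    static CGPoint ScalePointBetweenSizes(CGPoint point, CGSize fromSize, CGSize toSize)
    {
        CGFloat scale = toSize.width / fromSize.width;   // e.g. 480 / 90 = 5.333...
        return CGPointMake(point.x * scale, point.y * scale);
    }

    // ScalePointBetweenSizes(CGPointMake(64, 50),
    //                        CGSizeMake(90, 120),
    //                        CGSizeMake(480, 640))  =>  (341.3, 266.7)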

This may be a simple math problem, but I'm getting inaccurate results when I try to map a point from one image onto another.

90 x 120 image -- rightEyePosition: CGPoint(64, 50)

480 x 640 image -- rightEyePosition: CGPoint(331, 303)

(480 / 90) * 64 = 341.333 -- but it should be 331, right? Am I doing this wrong?

Update -- did some more testing later. Maybe the face data results just differ slightly because the images are at different resolutions? That would make sense: there is no exactly scalable relationship between the data results. I'm still wondering, though: is my scaling calculation above wrong?
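As a sanity check on the arithmetic (a worked verification against my own logs below, not app code): both sizes are 3:4, so the horizontal and vertical scale factors agree and a single multiplier is valid.

    // Scale factor is the same on both axes, so the formula itself is sound:
    //   480 / 90  = 5.333...
    //   640 / 120 = 5.333...
    //
    // The face bounds scale almost exactly:
    //   56 * 5.333 = 298.7   (detector reported 298)
    //
    // Individual feature points drift more:
    //   rightEye (64, 50) -> (341.3, 266.7)   (detector reported (331, 303))
    //   leftEye  (43, 59) -> (229.3, 314.7)   (detector reported (228, 321))
    //   mouth    (51, 31) -> (272.0, 165.3)   (detector reported (290, 156))

So the face rectangle maps cleanly between resolutions, but the eye/mouth points wander by up to a few dozen pixels, which fits the theory that the detector locates features independently at each resolution.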


Using CIDetectorAccuracyHigh     
useImageOptions:     0
------------ aspectRatioThumbnail   90.000000  120.000000 orientation: 0
2013-01-18 12:33:30.378 SeqMeTestBed[9705:907] aspectRatioThumbnail: features {
    bounds = "{{23, 16}, {56, 56}}";
    hasLeftEyePosition = 1;
    hasMouthPosition = 1;
    hasRightEyePosition = 1;
    leftEyePosition = "{43, 59}";
    mouthPosition = "{51, 31}";
    rightEyePosition = "{64, 50}";
}
------------ fullScreenImage   480.000000  640.000000 orientation: 0
2013-01-18 12:33:33.029 SeqMeTestBed[9705:907] fullScreenImage: features {
    bounds = "{{135, 81}, {298, 298}}";
    hasLeftEyePosition = 1;
    hasMouthPosition = 1;
    hasRightEyePosition = 1;
    leftEyePosition = "{228, 321}";
    mouthPosition = "{290, 156}";
    rightEyePosition = "{331, 303}";
}
------------ fullResolutionImage   640.000000  480.000000 orientation: 0
2013-01-18 12:33:35.745 SeqMeTestBed[9705:907] fullResolutionImage: features {
    bounds = "{{195, 105}, {366, 366}}";
    hasLeftEyePosition = 1;
    hasMouthPosition = 1;
    hasRightEyePosition = 1;
    leftEyePosition = "{356, 411}";
    mouthPosition = "{350, 201}";
    rightEyePosition = "{455, 400}";

// Code Used //

- (void)detectFacialFeatures
{

    NSDictionary *detectorOptions = [[NSDictionary alloc] initWithObjectsAndKeys:CIDetectorAccuracyHigh, CIDetectorAccuracy, nil];
    CIDetector* faceDetector = [CIDetector detectorOfType:CIDetectorTypeFace context:nil options:detectorOptions];


    NSDictionary *imageOptions = nil;

    UIImage *tmpImage;
    NSNumber* orientation; 
    CIImage *ciImage;
    NSArray *array;
    NSMutableDictionary* featuresDictionary;

    BOOL useImageOptions = NO;

    printf("Using CIDetectorAccuracyHigh     \n");
    printf("useImageOptions:     %d\n", useImageOptions);

    //-----------------aspectRatioThumbnail
    tmpImage = [[UIImage alloc] initWithCGImage:self.asset.aspectRatioThumbnail];
    orientation = [NSNumber numberWithInt:tmpImage.imageOrientation];

    printf("------------ aspectRatioThumbnail   %f  %f orientation: %d\n", tmpImage.size.width, tmpImage.size.height, [orientation integerValue]);
    ciImage = [CIImage imageWithCGImage:tmpImage.CGImage];
    if (ciImage == nil) printf("----------!!!aspectRatioThumbnail: ciImage is nil    \n");

    imageOptions = [NSDictionary dictionaryWithObjectsAndKeys:orientation, CIDetectorImageOrientation,
                    CIDetectorAccuracyHigh, CIDetectorAccuracy, nil];

    if (useImageOptions) {
        array = [faceDetector featuresInImage:ciImage];
    } else {
        array = [faceDetector featuresInImage:ciImage options:imageOptions];
    }

    featuresDictionary = [self convertFeaturesToDictionary:array];
    NSLog(@"aspectRatioThumbnail: features %@", featuresDictionary);

   //-----------------fullScreenImage
    tmpImage = [[UIImage alloc] initWithCGImage:self.asset.defaultRepresentation.fullScreenImage];
    orientation = [NSNumber numberWithInt:tmpImage.imageOrientation];
    printf("------------ fullScreenImage   %f  %f orientation: %d\n", tmpImage.size.width, tmpImage.size.height, [orientation integerValue]);

    ciImage = [CIImage imageWithCGImage:tmpImage.CGImage];
    if (ciImage == nil) printf("----------!!!fullScreenImage: ciImage is nil    \n");

    imageOptions = [NSDictionary dictionaryWithObjectsAndKeys:orientation, CIDetectorImageOrientation,
                    CIDetectorAccuracyHigh, CIDetectorAccuracy, nil];

    if (useImageOptions) {
        array = [faceDetector featuresInImage:ciImage];
    } else {
        array = [faceDetector featuresInImage:ciImage options:imageOptions];
    }

    featuresDictionary = [self convertFeaturesToDictionary:array];
    NSLog(@"fullScreenImage: features %@", featuresDictionary);

    //-----------------fullResolutionImage
    tmpImage = [[UIImage alloc] initWithCGImage:self.asset.defaultRepresentation.fullResolutionImage];
    orientation = [NSNumber numberWithInt:tmpImage.imageOrientation];

    printf("------------ fullResolutionImage   %f  %f orientation: %d\n", tmpImage.size.width, tmpImage.size.height, [orientation integerValue]);

    ciImage = [CIImage imageWithCGImage:tmpImage.CGImage];
    if (ciImage == nil) printf("----------!!!fullResolutionImage: ciImage is nil    \n");

    imageOptions = [NSDictionary dictionaryWithObjectsAndKeys:orientation, CIDetectorImageOrientation,
                    CIDetectorAccuracyHigh, CIDetectorAccuracy, nil];

    if (useImageOptions) {
        array = [faceDetector featuresInImage:ciImage];
    } else {
        array = [faceDetector featuresInImage:ciImage options:imageOptions];
    }

    featuresDictionary = [self convertFeaturesToDictionary:array];
    NSLog(@"fullResolutionImage: features %@", featuresDictionary);

}
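The three blocks above differ only in which representation they process. Purely as a sketch (logFeaturesForCGImage:label:withDetector: is a hypothetical helper, not in my project), they could collapse to:

    // Hypothetical helper: run the detector on one CGImage and log the features.
    - (void)logFeaturesForCGImage:(CGImageRef)cgImage
                            label:(NSString *)label
                     withDetector:(CIDetector *)faceDetector
    {
        UIImage *tmpImage = [[UIImage alloc] initWithCGImage:cgImage];
        NSNumber *orientation = [NSNumber numberWithInt:tmpImage.imageOrientation];

        CIImage *ciImage = [CIImage imageWithCGImage:tmpImage.CGImage];
        NSDictionary *imageOptions =
            [NSDictionary dictionaryWithObjectsAndKeys:orientation, CIDetectorImageOrientation,
             CIDetectorAccuracyHigh, CIDetectorAccuracy, nil];

        NSArray *array = [faceDetector featuresInImage:ciImage options:imageOptions];
        NSLog(@"%@: features %@", label, [self convertFeaturesToDictionary:array]);
    }

    // Then detectFacialFeatures reduces to three one-liners, e.g.:
    // [self logFeaturesForCGImage:self.asset.defaultRepresentation.fullScreenImage
    //                       label:@"fullScreenImage"
    //                withDetector:faceDetector];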







- (NSMutableDictionary*)convertFeaturesToDictionary:(NSArray*)foundFaces
{
    NSMutableDictionary * faceFeatures = [[NSMutableDictionary alloc] init];

    if (foundFaces.count) {

        CIFaceFeature *face = [foundFaces objectAtIndex:0];
        NSNumber* hasMouthPosition = [NSNumber numberWithBool:face.hasMouthPosition];
        NSNumber* hasLeftEyePosition = [NSNumber numberWithBool:face.hasLeftEyePosition];
        NSNumber* hasRightEyePosition = [NSNumber numberWithBool:face.hasRightEyePosition];

        [faceFeatures setValue:hasMouthPosition forKey:@"hasMouthPosition"];
        [faceFeatures setValue:hasLeftEyePosition forKey:@"hasLeftEyePosition"];
        [faceFeatures setValue:hasRightEyePosition forKey:@"hasRightEyePosition"];

        NSString * boundRect = NSStringFromCGRect(face.bounds);
       // NSLog(@"------------boundRect %@", boundRect);
        [faceFeatures setValue:boundRect forKey:@"bounds"];

        // Check the wrapped BOOL; the NSNumber pointer itself is always non-nil.
        if ([hasMouthPosition boolValue]){
            NSString * mouthPosition = NSStringFromCGPoint(face.mouthPosition);
            [faceFeatures setValue:mouthPosition forKey:@"mouthPosition"];
        }

        if ([hasLeftEyePosition boolValue]){
            NSString * leftEyePosition = NSStringFromCGPoint(face.leftEyePosition);
            [faceFeatures setValue:leftEyePosition forKey:@"leftEyePosition"];
        }

        if ([hasRightEyePosition boolValue]){
            NSString * rightEyePosition = NSStringFromCGPoint(face.rightEyePosition);
            [faceFeatures setValue:rightEyePosition forKey:@"rightEyePosition"];
        }

    }
    return faceFeatures;
}
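One more note for anyone drawing these points later: Core Image reports feature positions with a bottom-left origin, while UIKit drawing uses a top-left origin, so the y-coordinate generally needs flipping against the image height. A minimal sketch:

    // Convert a Core Image feature point (bottom-left origin) to
    // UIKit drawing coordinates (top-left origin).
    static CGPoint FlipPointForUIKit(CGPoint ciPoint, CGFloat imageHeight)
    {
        return CGPointMake(ciPoint.x, imageHeight - ciPoint.y);
    }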