
I'm having a problem cropping an image with the OpenCV library on an actual iPhone device.

I have an image with a selected region, and I want to apply a perspective transform to that region of the image. This works correctly in the simulator, but on the iPhone the new image is not mapped to the quadrilateral, and the new image also comes out blue.

Here is my code (a rough example of how I call it follows after the listing):

+ (IplImage *)CreateIplImageFromUIImage:(UIImage *)image {
    // Getting CGImage from UIImage
    CGImageRef imageRef = image.CGImage;

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    // Creating temporary IplImage for drawing
    IplImage *iplimage = cvCreateImage(
                                       cvSize(image.size.width,image.size.height), IPL_DEPTH_8U, 4
                                       );
    // Creating CGContext for temporary IplImage
    CGContextRef contextRef = CGBitmapContextCreate(
                                                    iplimage->imageData, iplimage->width, iplimage->height,
                                                    iplimage->depth, iplimage->widthStep,
                                                    colorSpace, kCGImageAlphaPremultipliedLast|kCGBitmapByteOrderDefault
                                                    );
    // Drawing CGImage to CGContext
    CGContextDrawImage(
                       contextRef,
                       CGRectMake(0, 0, image.size.width, image.size.height),
                       imageRef
                       );
    CGContextRelease(contextRef);
    CGColorSpaceRelease(colorSpace);

    // Creating result IplImage
    IplImage *ret = cvCreateImage(cvGetSize(iplimage), IPL_DEPTH_8U, 3);
    cvCvtColor(iplimage, ret, CV_RGBA2BGR);
    cvReleaseImage(&iplimage);

    return ret;
}

+ (UIImage *)UIImageFromIplImage:(IplImage *)img {
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

    // Converting to a 4-channel RGBA copy for CGImage creation
    IplImage *image = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 4);
    cvCvtColor(img, image, CV_BGR2RGBA);

    // Allocating the buffer for CGImage
    NSData *data = [NSData dataWithBytes:image->imageData length:image->imageSize];
    CGDataProviderRef provider = CGDataProviderCreateWithCFData((CFDataRef)data);
    // Creating CGImage from the IplImage pixel data
    CGImageRef imageRef = CGImageCreate(
                                        image->width, image->height,
                                        image->depth, image->depth * image->nChannels, image->widthStep,
                                        colorSpace, kCGImageAlphaNone|kCGBitmapByteOrderDefault,
                                        provider, NULL, false, kCGRenderingIntentDefault
                                        );

    // Getting UIImage from CGImage
    UIImage *ret = [UIImage imageWithCGImage:imageRef];
    CGImageRelease(imageRef);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);

    return ret;
}

+ (UIImage *)perspectiveTransform: (UIImage*) originalImage :(CGFloat) h :(CGFloat) w :(CGPoint) point1 :(CGPoint) point2 :(CGPoint) point3 :(CGPoint) point4
{

    CvPoint2D32f srcQuad[4];

    srcQuad[0].x = point1.x;
    srcQuad[0].y = point1.y;
    srcQuad[1].x = point2.x;
    srcQuad[1].y = point2.y;
    srcQuad[2].x = point3.x;
    srcQuad[2].y = point3.y;
    srcQuad[3].x = point4.x;
    srcQuad[3].y = point4.y;

    IplImage *src = [self CreateIplImageFromUIImage:originalImage];

    IplImage *dst = cvCreateImage(cvGetSize(src),
                                  src->depth,
                                  src->nChannels);

    cvZero(dst);

    CGFloat width = src->width;
    CGFloat height = src->height;

    CvMat* mmat = cvCreateMat(3, 3, CV_32FC1);

    CvPoint2D32f *c1 = (CvPoint2D32f *)malloc(4 * sizeof(CvPoint2D32f));
    CvPoint2D32f *c2 = (CvPoint2D32f *)malloc(4 * sizeof(CvPoint2D32f));

    c1[0].x = round((width/w)*srcQuad[0].x);   c1[0].y = round((height/h)*srcQuad[0].y);
    c1[1].x = round((width/w)*srcQuad[1].x);   c1[1].y = round((height/h)*srcQuad[1].y);
    c1[2].x = round((width/w)*srcQuad[2].x);   c1[2].y = round((height/h)*srcQuad[2].y);
    c1[3].x = round((width/w)*srcQuad[3].x);   c1[3].y = round((height/h)*srcQuad[3].y);

    c2[0].x = 0;            c2[0].y = 0;
    c2[1].x = width - 1;    c2[1].y = 0;
    c2[2].x = 0;            c2[2].y = height - 1;
    c2[3].x = width - 1;    c2[3].y = height - 1;

    mmat = cvGetPerspectiveTransform(c1, c2, mmat);
    free(c1);
    free(c2);

    cvWarpPerspective(src, dst, mmat, CV_INTER_LINEAR+CV_WARP_FILL_OUTLIERS, cvScalarAll(0));

    cvReleaseImage(&src);
    cvReleaseMat(&mmat);

    UIImage *newImage = [self UIImageFromIplImage:dst];
    cvReleaseImage(&dst);

    return newImage;
}
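
For context, this is roughly how I call the method. The class name OpenCVHelper, the asset name, the view size, and the concrete corner values below are just illustrative placeholders, not my exact code. The points are picked in a 320 x 480 view, so I pass that view's height and width as h and w, and the corner order matches the destination quad inside perspectiveTransform: top-left, top-right, bottom-left, bottom-right.

    // Illustrative call site -- OpenCVHelper, the asset name and the concrete
    // corner values are placeholders, not the exact code from my project.
    UIImage *original = [UIImage imageNamed:@"sample.jpg"];

    // Size of the view in which the four corners were selected;
    // the method rescales the points from this size to the image's pixel size.
    CGFloat viewWidth  = 320.0f;
    CGFloat viewHeight = 480.0f;

    // Corner order expected by the destination quad in perspectiveTransform:
    // point1 = top-left, point2 = top-right, point3 = bottom-left, point4 = bottom-right
    CGPoint topLeft     = CGPointMake( 40.0f,  60.0f);
    CGPoint topRight    = CGPointMake(280.0f,  70.0f);
    CGPoint bottomLeft  = CGPointMake( 35.0f, 400.0f);
    CGPoint bottomRight = CGPointMake(290.0f, 410.0f);

    UIImage *warped = [OpenCVHelper perspectiveTransform:original
                                                        :viewHeight
                                                        :viewWidth
                                                        :topLeft
                                                        :topRight
                                                        :bottomLeft
                                                        :bottomRight];
    // Displaying the result (imageView is a placeholder outlet)
    self.imageView.image = warped;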

Thanks for your help!
