3

プロジェクトにopencvをインストールするために次のリンクを使用しますが、ターミナルでコマンドを生成する方法がわからないので、誰か助けてもらえますか?? http://aptogo.co.uk/2011/09/opencv-framework-for-ios/

4

6 に答える 6

18

iOS で OpenCV を使用する場合は、OpenCV が提供する公式フレームワークを使用する必要があります (バージョン 2.4.2 以降)。

ここで最新バージョンを入手してください: OpenCV for iOS、プロジェクトにドロップし、これをプロジェクトのプレフィックスに含めます。

ExampleApp-Prefix.pch:

#ifdef __cplusplus
    // OpenCV's headers are C++; only translation units compiled as
    // Objective-C++ (.mm files) can parse them. Plain .m files would fail
    // to compile without this guard, which is why the import lives in the
    // prefix header behind __cplusplus.
    #import <opencv2/opencv.hpp>
#endif

OpenCV で使用するには、UIImage を cv::Mat に「変換」する必要もあります。

UIImageCVMatConverter.h:

//
//  UIImageCVMatConverter.h
//

#import <Foundation/Foundation.h>

/// Utility class (class methods only) that converts between UIImage and
/// OpenCV's cv::Mat, and normalizes camera-image orientation/size.
/// Must be compiled as Objective-C++ (.mm) because of the cv::Mat types.
@interface UIImageCVMatConverter : NSObject {

}

/// Creates a UIImage from a cv::Mat (grayscale if elemSize()==1, RGB otherwise).
+ (UIImage *)UIImageFromCVMat:(cv::Mat)cvMat;
/// Variant that renders using the colorspace/dimensions of `image`.
+ (UIImage *)UIImageFromCVMat:(cv::Mat)cvMat withUIImage:(UIImage*)image;
/// Renders `image` into a 4-channel (RGBA) cv::Mat.
+ (cv::Mat)cvMatFromUIImage:(UIImage *)image;
/// Renders `image` into a single-channel grayscale cv::Mat.
+ (cv::Mat)cvMatGrayFromUIImage:(UIImage *)image;
/// Downscales to <=640px and bakes EXIF orientation in (front camera rules).
+ (UIImage *)scaleAndRotateImageFrontCamera:(UIImage *)image;
/// Downscales to <=640px and bakes EXIF orientation in (back camera rules).
+ (UIImage *)scaleAndRotateImageBackCamera:(UIImage *)image;

@end

UIImageCVMatConverter.mm:

//
//  UIImageCVMatConverter.mm
//

#import "UIImageCVMatConverter.h"

@implementation UIImageCVMatConverter

+ (UIImage *)UIImageFromCVMat:(cv::Mat)cvMat withUIImage:(UIImage*)image;
{
  // NOTE(review): `cvMat` is never read — this method only redraws `image`
  // through a fresh RGBA bitmap context. Presumably the mat was meant to be
  // composited here; confirm against callers before relying on this method.
  //
  // CGImageGetColorSpace follows the Core Graphics "Get" rule: the colorspace
  // is owned by the CGImage. It must be retained here to balance the
  // CGColorSpaceRelease below (the original code over-released it).
  CGColorSpaceRef colorSpace = CGColorSpaceRetain( CGImageGetColorSpace( image.CGImage ) );
    CGFloat cols = image.size.width;
    CGFloat rows = image.size.height;
    CGFloat widthStep = image.size.width;
    // 8 bits/component, 4 bytes/pixel; alpha byte present but skipped (opaque).
    CGContextRef contextRef = CGBitmapContextCreate( NULL, cols, rows, 8, widthStep*4, colorSpace, kCGImageAlphaNoneSkipLast | kCGBitmapByteOrderDefault );
    CGContextDrawImage( contextRef, CGRectMake(0, 0, cols, rows), image.CGImage );
    CGContextSetRGBStrokeColor( contextRef, 1, 0, 0, 1 );
    CGImageRef cgImage = CGBitmapContextCreateImage( contextRef );
    UIImage* result = [UIImage imageWithCGImage:cgImage];
    // Release everything created (or retained) above — Create/Get rules.
    CGImageRelease( cgImage );
    CGContextRelease( contextRef );
    CGColorSpaceRelease( colorSpace );
    return result;
}

+(UIImage *)UIImageFromCVMat:(cv::Mat)cvMat
{
    // Wrap the mat's pixel buffer in an NSData so Core Graphics can read it.
    NSData *pixelData = [NSData dataWithBytes:cvMat.data
                                       length:cvMat.elemSize() * cvMat.total()];

    // One byte per element means a grayscale mat; anything larger is color.
    CGColorSpaceRef colorSpace = ( cvMat.elemSize() == 1 )
        ? CGColorSpaceCreateDeviceGray()
        : CGColorSpaceCreateDeviceRGB();

    CGDataProviderRef provider = CGDataProviderCreateWithCFData( (__bridge CFDataRef)pixelData );
    CGImageRef imageRef = CGImageCreate( cvMat.cols,            // width
                                         cvMat.rows,            // height
                                         8,                     // bits per component
                                         8 * cvMat.elemSize(),  // bits per pixel
                                         cvMat.step[0],         // bytes per row
                                         colorSpace,
                                         kCGImageAlphaNone|kCGBitmapByteOrderDefault,
                                         provider,
                                         NULL,                  // no decode array
                                         false,                 // no interpolation
                                         kCGRenderingIntentDefault );
    UIImage *uiImage = [UIImage imageWithCGImage:imageRef];

    // Release everything we created above (Create rule).
    CGImageRelease( imageRef );
    CGDataProviderRelease( provider );
    CGColorSpaceRelease( colorSpace );
    return uiImage;
}

+ (cv::Mat)cvMatFromUIImage:(UIImage *)image
{
    // Converts `image` into a 4-channel (RGBA, alpha skipped) cv::Mat by
    // rendering the CGImage into a bitmap context backed by the mat's own
    // pixel buffer.
    //
    // CGImageGetColorSpace follows the Core Graphics "Get" rule: the
    // colorspace is owned by the CGImage. Retain it to balance the
    // CGColorSpaceRelease below (the original code over-released it, which
    // can corrupt the CGImage's colorspace and crash later).
    CGColorSpaceRef colorSpace = CGColorSpaceRetain( CGImageGetColorSpace( image.CGImage ) );
    CGFloat cols = image.size.width;
    CGFloat rows = image.size.height;
    cv::Mat cvMat( rows, cols, CV_8UC4 );
    // The context writes straight into cvMat.data; step[0] is bytes per row.
    CGContextRef contextRef = CGBitmapContextCreate( cvMat.data, cols, rows, 8, cvMat.step[0], colorSpace, kCGImageAlphaNoneSkipLast | kCGBitmapByteOrderDefault );
    CGContextDrawImage( contextRef, CGRectMake(0, 0, cols, rows), image.CGImage );
    CGContextRelease( contextRef );
    CGColorSpaceRelease( colorSpace );
    return cvMat;
}

+ (cv::Mat)cvMatGrayFromUIImage:(UIImage *)image
{
  // Returns a single-channel grayscale cv::Mat for `image`.
  cv::Mat cvMat = [UIImageCVMatConverter cvMatFromUIImage:image];
  cv::Mat grayMat;
    if ( cvMat.channels() == 1 ) {
        // Already single-channel — return as-is (shares the same data).
        grayMat = cvMat;
  }
    else {
        // cvMatFromUIImage produces RGBA byte order (kCGImageAlphaNoneSkipLast,
        // default byte order), so the correct conversion code is CV_RGBA2GRAY.
        // The original CV_BGR2GRAY applied the blue/red luminance weights to
        // the wrong channels, yielding subtly wrong grayscale values.
        // cvtColor allocates the destination itself, so no pre-allocation needed.
        cv::cvtColor( cvMat, grayMat, CV_RGBA2GRAY );
    }
  return grayMat;
}

+ (UIImage *)scaleAndRotateImageBackCamera:(UIImage *)image
{
  // Downscales `image` so neither side exceeds kMaxResolution and bakes the
  // EXIF orientation flag into the pixel data, returning an "Up"-oriented
  // image. Back-camera variant: Right and RightMirrored get distinct
  // transforms (compare with the front-camera version, which merges them).
  static int kMaxResolution = 640;
  CGImageRef imgRef = image.CGImage;
  CGFloat width = CGImageGetWidth( imgRef );
  CGFloat height = CGImageGetHeight( imgRef );
  CGAffineTransform transform = CGAffineTransformIdentity;
  CGRect bounds = CGRectMake( 0, 0, width, height );
  // Shrink the target bounds proportionally if either dimension exceeds the cap.
  if ( width > kMaxResolution || height > kMaxResolution ) {
    CGFloat ratio = width/height;
    if ( ratio > 1 ) {
      bounds.size.width = kMaxResolution;
      bounds.size.height = bounds.size.width / ratio;
    }
        else {
      bounds.size.height = kMaxResolution;
      bounds.size.width = bounds.size.height * ratio;
    }
  }
  CGFloat scaleRatio = bounds.size.width / width;
  CGSize imageSize = CGSizeMake( CGImageGetWidth(imgRef), CGImageGetHeight(imgRef) );
  CGFloat boundHeight;
  UIImageOrientation orient = image.imageOrientation;
  // Build the affine transform that maps the stored pixels into upright
  // orientation. The Left/Right cases also swap the bounds' width/height
  // because the rendered image is rotated 90 degrees.
  switch( orient ) {
    case UIImageOrientationUp:
      transform = CGAffineTransformIdentity;
      break;
    case UIImageOrientationUpMirrored:
      transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0);
      transform = CGAffineTransformScale(transform, -1.0, 1.0);
      break;
    case UIImageOrientationDown:
      transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height);
      transform = CGAffineTransformRotate(transform, M_PI);
      break;
    case UIImageOrientationDownMirrored:
      transform = CGAffineTransformMakeTranslation(0.0, imageSize.height);
      transform = CGAffineTransformScale(transform, 1.0, -1.0);
      break;
    case UIImageOrientationLeftMirrored:
      boundHeight = bounds.size.height;
      bounds.size.height = bounds.size.width;
      bounds.size.width = boundHeight;
      transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.width);
      transform = CGAffineTransformScale(transform, -1.0, 1.0);
      transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
      break;
    case UIImageOrientationLeft:
      boundHeight = bounds.size.height;
      bounds.size.height = bounds.size.width;
      bounds.size.width = boundHeight;
      transform = CGAffineTransformMakeTranslation(0.0, imageSize.width);
      transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
      break;
    case UIImageOrientationRightMirrored:
      boundHeight = bounds.size.height;
      bounds.size.height = bounds.size.width;
      bounds.size.width = boundHeight;
      transform = CGAffineTransformMakeScale(-1.0, 1.0);
      transform = CGAffineTransformRotate(transform, M_PI / 2.0);
      break;
    case UIImageOrientationRight:
      boundHeight = bounds.size.height;
      bounds.size.height = bounds.size.width;
      bounds.size.width = boundHeight;
      transform = CGAffineTransformMakeTranslation(imageSize.height, 0.0);
      transform = CGAffineTransformRotate(transform, M_PI / 2.0);
      break;
    default:
      // Programmer error: unknown orientation value.
      [NSException raise:NSInternalInconsistencyException format:@"Invalid image orientation"];
  }
  // Render the source through the transform into a context of the target size.
  // The CTM flip/scale differs for 90-degree-rotated orientations.
  UIGraphicsBeginImageContext( bounds.size );
  CGContextRef context = UIGraphicsGetCurrentContext();
  if ( orient == UIImageOrientationRight || orient == UIImageOrientationLeft ) {
    CGContextScaleCTM( context, -scaleRatio, scaleRatio );
    CGContextTranslateCTM( context, -height, 0 );
  }
    else {
    CGContextScaleCTM( context, scaleRatio, -scaleRatio );
    CGContextTranslateCTM( context, 0, -height );
  }
  CGContextConcatCTM( context, transform );
  CGContextDrawImage( UIGraphicsGetCurrentContext(), CGRectMake(0, 0, width, height), imgRef );
  UIImage *returnImage = UIGraphicsGetImageFromCurrentImageContext();
  UIGraphicsEndImageContext();
  return returnImage;
}

+ (UIImage *)scaleAndRotateImageFrontCamera:(UIImage *)image
{
  // Downscales `image` so neither side exceeds kMaxResolution and bakes the
  // EXIF orientation flag into the pixel data. Front-camera variant: note
  // below that UIImageOrientationRight falls through to the RightMirrored
  // transform, which compensates for the front camera's mirrored sensor.
  static int kMaxResolution = 640;
  CGImageRef imgRef = image.CGImage;
  CGFloat width = CGImageGetWidth(imgRef);
  CGFloat height = CGImageGetHeight(imgRef);
  CGAffineTransform transform = CGAffineTransformIdentity;
  CGRect bounds = CGRectMake( 0, 0, width, height);
  // Shrink the target bounds proportionally if either dimension exceeds the cap.
  if (width > kMaxResolution || height > kMaxResolution) {
    CGFloat ratio = width/height;
    if (ratio > 1) {
      bounds.size.width = kMaxResolution;
      bounds.size.height = bounds.size.width / ratio;
    } else {
      bounds.size.height = kMaxResolution;
      bounds.size.width = bounds.size.height * ratio;
    }
  }

  CGFloat scaleRatio = bounds.size.width / width;
  CGSize imageSize = CGSizeMake(CGImageGetWidth(imgRef), CGImageGetHeight(imgRef));
  CGFloat boundHeight;
  UIImageOrientation orient = image.imageOrientation;
  // Build the affine transform that maps stored pixels to upright orientation;
  // 90-degree cases also swap the bounds' width/height.
  switch(orient) {
    case UIImageOrientationUp:
      transform = CGAffineTransformIdentity;
      break;
    case UIImageOrientationUpMirrored:
      transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0);
      transform = CGAffineTransformScale(transform, -1.0, 1.0);
      break;
    case UIImageOrientationDown:
      transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height);
      transform = CGAffineTransformRotate(transform, M_PI);
      break;
    case UIImageOrientationDownMirrored:
      transform = CGAffineTransformMakeTranslation(0.0, imageSize.height);
      transform = CGAffineTransformScale(transform, 1.0, -1.0);
      break;
    case UIImageOrientationLeftMirrored:
      boundHeight = bounds.size.height;
      bounds.size.height = bounds.size.width;
      bounds.size.width = boundHeight;
      transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.width);
      transform = CGAffineTransformScale(transform, -1.0, 1.0);
      transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
      break;
    case UIImageOrientationLeft:
      boundHeight = bounds.size.height;
      bounds.size.height = bounds.size.width;
      bounds.size.width = boundHeight;
      transform = CGAffineTransformMakeTranslation(0.0, imageSize.width);
      transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
      break;
        // Right deliberately shares the mirrored transform (front camera).
        case UIImageOrientationRight:
    case UIImageOrientationRightMirrored:
      boundHeight = bounds.size.height;
      bounds.size.height = bounds.size.width;
      bounds.size.width = boundHeight;
      transform = CGAffineTransformMakeScale(-1.0, 1.0);
      transform = CGAffineTransformRotate(transform, M_PI / 2.0);
      break;
        default:
      // Programmer error: unknown orientation value.
      [NSException raise:NSInternalInconsistencyException format:@"Invalid image orientation"];
  }
  // Render the source through the transform; the CTM flip/scale differs for
  // 90-degree-rotated orientations.
  UIGraphicsBeginImageContext( bounds.size );
  CGContextRef context = UIGraphicsGetCurrentContext();
  if ( orient == UIImageOrientationRight || orient == UIImageOrientationLeft ) {
    CGContextScaleCTM(context, -scaleRatio, scaleRatio);
    CGContextTranslateCTM(context, -height, 0);
  }
    else {
    CGContextScaleCTM(context, scaleRatio, -scaleRatio);
    CGContextTranslateCTM(context, 0, -height);
  }
  CGContextConcatCTM( context, transform );
  CGContextDrawImage( UIGraphicsGetCurrentContext(), CGRectMake(0, 0, width, height), imgRef );
  UIImage *returnImage = UIGraphicsGetImageFromCurrentImageContext();
  UIGraphicsEndImageContext();
  return returnImage;
}

@end

ビュー コントローラーの実装ファイルの名前を *.mm に変更します

MyViewController.m -> MyViewController.mm

そして、 View Controller にUIImageCVMatConverterをインポートします。

#import "UIImageCVMatConverter.h"

これで、View Controller 内で Objective-C と C++ OpenCV コードを混在させることができます。

cv::Mat img = [UIImageCVMatConverter cvMatFromUIImage:[UIImage imageNamed:@"my_image.png"]];
...

楽しむ!

于 2012-08-31T13:40:10.100 に答える
2

@moosgummi が言うように @Nims の方法で動作しますが、私は次の手順も実行しました。

  • ライブラリlibc++.dylibを追加します
  • 「ビルド設定 (Build Settings)」→「Apple LLVM コンパイラ X.X - 言語」→「ソースのコンパイル言語 (Compile Sources As)」を「Objective-C++」に設定します
于 2013-03-02T11:54:12.977 に答える
1

これらすべてのクラス メソッドを記述することも、単純に ios.h ファイルを含めることもできます。画像処理用に既に記述された 2 つのメソッドがあります。

これが私のコードです。

すべてのコメントで申し訳ありませんが、私の研究の進捗状況を示すためにそれらを含めます。

#import "JmBViewController.h"

// Private class extension for JmBViewController (no private API declared yet).
@interface JmBViewController ()

@end

@implementation JmBViewController

- (void)viewDidLoad {
// Loads a bundled test image, converts it to cv::Mat, runs a Gaussian blur
// followed by Hough circle detection, draws the detected circles onto the
// image, and displays the result in _imgView. The commented-out sections
// below are the author's earlier experiments, kept to show research progress.
[super viewDidLoad];
_imgtest = [UIImage imageNamed:@"IMG_0424.PNG"];

cv::Mat cvImage;
// UIImageToMat comes from OpenCV's <opencv2/highgui/ios.h> helpers.
UIImageToMat(_imgtest, cvImage);
if (!cvImage.empty()) {
    cv::Mat gray;
  //  cv::Mat filteredMat;
    cv::cvtColor(cvImage, gray, CV_BGRA2GRAY);
   // cv::GaussianBlur(gray, gray, cv::Size(5, 5), 1.2, 1.2);
    cv::vector<cv::Vec3f> circles;

    /*
    for(size_t i = 0; i < circles.size(); i++)
    {
        cv::Point center((cvRound(circles[i][0]), cvRound(circles[i][1])));
        int radius = cvRound(circles[i][2]);
        cv::circle(gray, center, 3, cv::Scalar(0,255,0));
        cv::circle(gray, center, radius, cv::Scalar(0,0,255));
    }
   */

 //  for ( int i = 1; i < 15; i = i + 2 )

        // Blur before Hough to suppress noise-induced false circles.
        cv::GaussianBlur(gray, gray, cv::Size(9, 9), 1.5, 1.5);

        cv::Mat edges;
        cv::Canny(gray, edges, 0, 50);
        //gray.setTo(cv::Scalar::all(0));
        //gray.setTo(cv::Scalar::all(255), edges);
        // Parameters: dp=1, minDist=30, Canny high=50, accumulator=20,
        // radius range 10..25 px. NOTE(review): these are tuned for this
        // particular test image — confirm for other inputs.
        cv::HoughCircles(gray, circles, CV_HOUGH_GRADIENT, 1, 30, 50, 20, 10, 25);
        for(size_t i = 0; i < circles.size(); i++)
        {
            cv::Point center(cvRound(circles[i][0]), cvRound(circles[i][1]));
            int radius = cvRound(circles[i][2]);
            cv::circle(cvImage, center, 5, cv::Scalar::all(200), -1, 8, 0 );//center
            cv::circle(cvImage, center, radius, cv::Scalar::all(255), 3, 8, 0 );//diamter
        NSLog(@"Circles: %ld", i+1);

       // cv::imshow(&"circles i " [ i], gray);
    }


    // MatToUIImage also comes from OpenCV's iOS helper header.
    _imgView.image = MatToUIImage(cvImage);
    }
    /*
cv::Mat cvImage;
cv::Mat grey;
cv::Mat filteredMat;
cv::vector<cv::Vec3f> circles;
// cv::cvtColor(_imgtest, cvImage, CV_BGR2GRAY);
cv::threshold(grey, filteredMat, 100, 255, CV_THRESH_BINARY);
[UIImageCVMatConverter cvMatGrayFromUIImage:_imgtest];
//  cv::cvtColor(cvImage, grey, CV_RGBA2GRAY);
// UIImageToMat(_imgtest, cvImage);
cv::HoughCircles(cvImage, circles, CV_HOUGH_GRADIENT, 1, 50);
//  MatToUIImage(cvImage);
_imgView.image = [UIImageCVMatConverter UIImageFromCVMat:cvImage];
_imgView.image = MatToUIImage(cvImage);
*/

// Do any additional setup after loading the view, typically from a nib.
}

- (void)didReceiveMemoryWarning
{
// Standard UIKit memory-warning hook; nothing cached here to release yet.
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
/*

UIImage* MatToUIImage(const cv::Mat& image) {
NSData *data = [NSData dataWithBytes:image.data length:image.elemSize()*image.total()];
CGColorSpaceRef colorSpace;
if (image.elemSize() == 1) {
    colorSpace = CGColorSpaceCreateDeviceGray();
}else { colorSpace = CGColorSpaceCreateDeviceRGB();
 }
CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);

CGImageRef imageRef = CGImageCreate(image.cols, image.rows, 8, 8*image.elemSize(), image.step.p[0], colorSpace, kCGImageAlphaNone|kCGBitmapByteOrderDefault, provider, NULL, FALSE, kCGRenderingIntentDefault);
UIImage *finalImage = [UIImage imageWithCGImage:imageRef];

return finalImage;
 }
 */


@end

これが役立つことを願っています!

Viewer ヘッダー ファイルに含まれるすべての #include を次に示します。

#import <UIKit/UIKit.h>
//  #import "UIImageCVMatConverter.h"
#import <opencv2/highgui/highgui_c.h>
#import <opencv2/highgui/highgui.hpp>
#import <opencv2/imgproc/imgproc_c.h>
#import <opencv2/imgproc/imgproc.hpp>
#import <opencv2/highgui/ios.h>
#import <opencv2/core/core_c.h>
#import <opencv2/core/core.hpp>

// View controller demonstrating OpenCV circle detection on a bundled image.
@interface JmBViewController : UIViewController
// Image view (from the storyboard) that displays the annotated result.
@property (weak, nonatomic) IBOutlet UIImageView *imgView;
// Source test image assigned in viewDidLoad. NOTE(review): a `weak` UIImage
// property has no strong owner visible here — confirm `strong` wasn't
// intended, otherwise the image may be deallocated out from under it.
@property (weak, nonatomic) UIImage *imgtest;

@end

独自のフレームワークをコンパイル・作成する必要はありません。必要なバージョンを OpenCV の Web サイトからダウンロードし、プロジェクトの「Frameworks」の下にドラッグするだけです。その際、Xcode に確認を求められたら、必ず「項目をコピー先グループのフォルダーにコピー (Copy items into destination group's folder)」を選択してください。これは、ターミナルのコマンドや CMake を一切使わずにフレームワークをプロジェクトに組み込める、私が見つけた最も簡単な方法です。

于 2013-11-28T14:46:34.633 に答える
0

リンクに記載されているように、次の場所にあるターミナル アプリケーションを開いてください。

/Applications/Utilities/Terminal.app

Mac システムで、前述のコマンドを実行します。

于 2012-08-31T12:32:22.623 に答える
0

すべての .m ファイルを .mm ファイルに変換することを忘れないでください。

于 2013-12-12T08:49:38.177 に答える
0

macports を使用して openCV ライブラリをダウンロードします。以下のリンクの指示に従ってください。

https://www.dropbox.com/s/foipmm7q9n8aaht/How%20to%20get%20OpenCV%20working%20under%20Mac%20OS%20X%20Lion%20with%20XCode%204.1%20%C2%AB%20Salem%27s%20Log.pdf

于 2012-08-31T12:47:40.373 に答える