
I installed OpenCV in my project using the link below, but I don't know how to run the build commands in Terminal. Can anyone help me? http://aptogo.co.uk/2011/09/opencv-framework-for-ios/


6 Answers


If you want to use OpenCV on iOS, you should use the official framework provided by OpenCV (available since version 2.4.2).

Get the latest version here: OpenCV for iOS. Drop it into your project and include it in your project's prefix header:

ExampleApp-Prefix.pch:

#ifdef __cplusplus
    #import <opencv2/opencv.hpp>
#endif

You also have to "convert" your UIImage to cv::Mat in order to use it with OpenCV.

UIImageCVMatConverter.h:

//
//  UIImageCVMatConverter.h
//

#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>

@interface UIImageCVMatConverter : NSObject {

}

+ (UIImage *)UIImageFromCVMat:(cv::Mat)cvMat;
+ (UIImage *)UIImageFromCVMat:(cv::Mat)cvMat withUIImage:(UIImage*)image;
+ (cv::Mat)cvMatFromUIImage:(UIImage *)image;
+ (cv::Mat)cvMatGrayFromUIImage:(UIImage *)image;
+ (UIImage *)scaleAndRotateImageFrontCamera:(UIImage *)image;
+ (UIImage *)scaleAndRotateImageBackCamera:(UIImage *)image;

@end

UIImageCVMatConverter.mm:

//
//  UIImageCVMatConverter.mm
//

#import "UIImageCVMatConverter.h"

@implementation UIImageCVMatConverter

+ (UIImage *)UIImageFromCVMat:(cv::Mat)cvMat withUIImage:(UIImage*)image
{
    // Redraws the UIImage into a fresh RGB bitmap context and returns the result.
    // Note: the cvMat parameter is not actually used by this implementation.
    CGColorSpaceRef colorSpace = CGImageGetColorSpace( image.CGImage );
    CGFloat cols = image.size.width;
    CGFloat rows = image.size.height;
    CGFloat widthStep = image.size.width;
    CGContextRef contextRef = CGBitmapContextCreate( NULL, cols, rows, 8, widthStep*4, colorSpace, kCGImageAlphaNoneSkipLast | kCGBitmapByteOrderDefault );
    CGContextDrawImage( contextRef, CGRectMake(0, 0, cols, rows), image.CGImage );
    CGContextSetRGBStrokeColor( contextRef, 1, 0, 0, 1 );
    CGImageRef cgImage = CGBitmapContextCreateImage( contextRef );
    UIImage* result = [UIImage imageWithCGImage:cgImage];
    CGImageRelease( cgImage );
    CGContextRelease( contextRef );
    // colorSpace comes from a Get function (not owned), so it is not released here
    return result;
}

+(UIImage *)UIImageFromCVMat:(cv::Mat)cvMat
{
    NSData *data = [NSData dataWithBytes:cvMat.data length:cvMat.elemSize()*cvMat.total()];
    CGColorSpaceRef colorSpace;
    if ( cvMat.elemSize() == 1 ) {
        colorSpace = CGColorSpaceCreateDeviceGray();
    }
    else {
        colorSpace = CGColorSpaceCreateDeviceRGB();
    }
    CGDataProviderRef provider = CGDataProviderCreateWithCFData( (__bridge CFDataRef)data );
    CGImageRef imageRef = CGImageCreate( cvMat.cols, cvMat.rows, 8, 8 * cvMat.elemSize(), cvMat.step[0], colorSpace, kCGImageAlphaNone|kCGBitmapByteOrderDefault, provider, NULL, false, kCGRenderingIntentDefault );
    UIImage *finalImage = [UIImage imageWithCGImage:imageRef];
    CGImageRelease( imageRef );
    CGDataProviderRelease( provider );
    CGColorSpaceRelease( colorSpace );
    return finalImage;
}

+ (cv::Mat)cvMatFromUIImage:(UIImage *)image
{
    CGColorSpaceRef colorSpace = CGImageGetColorSpace( image.CGImage );
    CGFloat cols = image.size.width;
    CGFloat rows = image.size.height;
    cv::Mat cvMat( rows, cols, CV_8UC4 );
    CGContextRef contextRef = CGBitmapContextCreate( cvMat.data, cols, rows, 8, cvMat.step[0], colorSpace, kCGImageAlphaNoneSkipLast | kCGBitmapByteOrderDefault );
    CGContextDrawImage( contextRef, CGRectMake(0, 0, cols, rows), image.CGImage );
    CGContextRelease( contextRef );
    // colorSpace comes from a Get function (not owned), so it is not released here
    return cvMat;
}

+ (cv::Mat)cvMatGrayFromUIImage:(UIImage *)image
{
    cv::Mat cvMat = [UIImageCVMatConverter cvMatFromUIImage:image];
    cv::Mat grayMat;
    if ( cvMat.channels() == 1 ) {
        grayMat = cvMat;
    }
    else {
        grayMat = cv::Mat( cvMat.rows, cvMat.cols, CV_8UC1 );
        // cvMatFromUIImage: produces RGBA data, so convert with CV_RGBA2GRAY
        cv::cvtColor( cvMat, grayMat, CV_RGBA2GRAY );
    }
    return grayMat;
}

+ (UIImage *)scaleAndRotateImageBackCamera:(UIImage *)image
{
  static int kMaxResolution = 640;
  CGImageRef imgRef = image.CGImage;
  CGFloat width = CGImageGetWidth( imgRef );
  CGFloat height = CGImageGetHeight( imgRef );
  CGAffineTransform transform = CGAffineTransformIdentity;
  CGRect bounds = CGRectMake( 0, 0, width, height );
  if ( width > kMaxResolution || height > kMaxResolution ) {
    CGFloat ratio = width/height;
    if ( ratio > 1 ) {
      bounds.size.width = kMaxResolution;
      bounds.size.height = bounds.size.width / ratio;
    }
    else {
      bounds.size.height = kMaxResolution;
      bounds.size.width = bounds.size.height * ratio;
    }
  }
  CGFloat scaleRatio = bounds.size.width / width;
  CGSize imageSize = CGSizeMake( CGImageGetWidth(imgRef), CGImageGetHeight(imgRef) );
  CGFloat boundHeight;
  UIImageOrientation orient = image.imageOrientation;
  switch( orient ) {
    case UIImageOrientationUp:
      transform = CGAffineTransformIdentity;
      break;
    case UIImageOrientationUpMirrored:
      transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0);
      transform = CGAffineTransformScale(transform, -1.0, 1.0);
      break;
    case UIImageOrientationDown:
      transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height);
      transform = CGAffineTransformRotate(transform, M_PI);
      break;
    case UIImageOrientationDownMirrored:
      transform = CGAffineTransformMakeTranslation(0.0, imageSize.height);
      transform = CGAffineTransformScale(transform, 1.0, -1.0);
      break;
    case UIImageOrientationLeftMirrored:
      boundHeight = bounds.size.height;
      bounds.size.height = bounds.size.width;
      bounds.size.width = boundHeight;
      transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.width);
      transform = CGAffineTransformScale(transform, -1.0, 1.0);
      transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
      break;
    case UIImageOrientationLeft:
      boundHeight = bounds.size.height;
      bounds.size.height = bounds.size.width;
      bounds.size.width = boundHeight;
      transform = CGAffineTransformMakeTranslation(0.0, imageSize.width);
      transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
      break;
    case UIImageOrientationRightMirrored:
      boundHeight = bounds.size.height;
      bounds.size.height = bounds.size.width;
      bounds.size.width = boundHeight;
      transform = CGAffineTransformMakeScale(-1.0, 1.0);
      transform = CGAffineTransformRotate(transform, M_PI / 2.0);
      break;
    case UIImageOrientationRight:
      boundHeight = bounds.size.height;
      bounds.size.height = bounds.size.width;
      bounds.size.width = boundHeight;
      transform = CGAffineTransformMakeTranslation(imageSize.height, 0.0);
      transform = CGAffineTransformRotate(transform, M_PI / 2.0);
      break;
    default:
      [NSException raise:NSInternalInconsistencyException format:@"Invalid image orientation"];
  }
  UIGraphicsBeginImageContext( bounds.size );
  CGContextRef context = UIGraphicsGetCurrentContext();
  if ( orient == UIImageOrientationRight || orient == UIImageOrientationLeft ) {
    CGContextScaleCTM( context, -scaleRatio, scaleRatio );
    CGContextTranslateCTM( context, -height, 0 );
  }
  else {
    CGContextScaleCTM( context, scaleRatio, -scaleRatio );
    CGContextTranslateCTM( context, 0, -height );
  }
  CGContextConcatCTM( context, transform );
  CGContextDrawImage( UIGraphicsGetCurrentContext(), CGRectMake(0, 0, width, height), imgRef );
  UIImage *returnImage = UIGraphicsGetImageFromCurrentImageContext();
  UIGraphicsEndImageContext();
  return returnImage;
}

+ (UIImage *)scaleAndRotateImageFrontCamera:(UIImage *)image
{
  static int kMaxResolution = 640;
  CGImageRef imgRef = image.CGImage;
  CGFloat width = CGImageGetWidth(imgRef);
  CGFloat height = CGImageGetHeight(imgRef);
  CGAffineTransform transform = CGAffineTransformIdentity;
  CGRect bounds = CGRectMake( 0, 0, width, height);
  if (width > kMaxResolution || height > kMaxResolution) {
    CGFloat ratio = width/height;
    if (ratio > 1) {
      bounds.size.width = kMaxResolution;
      bounds.size.height = bounds.size.width / ratio;
    } else {
      bounds.size.height = kMaxResolution;
      bounds.size.width = bounds.size.height * ratio;
    }
  }

  CGFloat scaleRatio = bounds.size.width / width;
  CGSize imageSize = CGSizeMake(CGImageGetWidth(imgRef), CGImageGetHeight(imgRef));
  CGFloat boundHeight;
  UIImageOrientation orient = image.imageOrientation;
  switch(orient) {
    case UIImageOrientationUp:
      transform = CGAffineTransformIdentity;
      break;
    case UIImageOrientationUpMirrored:
      transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0);
      transform = CGAffineTransformScale(transform, -1.0, 1.0);
      break;
    case UIImageOrientationDown:
      transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height);
      transform = CGAffineTransformRotate(transform, M_PI);
      break;
    case UIImageOrientationDownMirrored:
      transform = CGAffineTransformMakeTranslation(0.0, imageSize.height);
      transform = CGAffineTransformScale(transform, 1.0, -1.0);
      break;
    case UIImageOrientationLeftMirrored:
      boundHeight = bounds.size.height;
      bounds.size.height = bounds.size.width;
      bounds.size.width = boundHeight;
      transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.width);
      transform = CGAffineTransformScale(transform, -1.0, 1.0);
      transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
      break;
    case UIImageOrientationLeft:
      boundHeight = bounds.size.height;
      bounds.size.height = bounds.size.width;
      bounds.size.width = boundHeight;
      transform = CGAffineTransformMakeTranslation(0.0, imageSize.width);
      transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
      break;
    case UIImageOrientationRight:
    case UIImageOrientationRightMirrored:
      boundHeight = bounds.size.height;
      bounds.size.height = bounds.size.width;
      bounds.size.width = boundHeight;
      transform = CGAffineTransformMakeScale(-1.0, 1.0);
      transform = CGAffineTransformRotate(transform, M_PI / 2.0);
      break;
    default:
      [NSException raise:NSInternalInconsistencyException format:@"Invalid image orientation"];
  }
  UIGraphicsBeginImageContext( bounds.size );
  CGContextRef context = UIGraphicsGetCurrentContext();
  if ( orient == UIImageOrientationRight || orient == UIImageOrientationLeft ) {
    CGContextScaleCTM(context, -scaleRatio, scaleRatio);
    CGContextTranslateCTM(context, -height, 0);
  }
  else {
    CGContextScaleCTM(context, scaleRatio, -scaleRatio);
    CGContextTranslateCTM(context, 0, -height);
  }
  CGContextConcatCTM( context, transform );
  CGContextDrawImage( UIGraphicsGetCurrentContext(), CGRectMake(0, 0, width, height), imgRef );
  UIImage *returnImage = UIGraphicsGetImageFromCurrentImageContext();
  UIGraphicsEndImageContext();
  return returnImage;
}

@end

Rename your view controller's implementation file to *.mm:

MyViewController.m -> MyViewController.mm

and import UIImageCVMatConverter in your view controller:

#import "UIImageCVMatConverter.h"

Now you can mix Objective-C and C++ OpenCV code in your view controller:

cv::Mat img = [UIImageCVMatConverter cvMatFromUIImage:[UIImage imageNamed:@"my_image.png"]];
...
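
For example, a complete round trip (UIImage in, OpenCV processing, UIImage out) could look like the following sketch. The applyGrayscaleFilter method name and the imageView outlet are illustrative placeholders, not part of the original code:

// In a view controller implementation file with the .mm extension
#import "UIImageCVMatConverter.h"

- (void)applyGrayscaleFilter
{
    // UIImage -> cv::Mat; the converter above returns a 4-channel RGBA matrix (CV_8UC4)
    UIImage *input = [UIImage imageNamed:@"my_image.png"];
    cv::Mat rgba = [UIImageCVMatConverter cvMatFromUIImage:input];

    // Any OpenCV call works here; a grayscale conversion as an example
    cv::Mat gray;
    cv::cvtColor(rgba, gray, CV_RGBA2GRAY);

    // cv::Mat -> UIImage; single-channel mats are mapped to a gray color space
    self.imageView.image = [UIImageCVMatConverter UIImageFromCVMat:gray];
}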

Have fun!

Answered 2012-08-31T13:40:10.100

@Nims, as @moosgummi said, it works, but I also did the following steps:

  • Add the libc++.dylib library
  • In Build Settings → "Apple LLVM compiler XX - Language" → set "Compile Sources As" to Objective-C++
Answered 2013-03-02T11:54:12.977

You can write all these class methods yourself, or you can simply include the ios.h file, which already provides two methods for converting between UIImage and cv::Mat (see the sketch below).
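
For reference, a minimal sketch of those two helpers on their own; the asset name is a placeholder:

#import <opencv2/highgui/ios.h>   // declares UIImageToMat() and MatToUIImage()

UIImage *source = [UIImage imageNamed:@"photo.png"];  // placeholder asset

// UIImage -> cv::Mat (gives a 4-channel CV_8UC4 matrix)
cv::Mat mat;
UIImageToMat(source, mat);

// ... process mat with any OpenCV functions ...

// cv::Mat -> UIImage
UIImage *output = MatToUIImage(mat);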

Here is my code.

Sorry for all the comments; I left them in to show the progress of my research.

#import "JmBViewController.h"

@interface JmBViewController ()

@end

@implementation JmBViewController

- (void)viewDidLoad
{
    [super viewDidLoad];
    _imgtest = [UIImage imageNamed:@"IMG_0424.PNG"];

    cv::Mat cvImage;
    UIImageToMat(_imgtest, cvImage);
    if (!cvImage.empty()) {
        cv::Mat gray;
        // cv::Mat filteredMat;
        cv::cvtColor(cvImage, gray, CV_BGRA2GRAY);
        // cv::GaussianBlur(gray, gray, cv::Size(5, 5), 1.2, 1.2);
        cv::vector<cv::Vec3f> circles;

        /*
        for (size_t i = 0; i < circles.size(); i++)
        {
            cv::Point center((cvRound(circles[i][0]), cvRound(circles[i][1])));
            int radius = cvRound(circles[i][2]);
            cv::circle(gray, center, 3, cv::Scalar(0,255,0));
            cv::circle(gray, center, radius, cv::Scalar(0,0,255));
        }
        */

        // for ( int i = 1; i < 15; i = i + 2 )

        cv::GaussianBlur(gray, gray, cv::Size(9, 9), 1.5, 1.5);

        cv::Mat edges;
        cv::Canny(gray, edges, 0, 50);
        // gray.setTo(cv::Scalar::all(0));
        // gray.setTo(cv::Scalar::all(255), edges);
        cv::HoughCircles(gray, circles, CV_HOUGH_GRADIENT, 1, 30, 50, 20, 10, 25);
        for (size_t i = 0; i < circles.size(); i++)
        {
            cv::Point center(cvRound(circles[i][0]), cvRound(circles[i][1]));
            int radius = cvRound(circles[i][2]);
            cv::circle(cvImage, center, 5, cv::Scalar::all(200), -1, 8, 0);      // center
            cv::circle(cvImage, center, radius, cv::Scalar::all(255), 3, 8, 0);  // diameter
            NSLog(@"Circles: %lu", (unsigned long)(i + 1));

            // cv::imshow(&"circles i " [ i], gray);
        }

        _imgView.image = MatToUIImage(cvImage);
    }

    /*
    cv::Mat cvImage;
    cv::Mat grey;
    cv::Mat filteredMat;
    cv::vector<cv::Vec3f> circles;
    // cv::cvtColor(_imgtest, cvImage, CV_BGR2GRAY);
    cv::threshold(grey, filteredMat, 100, 255, CV_THRESH_BINARY);
    [UIImageCVMatConverter cvMatGrayFromUIImage:_imgtest];
    // cv::cvtColor(cvImage, grey, CV_RGBA2GRAY);
    // UIImageToMat(_imgtest, cvImage);
    cv::HoughCircles(cvImage, circles, CV_HOUGH_GRADIENT, 1, 50);
    // MatToUIImage(cvImage);
    _imgView.image = [UIImageCVMatConverter UIImageFromCVMat:cvImage];
    _imgView.image = MatToUIImage(cvImage);
    */

    // Do any additional setup after loading the view, typically from a nib.
}

- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}
/*
UIImage* MatToUIImage(const cv::Mat& image) {
    NSData *data = [NSData dataWithBytes:image.data length:image.elemSize()*image.total()];
    CGColorSpaceRef colorSpace;
    if (image.elemSize() == 1) {
        colorSpace = CGColorSpaceCreateDeviceGray();
    } else {
        colorSpace = CGColorSpaceCreateDeviceRGB();
    }
    CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);

    CGImageRef imageRef = CGImageCreate(image.cols, image.rows, 8, 8*image.elemSize(), image.step.p[0], colorSpace, kCGImageAlphaNone|kCGBitmapByteOrderDefault, provider, NULL, FALSE, kCGRenderingIntentDefault);
    UIImage *finalImage = [UIImage imageWithCGImage:imageRef];

    return finalImage;
}
*/


@end

I hope this helps!

Here are all the imports in my view controller's header file:

#import <UIKit/UIKit.h>
//  #import "UIImageCVMatConverter.h"
#import <opencv2/highgui/highgui_c.h>
#import <opencv2/highgui/highgui.hpp>
#import <opencv2/imgproc/imgproc_c.h>
#import <opencv2/imgproc/imgproc.hpp>
#import <opencv2/highgui/ios.h>
#import <opencv2/core/core_c.h>
#import <opencv2/core/core.hpp>

@interface JmBViewController : UIViewController
@property (weak, nonatomic) IBOutlet UIImageView *imgView;
@property (strong, nonatomic) UIImage *imgtest; // strong: a weak reference could let the image be deallocated

@end

There is no need to compile or build your own framework. Just download the version you want from OpenCV's website, drag it into your project under Frameworks, and make sure to "Copy items into destination group's folder" when Xcode asks for confirmation. This is the easiest way I have found to add the framework to a project without all the Terminal commands and CMake hassle.

Answered 2013-11-28T14:46:34.633

Download the OpenCV library using MacPorts, following the instructions in the link below.

https://www.dropbox.com/s/foipmm7q9n8aaht/How%20to%20get%20OpenCV%20working%20under%20Mac%20OS%20X%20Lion%20with%20XCode%204.1%20%C2%AB%20Salem%27s%20Log.pdf
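
If MacPorts is already set up, installing the library itself is typically a single Terminal command. This is a sketch based on general MacPorts usage, not taken from the linked guide, and the port name or variants may differ by version:

sudo port install opencv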

Answered 2012-08-31T12:47:40.373

As mentioned in the link, open the Terminal application located at

/Applications/Utilities/Terminal.app

on your Mac and execute the commands given above.

Answered 2012-08-31T12:32:22.623

Don't forget to convert all your .m files to .mm files, otherwise it won't work.

Answered 2013-12-12T08:49:38.177