Installing OpenCV in Xcode

I used the link below to install OpenCV in my project, but I don't know how to run the build commands in Terminal. Can anyone help me? http://aptogo.co.uk/2011/09/opencv-framework-for-ios/

If you want to use OpenCV on iOS, you should use the official framework provided by OpenCV (available since version 2.4.2).

Get the latest version here: OpenCV for iOS, drop it into your project, and include it in your project's prefix header:

ExampleApp-Prefix.pch:

    #ifdef __cplusplus
        #import <opencv2/opencv.hpp>
    #endif

You also have to "convert" UIImage to cv::Mat in order to use it with OpenCV.

UIImageCVMatConverter.h:

    //
    //  UIImageCVMatConverter.h
    //

    #import <Foundation/Foundation.h>

    @interface UIImageCVMatConverter : NSObject {

    }

    + (UIImage *)UIImageFromCVMat:(cv::Mat)cvMat;
    + (UIImage *)UIImageFromCVMat:(cv::Mat)cvMat withUIImage:(UIImage*)image;
    + (cv::Mat)cvMatFromUIImage:(UIImage *)image;
    + (cv::Mat)cvMatGrayFromUIImage:(UIImage *)image;
    + (UIImage *)scaleAndRotateImageFrontCamera:(UIImage *)image;
    + (UIImage *)scaleAndRotateImageBackCamera:(UIImage *)image;

    @end

UIImageCVMatConverter.mm:

    //
    //  UIImageCVMatConverter.mm
    //

    #import "UIImageCVMatConverter.h"

    @implementation UIImageCVMatConverter

    + (UIImage *)UIImageFromCVMat:(cv::Mat)cvMat withUIImage:(UIImage*)image
    {
        CGColorSpaceRef colorSpace = CGImageGetColorSpace( image.CGImage );
        CGFloat cols = image.size.width;
        CGFloat rows = image.size.height;
        CGFloat widthStep = image.size.width;
        CGContextRef contextRef = CGBitmapContextCreate( NULL, cols, rows, 8, widthStep*4, colorSpace, kCGImageAlphaNoneSkipLast | kCGBitmapByteOrderDefault );
        CGContextDrawImage( contextRef, CGRectMake(0, 0, cols, rows), image.CGImage );
        CGContextSetRGBStrokeColor( contextRef, 1, 0, 0, 1 );
        CGImageRef cgImage = CGBitmapContextCreateImage( contextRef );
        UIImage* result = [UIImage imageWithCGImage:cgImage];
        CGImageRelease( cgImage );
        CGContextRelease( contextRef );
        CGColorSpaceRelease( colorSpace );
        return result;
    }

    + (UIImage *)UIImageFromCVMat:(cv::Mat)cvMat
    {
        NSData *data = [NSData dataWithBytes:cvMat.data length:cvMat.elemSize()*cvMat.total()];
        CGColorSpaceRef colorSpace;
        if ( cvMat.elemSize() == 1 ) {
            colorSpace = CGColorSpaceCreateDeviceGray();
        }
        else {
            colorSpace = CGColorSpaceCreateDeviceRGB();
        }
        CGDataProviderRef provider = CGDataProviderCreateWithCFData( (__bridge CFDataRef)data );
        CGImageRef imageRef = CGImageCreate( cvMat.cols, cvMat.rows, 8, 8 * cvMat.elemSize(), cvMat.step[0], colorSpace, kCGImageAlphaNone|kCGBitmapByteOrderDefault, provider, NULL, false, kCGRenderingIntentDefault );
        UIImage *finalImage = [UIImage imageWithCGImage:imageRef];
        CGImageRelease( imageRef );
        CGDataProviderRelease( provider );
        CGColorSpaceRelease( colorSpace );
        return finalImage;
    }

    + (cv::Mat)cvMatFromUIImage:(UIImage *)image
    {
        CGColorSpaceRef colorSpace = CGImageGetColorSpace( image.CGImage );
        CGFloat cols = image.size.width;
        CGFloat rows = image.size.height;
        cv::Mat cvMat( rows, cols, CV_8UC4 );
        CGContextRef contextRef = CGBitmapContextCreate( cvMat.data, cols, rows, 8, cvMat.step[0], colorSpace, kCGImageAlphaNoneSkipLast | kCGBitmapByteOrderDefault );
        CGContextDrawImage( contextRef, CGRectMake(0, 0, cols, rows), image.CGImage );
        CGContextRelease( contextRef );
        CGColorSpaceRelease( colorSpace );
        return cvMat;
    }

    + (cv::Mat)cvMatGrayFromUIImage:(UIImage *)image
    {
        cv::Mat cvMat = [UIImageCVMatConverter cvMatFromUIImage:image];
        cv::Mat grayMat;
        if ( cvMat.channels() == 1 ) {
            grayMat = cvMat;
        }
        else {
            grayMat = cv::Mat( cvMat.rows, cvMat.cols, CV_8UC1 );
            cv::cvtColor( cvMat, grayMat, CV_BGR2GRAY );
        }
        return grayMat;
    }

    + (UIImage *)scaleAndRotateImageBackCamera:(UIImage *)image
    {
        static int kMaxResolution = 640;
        CGImageRef imgRef = image.CGImage;
        CGFloat width = CGImageGetWidth( imgRef );
        CGFloat height = CGImageGetHeight( imgRef );
        CGAffineTransform transform = CGAffineTransformIdentity;
        CGRect bounds = CGRectMake( 0, 0, width, height );
        if ( width > kMaxResolution || height > kMaxResolution ) {
            CGFloat ratio = width/height;
            if ( ratio > 1 ) {
                bounds.size.width = kMaxResolution;
                bounds.size.height = bounds.size.width / ratio;
            }
            else {
                bounds.size.height = kMaxResolution;
                bounds.size.width = bounds.size.height * ratio;
            }
        }
        CGFloat scaleRatio = bounds.size.width / width;
        CGSize imageSize = CGSizeMake( CGImageGetWidth(imgRef), CGImageGetHeight(imgRef) );
        CGFloat boundHeight;
        UIImageOrientation orient = image.imageOrientation;
        switch( orient ) {
            case UIImageOrientationUp:
                transform = CGAffineTransformIdentity;
                break;
            case UIImageOrientationUpMirrored:
                transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0);
                transform = CGAffineTransformScale(transform, -1.0, 1.0);
                break;
            case UIImageOrientationDown:
                transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height);
                transform = CGAffineTransformRotate(transform, M_PI);
                break;
            case UIImageOrientationDownMirrored:
                transform = CGAffineTransformMakeTranslation(0.0, imageSize.height);
                transform = CGAffineTransformScale(transform, 1.0, -1.0);
                break;
            case UIImageOrientationLeftMirrored:
                boundHeight = bounds.size.height;
                bounds.size.height = bounds.size.width;
                bounds.size.width = boundHeight;
                transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.width);
                transform = CGAffineTransformScale(transform, -1.0, 1.0);
                transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
                break;
            case UIImageOrientationLeft:
                boundHeight = bounds.size.height;
                bounds.size.height = bounds.size.width;
                bounds.size.width = boundHeight;
                transform = CGAffineTransformMakeTranslation(0.0, imageSize.width);
                transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
                break;
            case UIImageOrientationRightMirrored:
                boundHeight = bounds.size.height;
                bounds.size.height = bounds.size.width;
                bounds.size.width = boundHeight;
                transform = CGAffineTransformMakeScale(-1.0, 1.0);
                transform = CGAffineTransformRotate(transform, M_PI / 2.0);
                break;
            case UIImageOrientationRight:
                boundHeight = bounds.size.height;
                bounds.size.height = bounds.size.width;
                bounds.size.width = boundHeight;
                transform = CGAffineTransformMakeTranslation(imageSize.height, 0.0);
                transform = CGAffineTransformRotate(transform, M_PI / 2.0);
                break;
            default:
                [NSException raise:NSInternalInconsistencyException format:@"Invalid image orientation"];
        }
        UIGraphicsBeginImageContext( bounds.size );
        CGContextRef context = UIGraphicsGetCurrentContext();
        if ( orient == UIImageOrientationRight || orient == UIImageOrientationLeft ) {
            CGContextScaleCTM( context, -scaleRatio, scaleRatio );
            CGContextTranslateCTM( context, -height, 0 );
        }
        else {
            CGContextScaleCTM( context, scaleRatio, -scaleRatio );
            CGContextTranslateCTM( context, 0, -height );
        }
        CGContextConcatCTM( context, transform );
        CGContextDrawImage( UIGraphicsGetCurrentContext(), CGRectMake(0, 0, width, height), imgRef );
        UIImage *returnImage = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();
        return returnImage;
    }

    + (UIImage *)scaleAndRotateImageFrontCamera:(UIImage *)image
    {
        static int kMaxResolution = 640;
        CGImageRef imgRef = image.CGImage;
        CGFloat width = CGImageGetWidth(imgRef);
        CGFloat height = CGImageGetHeight(imgRef);
        CGAffineTransform transform = CGAffineTransformIdentity;
        CGRect bounds = CGRectMake( 0, 0, width, height);
        if (width > kMaxResolution || height > kMaxResolution) {
            CGFloat ratio = width/height;
            if (ratio > 1) {
                bounds.size.width = kMaxResolution;
                bounds.size.height = bounds.size.width / ratio;
            }
            else {
                bounds.size.height = kMaxResolution;
                bounds.size.width = bounds.size.height * ratio;
            }
        }
        CGFloat scaleRatio = bounds.size.width / width;
        CGSize imageSize = CGSizeMake(CGImageGetWidth(imgRef), CGImageGetHeight(imgRef));
        CGFloat boundHeight;
        UIImageOrientation orient = image.imageOrientation;
        switch(orient) {
            case UIImageOrientationUp:
                transform = CGAffineTransformIdentity;
                break;
            case UIImageOrientationUpMirrored:
                transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0);
                transform = CGAffineTransformScale(transform, -1.0, 1.0);
                break;
            case UIImageOrientationDown:
                transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height);
                transform = CGAffineTransformRotate(transform, M_PI);
                break;
            case UIImageOrientationDownMirrored:
                transform = CGAffineTransformMakeTranslation(0.0, imageSize.height);
                transform = CGAffineTransformScale(transform, 1.0, -1.0);
                break;
            case UIImageOrientationLeftMirrored:
                boundHeight = bounds.size.height;
                bounds.size.height = bounds.size.width;
                bounds.size.width = boundHeight;
                transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.width);
                transform = CGAffineTransformScale(transform, -1.0, 1.0);
                transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
                break;
            case UIImageOrientationLeft:
                boundHeight = bounds.size.height;
                bounds.size.height = bounds.size.width;
                bounds.size.width = boundHeight;
                transform = CGAffineTransformMakeTranslation(0.0, imageSize.width);
                transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
                break;
            case UIImageOrientationRight:
            case UIImageOrientationRightMirrored:
                boundHeight = bounds.size.height;
                bounds.size.height = bounds.size.width;
                bounds.size.width = boundHeight;
                transform = CGAffineTransformMakeScale(-1.0, 1.0);
                transform = CGAffineTransformRotate(transform, M_PI / 2.0);
                break;
            default:
                [NSException raise:NSInternalInconsistencyException format:@"Invalid image orientation"];
        }
        UIGraphicsBeginImageContext( bounds.size );
        CGContextRef context = UIGraphicsGetCurrentContext();
        if ( orient == UIImageOrientationRight || orient == UIImageOrientationLeft ) {
            CGContextScaleCTM(context, -scaleRatio, scaleRatio);
            CGContextTranslateCTM(context, -height, 0);
        }
        else {
            CGContextScaleCTM(context, scaleRatio, -scaleRatio);
            CGContextTranslateCTM(context, 0, -height);
        }
        CGContextConcatCTM( context, transform );
        CGContextDrawImage( UIGraphicsGetCurrentContext(), CGRectMake(0, 0, width, height), imgRef );
        UIImage *returnImage = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();
        return returnImage;
    }

    @end

Rename your view controller implementation file to *.mm:

 MyViewController.m -> MyViewController.mm 

and import UIImageCVMatConverter in your view controller:

 #import "UIImageCVMatConverter.h" 

Now you can mix Objective-C and C++ OpenCV code inside your view controller:

    cv::Mat img = [UIImageCVMatConverter cvMatFromUIImage:[UIImage imageNamed:@"my_image.png"]];
    ...
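
For a fuller picture, here is a minimal round-trip sketch (my example, not part of the original answer). It assumes the renamed MyViewController.mm from above, a hypothetical UIImageView outlet named imageView, and an image called my_image.png in the app bundle; the OpenCV headers come in through the prefix header shown earlier:

    // Sketch only: UIImage -> cv::Mat -> OpenCV processing -> UIImage.
    // Must live in an Objective-C++ (.mm) file.
    #import "MyViewController.h"        // hypothetical header
    #import "UIImageCVMatConverter.h"

    @interface MyViewController ()
    @property (weak, nonatomic) IBOutlet UIImageView *imageView;  // hypothetical outlet
    @end

    @implementation MyViewController

    - (void)runOpenCVDemo
    {
        UIImage *input = [UIImage imageNamed:@"my_image.png"];

        // UIImage -> single-channel cv::Mat
        cv::Mat gray = [UIImageCVMatConverter cvMatGrayFromUIImage:input];

        // Any OpenCV call works here, e.g. edge detection
        cv::Mat edges;
        cv::Canny(gray, edges, 50, 150);

        // cv::Mat -> UIImage, back into UIKit
        self.imageView.image = [UIImageCVMatConverter UIImageFromCVMat:edges];
    }

    @end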

Have fun!

@Nims, as @moosgummi said, it works, but I also had to do the following steps:

  • Add the library libc++.dylib
  • In "Build Settings" – "Apple LLVM compiler XX – Language", set "Compile Sources As" to "Objective-C++"

You can write all of these class methods yourself, or you can simply include the ios.h file, which already comes with two functions for the image conversion.
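
The two helpers declared in <opencv2/highgui/ios.h> are UIImageToMat and MatToUIImage (both show up in the code further down). Here is a minimal sketch of how they fit together, assuming OpenCV 2.4.x and a file compiled as Objective-C++:

    // Sketch only: grayscale conversion using the helpers from
    // <opencv2/highgui/ios.h>. Must be compiled as Objective-C++ (.mm).
    #import <opencv2/opencv.hpp>
    #import <opencv2/highgui/ios.h>
    #import <UIKit/UIKit.h>

    static UIImage *GrayscaleCopy(UIImage *input)
    {
        cv::Mat rgba;
        UIImageToMat(input, rgba);               // UIImage -> 4-channel cv::Mat

        cv::Mat gray;
        cv::cvtColor(rgba, gray, CV_BGRA2GRAY);  // drop the color channels

        return MatToUIImage(gray);               // cv::Mat -> UIImage
    }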

Here is my code.

Sorry for all the comments; I included them to show the progress of my research.

 #import "JmBViewController.h" @interface JmBViewController () @end @implementation JmBViewController - (void)viewDidLoad { [super viewDidLoad]; _imgtest = [UIImage imageNamed:@"IMG_0424.PNG"]; cv::Mat cvImage; UIImageToMat(_imgtest, cvImage); if (!cvImage.empty()) { cv::Mat gray; // cv::Mat filteredMat; cv::cvtColor(cvImage, gray, CV_BGRA2GRAY); // cv::GaussianBlur(gray, gray, cv::Size(5, 5), 1.2, 1.2); cv::vector<cv::Vec3f> circles; /* for(size_t i = 0; i < circles.size(); i++) { cv::Point center((cvRound(circles[i][0]), cvRound(circles[i][1]))); int radius = cvRound(circles[i][2]); cv::circle(gray, center, 3, cv::Scalar(0,255,0)); cv::circle(gray, center, radius, cv::Scalar(0,0,255)); } */ // for ( int i = 1; i < 15; i = i + 2 ) cv::GaussianBlur(gray, gray, cv::Size(9, 9), 1.5, 1.5); cv::Mat edges; cv::Canny(gray, edges, 0, 50); //gray.setTo(cv::Scalar::all(0)); //gray.setTo(cv::Scalar::all(255), edges); cv::HoughCircles(gray, circles, CV_HOUGH_GRADIENT, 1, 30, 50, 20, 10, 25); for(size_t i = 0; i < circles.size(); i++) { cv::Point center(cvRound(circles[i][0]), cvRound(circles[i][1])); int radius = cvRound(circles[i][2]); cv::circle(cvImage, center, 5, cv::Scalar::all(200), -1, 8, 0 );//center cv::circle(cvImage, center, radius, cv::Scalar::all(255), 3, 8, 0 );//diamter NSLog(@"Circles: %ld", i+1); // cv::imshow(&"circles i " [ i], gray); } _imgView.image = MatToUIImage(cvImage); } /* cv::Mat cvImage; cv::Mat grey; cv::Mat filteredMat; cv::vector<cv::Vec3f> circles; // cv::cvtColor(_imgtest, cvImage, CV_BGR2GRAY); cv::threshold(grey, filteredMat, 100, 255, CV_THRESH_BINARY); [UIImageCVMatConverter cvMatGrayFromUIImage:_imgtest]; // cv::cvtColor(cvImage, grey, CV_RGBA2GRAY); // UIImageToMat(_imgtest, cvImage); cv::HoughCircles(cvImage, circles, CV_HOUGH_GRADIENT, 1, 50); // MatToUIImage(cvImage); _imgView.image = [UIImageCVMatConverter UIImageFromCVMat:cvImage]; _imgView.image = MatToUIImage(cvImage); */ // Do any additional setup after loading the view, typically from a nib. } - (void)didReceiveMemoryWarning { [super didReceiveMemoryWarning]; // Dispose of any resources that can be recreated. } /* UIImage* MatToUIImage(const cv::Mat& image) { NSData *data = [NSData dataWithBytes:image.data length:image.elemSize()*image.total()]; CGColorSpaceRef colorSpace; if (image.elemSize() == 1) { colorSpace = CGColorSpaceCreateDeviceGray(); }else { colorSpace = CGColorSpaceCreateDeviceRGB(); } CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data); CGImageRef imageRef = CGImageCreate(image.cols, image.rows, 8, 8*image.elemSize(), image.step.p[0], colorSpace, kCGImageAlphaNone|kCGBitmapByteOrderDefault, provider, NULL, FALSE, kCGRenderingIntentDefault); UIImage *finalImage = [UIImage imageWithCGImage:imageRef]; return finalImage; } */ @end 

I hope this helps!

Here are all the #imports in my view controller's header file.

    #import <UIKit/UIKit.h>
    // #import "UIImageCVMatConverter.h"
    #import <opencv2/highgui/highgui_c.h>
    #import <opencv2/highgui/highgui.hpp>
    #import <opencv2/imgproc/imgproc_c.h>
    #import <opencv2/imgproc/imgproc.hpp>
    #import <opencv2/highgui/ios.h>
    #import <opencv2/core/core_c.h>
    #import <opencv2/core/core.hpp>

    @interface JmBViewController : UIViewController

    @property (weak, nonatomic) IBOutlet UIImageView *imgView;
    @property (weak, nonatomic) UIImage *imgtest;

    @end
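
One caveat worth adding (my note, not the poster's): because this header #imports the OpenCV C++ headers, every file that imports JmBViewController.h also has to be compiled as Objective-C++. A common alternative, sketched below with the same class and property names, is to keep the header plain Objective-C and pull the OpenCV headers in only from the .mm implementation:

    // JmBViewController.h -- plain Objective-C, no OpenCV types exposed
    #import <UIKit/UIKit.h>

    @interface JmBViewController : UIViewController
    @property (weak, nonatomic) IBOutlet UIImageView *imgView;
    @property (strong, nonatomic) UIImage *imgtest;  // strong, so the image is retained
    @end

    // JmBViewController.mm -- Objective-C++; OpenCV is imported only here
    #import <opencv2/opencv.hpp>
    #import <opencv2/highgui/ios.h>   // UIImageToMat / MatToUIImage
    #import "JmBViewController.h"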

There is no need to compile or build your own framework. Just download the version you want from the OpenCV website, drag it into your project under Frameworks, and when Xcode asks for confirmation, make sure you copy all the files to the destination (assuming you are targeting iOS). This is the easiest way I have found to include the framework in a project without all the terminal commands and fuss.

As described in the link, open the Terminal application, which is located at

/Applications/Utilities/Terminal.app

on your Mac, and execute the commands mentioned there.

Don't forget to convert all your .m files to .mm files, otherwise nothing will work.