如何在iOS(iPhone)中使用opencv比较图像

我想在我的项目中比较iPhone相机拍摄的2张照片。 我正在使用OpenCV来做到这一点。 还有其他更好的方法吗? 如果我得到%相似性,它会很棒。

我正在使用以下OpenCV代码进行图像比较:

// Compares the camera image (imageView.image) against a bundled template image
// using the OpenCV 1.x C API (cvMatchTemplate with CV_TM_CCOEFF).
// NOTE(review): the original source was garbled by HTML escaping — the size
// comparison below is a reconstruction of the apparent intent (enlarge the
// camera image dimensions when it is smaller than the template). Confirm
// against the original code.
- (void)opencvImageCompare {
    NSMutableArray *valuesArray = [[NSMutableArray alloc] init];

    // Convert the camera image to an IplImage; always check the result.
    IplImage *img = [self CreateIplImageFromUIImage:imageView.image];
    if (img == 0) {
        printf("Cannot load camera img");
    }

    IplImage *res;
    CvPoint minloc, maxloc;
    double minval, maxval;
    double values;

    // Load the reference (template) image; always check the result.
    UIImage *imageTocompare = [UIImage imageNamed:@"MyImageName"];
    IplImage *imageTocompareIpl = [self CreateIplImageFromUIImage:imageTocompare];
    if (imageTocompareIpl == 0) {
        printf("Cannot load serverIplImageArray image");
    }

    // cvMatchTemplate requires the search image to be at least as large as
    // the template in both dimensions.
    // WARNING(review): mutating img->width/height does NOT reallocate the
    // pixel buffer, so the header then lies about the data size — this is
    // the most likely cause of the crash with differently-sized images.
    // The proper fix is to create a new, larger IplImage and copy/pad the
    // pixels into it (e.g. with cvCopyMakeBorder).
    if (img->width < imageTocompareIpl->width || img->height < imageTocompareIpl->height) {
        int balWidth = imageTocompareIpl->width - img->width;
        int balHeight = imageTocompareIpl->height - img->height;
        img->width = img->width + balWidth + 100;
        img->height = img->height + balHeight + 100;
    }

    // The result image holds one match score per valid template position.
    CvSize size = cvSize(img->width - imageTocompareIpl->width + 1,
                         img->height - imageTocompareIpl->height + 1);
    res = cvCreateImage(size, IPL_DEPTH_32F, 1);

    // Alternative methods: CV_TM_SQDIFF, CV_TM_SQDIFF_NORMED,
    // CV_TM_CCORR, CV_TM_CCORR_NORMED, CV_TM_CCOEFF_NORMED.
    // NOTE(review): CV_TM_CCOEFF scores are unbounded and content-dependent;
    // CV_TM_CCOEFF_NORMED would yield a comparable [-1, 1] similarity.
    cvMatchTemplate(img, imageTocompareIpl, res, CV_TM_CCOEFF);
    cvMinMaxLoc(res, &minval, &maxval, &minloc, &maxloc, 0);

    printf("\n value %f", maxval - minval);
    values = maxval - minval;

    NSString *valString = [NSString stringWithFormat:@"%f", values];
    [valuesArray addObject:valString];
    weedObject.values = [valString doubleValue];

    printf("\n------------------------------");

    // Release in reverse order of creation; the original's stray brace
    // suggests these releases once sat inside a loop over server images.
    cvReleaseImage(&imageTocompareIpl);
    cvReleaseImage(&res);
    cvReleaseImage(&img);
}

对于相同的图像,我得到非零结果(14956 …);如果我传入不同的图像,程序就会崩溃。

尝试这段代码,它逐位比较图像数据,只有两张图像100%完全相同时才会判定为相同。

// Bit-for-bit comparison: encode both images as PNG and compare the raw
// NSData. Only reports a match when the encodings are 100% identical —
// any difference in pixels, scale, or orientation yields no match.
// NOTE(review): "// Some photo" are placeholders from the original answer;
// this snippet does not compile as-is.
UIImage *img1 = // Some photo;
UIImage *img2 = // Some photo;
NSData *imgdata1 = UIImagePNGRepresentation(img1);
NSData *imgdata2 = UIImagePNGRepresentation(img2);
// isEqualToData: compares the encoded bytes, not the visual content.
if ([imgdata1 isEqualToData:imgdata2]) {
    NSLog(@"Same Image");
}

试试这段代码 —— 它逐像素地比较两张图像,并给出相似度百分比:

 -(void)removeindicator :(UIImage *)image { for (int i =0; i < [imageArray count]; i++) { CGFloat width =100.0f; CGFloat height=100.0f; CGSize newSize1 = CGSizeMake(width, height); //whaterver size UIGraphicsBeginImageContext(newSize1); [[UIImage imageNamed:[imageArray objectAtIndex:i]] drawInRect:CGRectMake(0, 0, newSize1.width, newSize1.height)]; UIImage *newImage1 = UIGraphicsGetImageFromCurrentImageContext(); UIGraphicsEndImageContext(); UIImageView *imageview_camera=(UIImageView *)[self.view viewWithTag:-3]; CGSize newSize2 = CGSizeMake(width, height); //whaterver size UIGraphicsBeginImageContext(newSize2); [[imageview_camera image] drawInRect:CGRectMake(0, 0, newSize2.width, newSize2.height)]; UIImage *newImage2 = UIGraphicsGetImageFromCurrentImageContext(); UIGraphicsEndImageContext(); float numDifferences = 0.0f; float totalCompares = width * height; NSArray *img1RGB=[[NSArray alloc]init]; NSArray *img2RGB=[[NSArray alloc]init]; for (int yCoord = 0; yCoord < height; yCoord += 1) { for (int xCoord = 0; xCoord < width; xCoord += 1) { img1RGB = [self getRGBAsFromImage:newImage1 atX:xCoord andY:yCoord]; img2RGB = [self getRGBAsFromImage:newImage2 atX:xCoord andY:yCoord]; if (([[img1RGB objectAtIndex:0]floatValue] - [[img2RGB objectAtIndex:0]floatValue]) == 0 || ([[img1RGB objectAtIndex:1]floatValue] - [[img2RGB objectAtIndex:1]floatValue]) == 0 || ([[img1RGB objectAtIndex:2]floatValue] - [[img2RGB objectAtIndex:2]floatValue]) == 0) { //one or more pixel components differs by 10% or more numDifferences++; } } } // It will show result in percentage at last CGFloat percentage_similar=((numDifferences*100)/totalCompares); NSString *str=NULL; if (percentage_similar>=10.0f) { str=[[NSString alloc]initWithString:[NSString stringWithFormat:@"%i%@ Identical", (int)((numDifferences*100)/totalCompares),@"%"]]; UIAlertView *alertview=[[UIAlertView alloc]initWithTitle:@"i-App" message:[NSString stringWithFormat:@"%@ Images are same",str] delegate:nil cancelButtonTitle:@"Ok" 
otherButtonTitles:nil]; [alertview show]; break; } else { str=[[NSString alloc]initWithString:[NSString stringWithFormat:@"Result: %i%@ Identical",(int)((numDifferences*100)/totalCompares),@"%"]]; UIAlertView *alertview=[[UIAlertView alloc]initWithTitle:@"i-App" message:[NSString stringWithFormat:@"%@ Images are not same",str] delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil]; [alertview show]; } } 

}

 -(NSArray*)getRGBAsFromImage:(UIImage*)image atX:(int)xx andY:(int)yy { //NSArray *result = [[NSArray alloc]init]; // First get the image into your data buffer CGImageRef imageRef = [image CGImage]; NSUInteger width = CGImageGetWidth(imageRef); NSUInteger height = CGImageGetHeight(imageRef); CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); unsigned char *rawData = (unsigned char*) calloc(height * width * 4, sizeof(unsigned char)); NSUInteger bytesPerPixel = 4; NSUInteger bytesPerRow = bytesPerPixel * width; NSUInteger bitsPerComponent = 8; CGContextRef context = CGBitmapContextCreate(rawData, width, height, bitsPerComponent, bytesPerRow, colorSpace, kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big); CGColorSpaceRelease(colorSpace); CGContextDrawImage(context, CGRectMake(0, 0, width, height), imageRef); CGContextRelease(context); // Now your rawData contains the image data in the RGBA8888 pixel format. int byteIndex = (bytesPerRow * yy) + xx * bytesPerPixel; // for (int ii = 0 ; ii < count ; ++ii) // { CGFloat red = (rawData[byteIndex] * 1.0) / 255.0; CGFloat green = (rawData[byteIndex + 1] * 1.0) / 255.0; CGFloat blue = (rawData[byteIndex + 2] * 1.0) / 255.0; //CGFloat alpha = (rawData[byteIndex + 3] * 1.0) / 255.0; byteIndex += 4; // UIColor *acolor = [UIColor colorWithRed:red green:green blue:blue alpha:alpha]; //} free(rawData); NSArray *result = [NSArray arrayWithObjects: [NSNumber numberWithFloat:red], [NSNumber numberWithFloat:green], [NSNumber numberWithFloat:blue],nil]; return result; }