How can I blur an image on the iPhone efficiently and quickly?

If I have a UIImage, a CGContextRef, or raw bitmap data (direct access to the decompressed ARGB-8 pixels), what is my best option for blurring the image with a 10-pixel radius as quickly as possible?

You can use a stack blur, a box blur, or an OpenGL texture blur (google the first two; for the latter, check Apple's developer samples).
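
To illustrate the box blur option on the raw ARGB-8 data mentioned in the question, here is a minimal, unoptimized C sketch (the function name and parameters are illustrative, not from any library); a real implementation would use running sums, stack blur, or vImage rather than the per-pixel inner loop:

    #include <stdint.h>
    #include <stdlib.h>
    #include <string.h>

    /* Naive separable box blur over ARGB-8888 pixels: one horizontal pass and
     * one vertical pass, clamping at the edges. Complexity is O(w * h * radius),
     * so this only shows the idea; it is not meant to be fast. */
    static void boxBlurARGB8888(uint8_t *pixels, int width, int height,
                                size_t rowBytes, int radius)
    {
        uint8_t *tmp = (uint8_t *)malloc(rowBytes * height);
        if (tmp == NULL) return;

        /* Horizontal pass: read from a copy, write into pixels */
        memcpy(tmp, pixels, rowBytes * height);
        for (int y = 0; y < height; y++) {
            for (int x = 0; x < width; x++) {
                int sum[4] = {0, 0, 0, 0};
                for (int k = -radius; k <= radius; k++) {
                    int xx = x + k;
                    if (xx < 0) xx = 0;
                    if (xx >= width) xx = width - 1;
                    const uint8_t *p = tmp + y * rowBytes + xx * 4;
                    for (int c = 0; c < 4; c++) sum[c] += p[c];
                }
                uint8_t *q = pixels + y * rowBytes + x * 4;
                for (int c = 0; c < 4; c++) q[c] = (uint8_t)(sum[c] / (2 * radius + 1));
            }
        }

        /* Vertical pass: same idea, reading the horizontally blurred copy */
        memcpy(tmp, pixels, rowBytes * height);
        for (int y = 0; y < height; y++) {
            for (int x = 0; x < width; x++) {
                int sum[4] = {0, 0, 0, 0};
                for (int k = -radius; k <= radius; k++) {
                    int yy = y + k;
                    if (yy < 0) yy = 0;
                    if (yy >= height) yy = height - 1;
                    const uint8_t *p = tmp + yy * rowBytes + x * 4;
                    for (int c = 0; c < 4; c++) sum[c] += p[c];
                }
                uint8_t *q = pixels + y * rowBytes + x * 4;
                for (int c = 0; c < 4; c++) q[c] = (uint8_t)(sum[c] / (2 * radius + 1));
            }
        }

        free(tmp);
    }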

I have implemented the stackBlur algorithm for iOS; it is close to a Gaussian blur but very fast:

https://github.com/tomsoft1/StackBluriOS
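
A minimal usage sketch, assuming the UIImage+StackBlur category from that repository has been added to the project and that its method is named stackBlur: (verify the header and method name against the repo, since they are quoted from memory); the asset name is a placeholder:

    #import "UIImage+StackBlur.h"   // category from StackBluriOS

    UIImage *source = [UIImage imageNamed:@"photo"];  // placeholder asset name
    UIImage *blurred = [source stackBlur:10];         // radius in pixels, per the question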

Check the example here:

Blur UIImage on slider change

https://github.com/rnystrom/RNBlurModalView

A box blur built on the Accelerate framework's vImage convolution (three passes approximate a Gaussian):

    #import <UIKit/UIKit.h>
    #import <Accelerate/Accelerate.h>   // vImage box convolution

    - (UIImage *)boxblurImageWithBlur:(CGFloat)blur bluringImage:(UIImage *)image {
        // blur is expected in the 0.0 - 1.0 range; the kernel size must be odd
        int boxSize = (int)(blur * 40);
        boxSize = boxSize - (boxSize % 2) + 1;

        CGImageRef img = image.CGImage;

        vImage_Buffer inBuffer, outBuffer;
        vImage_Error error;
        void *pixelBuffer;

        // Wrap the CGImage's pixel data in a vImage_Buffer
        CGDataProviderRef inProvider = CGImageGetDataProvider(img);
        CFDataRef inBitmapData = CGDataProviderCopyData(inProvider);

        inBuffer.width = CGImageGetWidth(img);
        inBuffer.height = CGImageGetHeight(img);
        inBuffer.rowBytes = CGImageGetBytesPerRow(img);
        inBuffer.data = (void *)CFDataGetBytePtr(inBitmapData);

        // Output buffer
        pixelBuffer = malloc(CGImageGetBytesPerRow(img) * CGImageGetHeight(img));
        if (pixelBuffer == NULL) NSLog(@"No pixelbuffer");

        outBuffer.data = pixelBuffer;
        outBuffer.width = CGImageGetWidth(img);
        outBuffer.height = CGImageGetHeight(img);
        outBuffer.rowBytes = CGImageGetBytesPerRow(img);

        // Third buffer for intermediate processing
        void *pixelBuffer2 = malloc(CGImageGetBytesPerRow(img) * CGImageGetHeight(img));
        vImage_Buffer outBuffer2;
        outBuffer2.data = pixelBuffer2;
        outBuffer2.width = CGImageGetWidth(img);
        outBuffer2.height = CGImageGetHeight(img);
        outBuffer2.rowBytes = CGImageGetBytesPerRow(img);

        // Three successive box convolutions approximate a Gaussian blur;
        // note that the second pass writes back into the copied input data
        error = vImageBoxConvolve_ARGB8888(&inBuffer, &outBuffer2, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend);
        error = vImageBoxConvolve_ARGB8888(&outBuffer2, &inBuffer, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend);
        error = vImageBoxConvolve_ARGB8888(&inBuffer, &outBuffer, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend);

        if (error) {
            NSLog(@"error from convolution %ld", error);
        }

        // Re-wrap the blurred pixels in a CGImage/UIImage
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef ctx = CGBitmapContextCreate(outBuffer.data,
                                                 outBuffer.width,
                                                 outBuffer.height,
                                                 8,
                                                 outBuffer.rowBytes,
                                                 colorSpace,
                                                 kCGImageAlphaNoneSkipLast);
        CGImageRef imageRef = CGBitmapContextCreateImage(ctx);
        UIImage *returnImage = [UIImage imageWithCGImage:imageRef];

        // Clean up
        CGContextRelease(ctx);
        CGColorSpaceRelease(colorSpace);
        free(pixelBuffer);
        free(pixelBuffer2);   // this buffer was leaked in the original snippet
        CFRelease(inBitmapData);
        CGImageRelease(imageRef);

        return returnImage;
    }
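
For reference, a minimal call site, assuming the method above is defined on the calling class; the asset name is a placeholder, and blur = 0.25 is chosen because blur * 40 maps to an 11-pixel box, close to the 10-pixel radius asked about in the question:

    UIImage *source = [UIImage imageNamed:@"photo"];   // placeholder asset name
    // 0.25 * 40 = 10, adjusted to an odd box size of 11 pixels
    UIImage *blurred = [self boxblurImageWithBlur:0.25f bluringImage:source];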