Blur effects show up all over the phone, Control Center being the obvious example.
So how is this effect implemented? There are generally the following options:
1. CoreImage
2. vImage (UIImage+ImageEffects)
3. GPUImage
4. UIVisualEffectView
Here is how to use each of them.
1. CoreImage
- (void)test1 {
    // Source image
    UIImage *originImage = [UIImage imageNamed:@"bg1.jpg"];
    // Create a Core Image context
    CIContext *context = [CIContext contextWithOptions:nil];
    // Wrap the source image in a CIImage
    CIImage *inputImage = [CIImage imageWithCGImage:originImage.CGImage];
    // Create the Gaussian blur filter
    CIFilter *filter = [CIFilter filterWithName:@"CIGaussianBlur"];
    // Set the filter's input image
    [filter setValue:inputImage forKey:kCIInputImageKey];
    // Blur radius
    [filter setValue:@10.f forKey:@"inputRadius"];
    // Output image
    CIImage *outputImage = [filter valueForKey:kCIOutputImageKey];
    // Render to a CGImage; cropping to the input extent keeps the original size,
    // because the blur expands the output extent beyond the image edges
    CGImageRef ref = [context createCGImage:outputImage fromRect:[inputImage extent]];
    // Convert back to UIImage
    UIImage *newImage = [UIImage imageWithCGImage:ref];
    // Release the CGImage
    CGImageRelease(ref);
}
2. vImage
vImage is a library from Apple; it ships inside Accelerate.framework.
Accelerate is mainly a framework for digital signal processing and for the vector and matrix math used in image processing. Since an image is ultimately just vector or matrix data, the high-performance math APIs in Accelerate naturally make all kinds of image processing convenient; a minimal sketch of driving vImage directly follows right after this paragraph.
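To give a rough idea of what using vImage directly looks like, here is a minimal sketch of a single box blur. It is an illustration only, not the library code that follows: the helper name SimpleBoxBlur is made up for this post, the image is assumed to be backed by a CGImage with a 4x8-bit pixel layout, and a real implementation would repeat the convolution (as Apple's code below does) to approximate a Gaussian.

@import UIKit;
@import Accelerate;

// Minimal sketch: one vImage box convolution over the image's bitmap data.
// SimpleBoxBlur is a hypothetical helper name used only for this illustration.
static UIImage *SimpleBoxBlur(UIImage *image, uint32_t kernelSize) {
    if (!image.CGImage) return nil;
    if (kernelSize % 2 == 0) kernelSize += 1;            // box kernels must be odd

    // Source buffer: draw the image into a bitmap-backed context.
    UIGraphicsBeginImageContextWithOptions(image.size, NO, image.scale);
    CGContextRef inContext = UIGraphicsGetCurrentContext();
    [image drawInRect:(CGRect){CGPointZero, image.size}];
    vImage_Buffer inBuffer = {
        .data     = CGBitmapContextGetData(inContext),
        .width    = CGBitmapContextGetWidth(inContext),
        .height   = CGBitmapContextGetHeight(inContext),
        .rowBytes = CGBitmapContextGetBytesPerRow(inContext)
    };

    // Destination buffer: a second context with the same geometry.
    UIGraphicsBeginImageContextWithOptions(image.size, NO, image.scale);
    CGContextRef outContext = UIGraphicsGetCurrentContext();
    vImage_Buffer outBuffer = {
        .data     = CGBitmapContextGetData(outContext),
        .width    = CGBitmapContextGetWidth(outContext),
        .height   = CGBitmapContextGetHeight(outContext),
        .rowBytes = CGBitmapContextGetBytesPerRow(outContext)
    };

    // A single box blur; three passes would approximate a Gaussian.
    vImageBoxConvolve_ARGB8888(&inBuffer, &outBuffer, NULL, 0, 0,
                               kernelSize, kernelSize, NULL, kvImageEdgeExtend);

    UIImage *result = UIGraphicsGetImageFromCurrentImageContext(); // reads the out context
    UIGraphicsEndImageContext();                                   // pop the out context
    UIGraphicsEndImageContext();                                   // pop the in context
    return result;
}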
Based on vImage you can implement the blur yourself straight from the image-processing principle (as in the sketch above), or you can use an existing tool. UIImage+ImageEffects is a good image-processing utility; as the name says, it is a category on UIImage, and it is widely used. Its header:
@import UIKit;

@interface UIImage (ImageEffects)

#pragma mark - Blur Image

/**
 *  Get blurred image.
 *
 *  @return Blurred image.
 */
- (UIImage *)blurImage;

/**
 *  Get the blurred image masked by another image.
 *
 *  @param maskImage Image used for the mask.
 *
 *  @return The blurred image.
 */
- (UIImage *)blurImageWithMask:(UIImage *)maskImage;

/**
 *  Get blurred image with a custom blur radius.
 *
 *  @param radius Blur radius.
 *
 *  @return Blurred image.
 */
- (UIImage *)blurImageWithRadius:(CGFloat)radius;

/**
 *  Get blurred image at the specified frame.
 *
 *  @param frame The specified frame to blur.
 *
 *  @return Blurred image.
 */
- (UIImage *)blurImageAtFrame:(CGRect)frame;

#pragma mark - Grayscale Image

/**
 *  Get grayscale image.
 *
 *  @return Grayscale image.
 */
- (UIImage *)grayScale;

#pragma mark - Some Useful Methods

/**
 *  Scale image with a fixed width.
 *
 *  @param width The fixed width you give.
 *
 *  @return Scaled image.
 */
- (UIImage *)scaleWithFixedWidth:(CGFloat)width;

/**
 *  Scale image with a fixed height.
 *
 *  @param height The fixed height you give.
 *
 *  @return Scaled image.
 */
- (UIImage *)scaleWithFixedHeight:(CGFloat)height;

/**
 *  Get the image's average color.
 *
 *  @return Average color of the image.
 */
- (UIColor *)averageColor;

/**
 *  Get cropped image at the specified frame.
 *
 *  @param frame The specified frame to crop to.
 *
 *  @return Cropped image.
 */
- (UIImage *)croppedImageAtFrame:(CGRect)frame;

@end

And the implementation:
#import "UIImage+ImageEffects.h" #import <float.h> @import Accelerate; @implementation UIImage (ImageEffects) - (UIImage *)applyLightEffect{ UIColor *tintColor = [UIColor colorWithWhite:1.0 alpha:0.3]; return [self applyBlurWithRadius:30 tintColor:tintColor saturationDeltaFactor:1.8 maskImage:nil]; } - (UIImage *)applyExtraLightEffect { UIColor *tintColor = [UIColor colorWithWhite:0.97 alpha:0.82]; return [self applyBlurWithRadius:20 tintColor:tintColor saturationDeltaFactor:1.8 maskImage:nil]; } - (UIImage *)applyDarkEffect { UIColor *tintColor = [UIColor colorWithWhite:0.11 alpha:0.73]; return [self applyBlurWithRadius:20 tintColor:tintColor saturationDeltaFactor:1.8 maskImage:nil]; } - (UIImage *)applyTintEffectWithColor:(UIColor *)tintColor { const CGFloat EffectColorAlpha = 0.6; UIColor *effectColor = tintColor; int componentCount = (int)CGColorGetNumberOfComponents(tintColor.CGColor); if (componentCount == 2) { CGFloat b; if ([tintColor getWhite:&b alpha:NULL]) { effectColor = [UIColor colorWithWhite:b alpha:EffectColorAlpha]; } } else { CGFloat r, g, b; if ([tintColor getRed:&r green:&g blue:&b alpha:NULL]) { effectColor = [UIColor colorWithRed:r green:g blue:b alpha:EffectColorAlpha]; } } return [self applyBlurWithRadius:20 tintColor:effectColor saturationDeltaFactor:1.4 maskImage:nil]; } - (UIImage *)blurImage { return [self applyBlurWithRadius:20 tintColor:[UIColor colorWithWhite:0 alpha:0.0] saturationDeltaFactor:1.4 maskImage:nil]; } - (UIImage *)blurImageWithRadius:(CGFloat)radius { return [self applyBlurWithRadius:radius tintColor:[UIColor colorWithWhite:0 alpha:0.0] saturationDeltaFactor:1.4 maskImage:nil]; } - (UIImage *)blurImageWithMask:(UIImage *)maskImage { return [self applyBlurWithRadius:20 tintColor:[UIColor colorWithWhite:0 alpha:0.0] saturationDeltaFactor:1.4 maskImage:maskImage]; } - (UIImage *)blurImageAtFrame:(CGRect)frame { return [self applyBlurWithRadius:20 tintColor:[UIColor colorWithWhite:0 alpha:0.0] saturationDeltaFactor:1.4 maskImage:nil atFrame:frame]; } - (UIImage *)applyBlurWithRadius:(CGFloat)blurRadius tintColor:(UIColor *)tintColor saturationDeltaFactor:(CGFloat)saturationDeltaFactor maskImage:(UIImage *)maskImage { // Check pre-conditions. if (self.size.width < 1 || self.size.height < 1) { NSLog (@"*** error: invalid size: (%.2f x %.2f). Both dimensions must be >= 1: %@", self.size.width, self.size.height, self); return nil; } if (!self.CGImage) { NSLog (@"*** error: image must be backed by a CGImage: %@", self); return nil; } if (maskImage && !maskImage.CGImage) { NSLog (@"*** error: maskImage must be backed by a CGImage: %@", maskImage); return nil; } CGRect imageRect = { CGPointZero, self.size }; UIImage *effectImage = self; BOOL hasBlur = blurRadius > __FLT_EPSILON__; BOOL hasSaturationChange = fabs(saturationDeltaFactor - 1.) 
> __FLT_EPSILON__; if (hasBlur || hasSaturationChange) { UIGraphicsBeginImageContextWithOptions(self.size, NO, [[UIScreen mainScreen] scale]); CGContextRef effectInContext = UIGraphicsGetCurrentContext(); CGContextScaleCTM(effectInContext, 1.0, -1.0); CGContextTranslateCTM(effectInContext, 0, -self.size.height); CGContextDrawImage(effectInContext, imageRect, self.CGImage); vImage_Buffer effectInBuffer; effectInBuffer.data = CGBitmapContextGetData(effectInContext); effectInBuffer.width = CGBitmapContextGetWidth(effectInContext); effectInBuffer.height = CGBitmapContextGetHeight(effectInContext); effectInBuffer.rowBytes = CGBitmapContextGetBytesPerRow(effectInContext); UIGraphicsBeginImageContextWithOptions(self.size, NO, [[UIScreen mainScreen] scale]); CGContextRef effectOutContext = UIGraphicsGetCurrentContext(); vImage_Buffer effectOutBuffer; effectOutBuffer.data = CGBitmapContextGetData(effectOutContext); effectOutBuffer.width = CGBitmapContextGetWidth(effectOutContext); effectOutBuffer.height = CGBitmapContextGetHeight(effectOutContext); effectOutBuffer.rowBytes = CGBitmapContextGetBytesPerRow(effectOutContext); if (hasBlur) { // A description of how to compute the box kernel width from the Gaussian // radius (aka standard deviation) appears in the SVG spec: // http://www.w3.org/TR/SVG/filters.html#feGaussianBlurElement // // For larger values of 's' (s >= 2.0), an approximation can be used: Three // successive box-blurs build a piece-wise quadratic convolution kernel, which // approximates the Gaussian kernel to within roughly 3%. // // let d = floor(s * 3*sqrt(2*pi)/4 + 0.5) // // ... if d is odd, use three box-blurs of size 'd', centered on the output pixel. // CGFloat inputRadius = blurRadius * [[UIScreen mainScreen] scale]; NSUInteger radius = floor(inputRadius * 3. * sqrt(2 * M_PI) / 4 + 0.5); if (radius % 2 != 1) { radius += 1; // force radius to be odd so that the three box-blur methodology works. 
} vImageBoxConvolve_ARGB8888(&effectInBuffer, &effectOutBuffer, NULL, 0, 0, (uint32_t)radius, (uint32_t)radius, 0, kvImageEdgeExtend); vImageBoxConvolve_ARGB8888(&effectOutBuffer, &effectInBuffer, NULL, 0, 0, (uint32_t)radius, (uint32_t)radius, 0, kvImageEdgeExtend); vImageBoxConvolve_ARGB8888(&effectInBuffer, &effectOutBuffer, NULL, 0, 0, (uint32_t)radius, (uint32_t)radius, 0, kvImageEdgeExtend); } BOOL effectImageBuffersAreSwapped = NO; if (hasSaturationChange) { CGFloat s = saturationDeltaFactor; CGFloat floatingPointSaturationMatrix[] = { 0.0722 + 0.9278 * s, 0.0722 - 0.0722 * s, 0.0722 - 0.0722 * s, 0, 0.7152 - 0.7152 * s, 0.7152 + 0.2848 * s, 0.7152 - 0.7152 * s, 0, 0.2126 - 0.2126 * s, 0.2126 - 0.2126 * s, 0.2126 + 0.7873 * s, 0, 0, 0, 0, 1, }; const int32_t divisor = 256; NSUInteger matrixSize = sizeof(floatingPointSaturationMatrix)/sizeof(floatingPointSaturationMatrix[0]); int16_t saturationMatrix[matrixSize]; for (NSUInteger i = 0; i < matrixSize; ++i) { saturationMatrix[i] = (int16_t)roundf(floatingPointSaturationMatrix[i] * divisor); } if (hasBlur) { vImageMatrixMultiply_ARGB8888(&effectOutBuffer, &effectInBuffer, saturationMatrix, divisor, NULL, NULL, kvImageNoFlags); effectImageBuffersAreSwapped = YES; } else { vImageMatrixMultiply_ARGB8888(&effectInBuffer, &effectOutBuffer, saturationMatrix, divisor, NULL, NULL, kvImageNoFlags); } } if (!effectImageBuffersAreSwapped) { effectImage = UIGraphicsGetImageFromCurrentImageContext(); } UIGraphicsEndImageContext(); if (effectImageBuffersAreSwapped) { effectImage = UIGraphicsGetImageFromCurrentImageContext(); } UIGraphicsEndImageContext(); } // Set up output context. UIGraphicsBeginImageContextWithOptions(self.size, NO, [[UIScreen mainScreen] scale]); CGContextRef outputContext = UIGraphicsGetCurrentContext(); CGContextScaleCTM(outputContext, 1.0, -1.0); CGContextTranslateCTM(outputContext, 0, -self.size.height); // Draw base image. CGContextDrawImage(outputContext, imageRect, self.CGImage); // Draw effect image. if (hasBlur) { CGContextSaveGState(outputContext); if (maskImage) { CGContextClipToMask(outputContext, imageRect, maskImage.CGImage); } CGContextDrawImage(outputContext, imageRect, effectImage.CGImage); CGContextRestoreGState(outputContext); } // Add in color tint. if (tintColor) { CGContextSaveGState(outputContext); CGContextSetFillColorWithColor(outputContext, tintColor.CGColor); CGContextFillRect(outputContext, imageRect); CGContextRestoreGState(outputContext); } // Output image is ready. 
UIImage *outputImage = UIGraphicsGetImageFromCurrentImageContext(); UIGraphicsEndImageContext(); return outputImage; } - (UIImage *)grayScale { int width = self.size.width; int height = self.size.height; CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceGray(); CGContextRef context = CGBitmapContextCreate(nil, width, height, 8, // bits per component 0, colorSpace, kCGBitmapByteOrderDefault); CGColorSpaceRelease(colorSpace); if (context == NULL) { return nil; } CGContextDrawImage(context, CGRectMake(0, 0, width, height), self.CGImage); CGImageRef image = CGBitmapContextCreateImage(context); UIImage *grayImage = [UIImage imageWithCGImage:image]; CFRelease(image); CGContextRelease(context); return grayImage; } - (UIImage *)scaleWithFixedWidth:(CGFloat)width { float newHeight = self.size.height * (width / self.size.width); CGSize size = CGSizeMake(width, newHeight); UIGraphicsBeginImageContextWithOptions(size, NO, 0); CGContextRef context = UIGraphicsGetCurrentContext(); CGContextTranslateCTM(context, 0.0, size.height); CGContextScaleCTM(context, 1.0, -1.0); CGContextSetBlendMode(context, kCGBlendModeCopy); CGContextDrawImage(context, CGRectMake(0.0f, 0.0f, size.width, size.height), self.CGImage); UIImage *imageOut = UIGraphicsGetImageFromCurrentImageContext(); UIGraphicsEndImageContext(); return imageOut; } - (UIImage *)scaleWithFixedHeight:(CGFloat)height { float newWidth = self.size.width * (height / self.size.height); CGSize size = CGSizeMake(newWidth, height); UIGraphicsBeginImageContextWithOptions(size, NO, 0); CGContextRef context = UIGraphicsGetCurrentContext(); CGContextTranslateCTM(context, 0.0, size.height); CGContextScaleCTM(context, 1.0, -1.0); CGContextSetBlendMode(context, kCGBlendModeCopy); CGContextDrawImage(context, CGRectMake(0.0f, 0.0f, size.width, size.height), self.CGImage); UIImage *imageOut = UIGraphicsGetImageFromCurrentImageContext(); UIGraphicsEndImageContext(); return imageOut; } - (UIColor *)averageColor { CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); unsigned char rgba[4]; CGContextRef context = CGBitmapContextCreate(rgba, 1, 1, 8, 4, colorSpace, kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big); CGContextDrawImage(context, CGRectMake(0, 0, 1, 1), self.CGImage); CGColorSpaceRelease(colorSpace); CGContextRelease(context); if(rgba[3] > 0) { CGFloat alpha = ((CGFloat)rgba[3])/255.0; CGFloat multiplier = alpha/255.0; return [UIColor colorWithRed:((CGFloat)rgba[0])*multiplier green:((CGFloat)rgba[1])*multiplier blue:((CGFloat)rgba[2])*multiplier alpha:alpha]; } else { return [UIColor colorWithRed:((CGFloat)rgba[0])/255.0 green:((CGFloat)rgba[1])/255.0 blue:((CGFloat)rgba[2])/255.0 alpha:((CGFloat)rgba[3])/255.0]; } } - (UIImage *)croppedImageAtFrame:(CGRect)frame { frame = CGRectMake(frame.origin.x * self.scale, frame.origin.y * self.scale, frame.size.width * self.scale, frame.size.height * self.scale); CGImageRef sourceImageRef = [self CGImage]; CGImageRef newImageRef = CGImageCreateWithImageInRect(sourceImageRef, frame); UIImage *newImage = [UIImage imageWithCGImage:newImageRef scale:[self scale] orientation:[self imageOrientation]]; CGImageRelease(newImageRef); return newImage; } - (UIImage *)addImageToImage:(UIImage *)img atRect:(CGRect)cropRect { CGSize size = CGSizeMake(self.size.width, self.size.height); UIGraphicsBeginImageContextWithOptions(size, NO, self.scale); CGPoint pointImg1 = CGPointMake(0,0); [self drawAtPoint:pointImg1]; CGPoint pointImg2 = cropRect.origin; [img drawAtPoint: pointImg2]; UIImage* result = 
UIGraphicsGetImageFromCurrentImageContext(); UIGraphicsEndImageContext(); return result; } - (UIImage *)applyBlurWithRadius:(CGFloat)blurRadius tintColor:(UIColor *)tintColor saturationDeltaFactor:(CGFloat)saturationDeltaFactor maskImage:(UIImage *)maskImage atFrame:(CGRect)frame { UIImage *blurredFrame = \ [[self croppedImageAtFrame:frame] applyBlurWithRadius:blurRadius tintColor:tintColor saturationDeltaFactor:saturationDeltaFactor maskImage:maskImage]; return [self addImageToImage:blurredFrame atRect:frame]; } @end
Using it is simple:
- (void)test2 {
    // Source image
    UIImage *originImage = [UIImage imageNamed:@"bg1.jpg"];
    // Blur the whole image
    UIImage *newImage1 = [originImage blurImage];
    // Blur only part of the image
    UIImage *newImage2 = [originImage blurImageAtFrame:CGRectMake(0, 100, 320, 200)];
    // Blur with a custom radius
    UIImage *newImage3 = [originImage blurImageWithRadius:10.f];
}
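One practical note: these category methods do the blur on the CPU, so for large images it is worth keeping the work off the main thread. A minimal sketch (the imageView property here is hypothetical):

// Sketch: run the CPU-heavy blur on a background queue, then update UIKit on the main thread.
UIImage *originImage = [UIImage imageNamed:@"bg1.jpg"];
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
    // The blur itself is heavy CPU work
    UIImage *blurred = [originImage blurImageWithRadius:10.f];
    dispatch_async(dispatch_get_main_queue(), ^{
        // Back on the main thread for UIKit; imageView is a hypothetical property
        self.imageView.image = blurred;
    });
});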
3. GPUImage
How to add the GPUImage framework to a project was covered in an earlier article of mine (see that post).
It is also simple to use:
- (void)test3 {
    // Create the Gaussian blur filter
    GPUImageGaussianBlurFilter *filter = [[GPUImageGaussianBlurFilter alloc] init];
    // Set the blur radius
    filter.blurRadiusInPixels = 10.f;
    // Source image
    UIImage *image = [UIImage imageNamed:@"bg1.jpg"];
    // Blurred image
    UIImage *newImage = [filter imageByFilteringImage:image];
}
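If the goal is specifically the Control-Center-style frosted look, newer versions of GPUImage also ship a GPUImageiOSBlurFilter that combines blur, desaturation and downsampling. Whether it is available, and its exact defaults, depends on the GPUImage version you import, so treat the following as a sketch rather than a guaranteed API:

- (UIImage *)iosStyleBlur:(UIImage *)image {
    // GPUImageiOSBlurFilter mimics the system blur (availability depends on your GPUImage version)
    GPUImageiOSBlurFilter *filter = [[GPUImageiOSBlurFilter alloc] init];
    filter.blurRadiusInPixels = 12.f;   // blur strength
    filter.saturation = 0.8f;           // slight desaturation, like the system effect
    return [filter imageByFilteringImage:image];
}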
4. UIVisualEffectView
Note: this one is only available on iOS 8 and later, and the blur is live: it continuously blurs whatever is rendered behind the view.
Screenshot of the effect:
- (void)viewDidLoad {
    [super viewDidLoad];
    // Set up the scroll view
    self.scrollView = [[UIScrollView alloc] initWithFrame:self.view.bounds];
    UIImageView *imgView = [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"bg1.jpg"]];
    [self.scrollView addSubview:imgView];
    self.scrollView.contentSize = imgView.image.size;
    self.scrollView.bounces = NO;
    [self.view addSubview:self.scrollView];

    // Add the blur with UIVisualEffectView
    UIVisualEffectView *effectView = [[UIVisualEffectView alloc] initWithEffect:[UIBlurEffect effectWithStyle:UIBlurEffectStyleLight]];
    // Set its frame (SCREEN_WIDTH is a project macro for the screen width)
    effectView.frame = CGRectMake(0, 100, SCREEN_WIDTH, 200);
    // Add it to the view; it must sit above the scroll view
    [self.view addSubview:effectView];

    // Add subviews through contentView so they also pick up the effect
    // Create a label
    UILabel *txt = [[UILabel alloc] initWithFrame:effectView.bounds];
    txt.text = @"Blur effect";
    txt.textAlignment = NSTextAlignmentCenter;
    txt.font = [UIFont systemFontOfSize:50.f];
    // Create a vibrancy view that matches the blur
    UIVisualEffectView *subView = [[UIVisualEffectView alloc] initWithEffect:[UIVibrancyEffect effectForBlurEffect:(UIBlurEffect *)effectView.effect]];
    // Set its frame
    subView.frame = effectView.bounds;
    // Add it to the blur view's contentView
    [effectView.contentView addSubview:subView];
    [subView.contentView addSubview:txt];
}
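Because UIVisualEffectView only exists from iOS 8 on, a project that still supports iOS 7 needs a runtime check before using it. One minimal way (just a sketch, reusing the SCREEN_WIDTH macro from the sample above) is to test for the class and fall back to one of the image-based approaches:

if (NSClassFromString(@"UIVisualEffectView")) {
    // iOS 8+: use the live blur
    UIVisualEffectView *effectView = [[UIVisualEffectView alloc]
        initWithEffect:[UIBlurEffect effectWithStyle:UIBlurEffectStyleLight]];
    effectView.frame = CGRectMake(0, 100, SCREEN_WIDTH, 200);
    [self.view addSubview:effectView];
} else {
    // iOS 7: fall back to a static blurred snapshot, e.g. via UIImage+ImageEffects
    UIImageView *fallback = [[UIImageView alloc]
        initWithImage:[[UIImage imageNamed:@"bg1.jpg"] blurImage]];
    fallback.frame = CGRectMake(0, 100, SCREEN_WIDTH, 200);
    [self.view addSubview:fallback];
}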