//
//  UIImage+YYAdd.m
//  YYCategories <https://github.com/ibireme/YYCategories>
//
//  Created by ibireme on 13/4/4.
//  Copyright (c) 2015 ibireme.
//
//  This source code is licensed under the MIT-style license found in the
//  LICENSE file in the root directory of this source tree.
//
|
#import "UIImage+YYAdd.h"
|
#import "UIDevice+YYAdd.h"
|
#import "NSString+YYAdd.h"
|
#import "YYCategoriesMacro.h"
|
#import "YYCGUtilities.h"
|
#import <ImageIO/ImageIO.h>
|
#import <Accelerate/Accelerate.h>
|
#import <CoreText/CoreText.h>
|
#import <objc/runtime.h>
|
#import "YYCGUtilities.h"
|
|
YYSYNTH_DUMMY_CLASS(UIImage_YYAdd)
|
|
/// Returns the display duration (in seconds) of the GIF frame at `index`.
/// Prefers the unclamped delay; falls back to the clamped delay when the
/// unclamped value is missing or effectively zero.
static NSTimeInterval _yy_CGImageSourceGetGIFFrameDelayAtIndex(CGImageSourceRef source, size_t index) {
    NSTimeInterval delay = 0;
    CFDictionaryRef properties = CGImageSourceCopyPropertiesAtIndex(source, index, NULL);
    if (properties) {
        CFDictionaryRef gifProperties = CFDictionaryGetValue(properties, kCGImagePropertyGIFDictionary);
        if (gifProperties) {
            NSNumber *delayValue = CFDictionaryGetValue(gifProperties, kCGImagePropertyGIFUnclampedDelayTime);
            if (delayValue.doubleValue <= __FLT_EPSILON__) {
                delayValue = CFDictionaryGetValue(gifProperties, kCGImagePropertyGIFDelayTime);
            }
            delay = delayValue.doubleValue;
        }
        CFRelease(properties);
    }
    
    // Browsers normalize tiny delays to 100 ms; mirror that behavior.
    // http://nullsleep.tumblr.com/post/16524517190/animated-gif-minimum-frame-delay-browser-compatibility
    if (delay < 0.02) delay = 0.1;
    return delay;
}
|
|
|
|
@implementation UIImage (YYAdd)
|
|
/// Decodes a (small) GIF into a frame-accurate animated UIImage.
/// Every frame is pre-decoded into a bitmap and repeated an integral number of
/// times so that +animatedImageWithImages:duration: (which plays frames at a
/// uniform rate) reproduces each frame's original delay.
/// @param data  GIF file data.
/// @param scale The scale factor for the resulting image.
/// @return An animated UIImage, a single static image for 1-frame data, or nil on failure.
+ (UIImage *)imageWithSmallGIFData:(NSData *)data scale:(CGFloat)scale {
    CGImageSourceRef source = CGImageSourceCreateWithData((__bridge CFTypeRef)(data), NULL);
    if (!source) return nil;
    
    size_t count = CGImageSourceGetCount(source);
    if (count <= 1) {
        // Not animated — fall back to plain decoding.
        CFRelease(source);
        return [self.class imageWithData:data scale:scale];
    }
    
    // Quantize each frame's delay into 1/50 s ticks, then compute the GCD of
    // all tick counts so each frame repeats (frames[i] / gcdFrame) times.
    NSUInteger frames[count];
    double oneFrameTime = 1 / 50.0; // 50 fps
    NSTimeInterval totalTime = 0;
    NSUInteger totalFrame = 0;
    NSUInteger gcdFrame = 0;
    for (size_t i = 0; i < count; i++) {
        NSTimeInterval delay = _yy_CGImageSourceGetGIFFrameDelayAtIndex(source, i);
        totalTime += delay;
        NSInteger frame = lrint(delay / oneFrameTime);
        if (frame < 1) frame = 1; // every frame occupies at least one tick
        frames[i] = frame;
        totalFrame += frames[i];
        if (i == 0) gcdFrame = frames[i];
        else {
            // Euclidean algorithm: gcdFrame = gcd(gcdFrame, frames[i]).
            NSUInteger frame = frames[i], tmp;
            if (frame < gcdFrame) {
                tmp = frame; frame = gcdFrame; gcdFrame = tmp;
            }
            while (true) {
                tmp = frame % gcdFrame;
                if (tmp == 0) break;
                frame = gcdFrame;
                gcdFrame = tmp;
            }
        }
    }
    NSMutableArray *array = [NSMutableArray new];
    for (size_t i = 0; i < count; i++) {
        CGImageRef imageRef = CGImageSourceCreateImageAtIndex(source, i, NULL);
        if (!imageRef) {
            CFRelease(source);
            return nil;
        }
        size_t width = CGImageGetWidth(imageRef);
        size_t height = CGImageGetHeight(imageRef);
        if (width == 0 || height == 0) {
            CFRelease(source);
            CFRelease(imageRef);
            return nil;
        }
        
        CGImageAlphaInfo alphaInfo = CGImageGetAlphaInfo(imageRef) & kCGBitmapAlphaInfoMask;
        BOOL hasAlpha = NO;
        if (alphaInfo == kCGImageAlphaPremultipliedLast ||
            alphaInfo == kCGImageAlphaPremultipliedFirst ||
            alphaInfo == kCGImageAlphaLast ||
            alphaInfo == kCGImageAlphaFirst) {
            hasAlpha = YES;
        }
        // BGRA8888 (premultiplied) or BGRX8888
        // same as UIGraphicsBeginImageContext() and -[UIView drawRect:]
        CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host;
        bitmapInfo |= hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
        CGColorSpaceRef space = CGColorSpaceCreateDeviceRGB();
        CGContextRef context = CGBitmapContextCreate(NULL, width, height, 8, 0, space, bitmapInfo);
        CGColorSpaceRelease(space);
        if (!context) {
            CFRelease(source);
            CFRelease(imageRef);
            return nil;
        }
        // Drawing into the bitmap context forces decompression now, so playback
        // does not decode lazily on the main thread.
        CGContextDrawImage(context, CGRectMake(0, 0, width, height), imageRef); // decode
        CGImageRef decoded = CGBitmapContextCreateImage(context);
        CFRelease(context);
        if (!decoded) {
            CFRelease(source);
            CFRelease(imageRef);
            return nil;
        }
        UIImage *image = [UIImage imageWithCGImage:decoded scale:scale orientation:UIImageOrientationUp];
        CGImageRelease(imageRef);
        CGImageRelease(decoded);
        if (!image) {
            CFRelease(source);
            return nil;
        }
        // Repeat this frame so the uniform playback rate matches its delay.
        for (size_t j = 0, max = frames[i] / gcdFrame; j < max; j++) {
            [array addObject:image];
        }
    }
    CFRelease(source);
    UIImage *image = [self.class animatedImageWithImages:array duration:totalTime];
    return image;
}
|
|
/// Returns YES when `data` looks like a GIF file containing more than one frame.
/// @param data Image file data (may be nil; returns NO).
+ (BOOL)isAnimatedGIFData:(NSData *)data {
    if (data.length < 16) return NO;
    // Read the signature via memcpy: data.bytes has no alignment guarantee, so
    // dereferencing it as a UInt32 * is undefined behavior on strict-alignment
    // architectures.
    UInt32 magic = 0;
    memcpy(&magic, data.bytes, sizeof(magic));
    // 'G''I''F' in the low three bytes (little-endian load).
    // http://www.w3.org/Graphics/GIF/spec-gif89a.txt
    if ((magic & 0xFFFFFF) != '\0FIG') return NO;
    CGImageSourceRef source = CGImageSourceCreateWithData((__bridge CFDataRef)data, NULL);
    if (!source) return NO;
    size_t count = CGImageSourceGetCount(source);
    CFRelease(source);
    return count > 1;
}
|
|
/// Returns YES when the file at `path` begins with the GIF signature.
/// NOTE: this only checks the magic bytes; it does NOT verify that the file
/// actually contains multiple frames (unlike +isAnimatedGIFData:).
/// @param path A file system path (may be nil/empty; returns NO).
+ (BOOL)isAnimatedGIFFile:(NSString *)path {
    if (path.length == 0) return NO;
    // fileSystemRepresentation is the correct encoding for file-system calls
    // (it handles decomposed Unicode); UTF8String may not match the on-disk name.
    FILE *fd = fopen(path.fileSystemRepresentation, "rb");
    if (!fd) return NO;
    
    BOOL isGIF = NO;
    UInt32 magic = 0;
    if (fread(&magic, sizeof(UInt32), 1, fd) == 1) {
        // Little-endian 'G''I''F' in the low three bytes.
        if ((magic & 0xFFFFFF) == '\0FIG') isGIF = YES;
    }
    fclose(fd);
    return isGIF;
}
|
|
/// Renders page 1 of a PDF (NSData or NSString path) at its natural crop-box size.
+ (UIImage *)imageWithPDF:(id)dataOrPath {
    return [self _yy_imageWithPDF:dataOrPath resize:NO size:CGSizeZero];
}
|
|
/// Renders page 1 of a PDF (NSData or NSString path) scaled to `size` (points).
+ (UIImage *)imageWithPDF:(id)dataOrPath size:(CGSize)size {
    return [self _yy_imageWithPDF:dataOrPath resize:YES size:size];
}
|
|
/// Renders a single emoji string into a square bitmap using AppleColorEmoji.
/// @param emoji The emoji string (returns nil when empty).
/// @param size  Side length in points (returns nil when < 1).
/// @return The rendered image, or nil on failure.
+ (UIImage *)imageWithEmoji:(NSString *)emoji size:(CGFloat)size {
    if (emoji.length == 0) return nil;
    if (size < 1) return nil;
    
    CGFloat scale = [UIScreen mainScreen].scale;
    CTFontRef font = CTFontCreateWithName(CFSTR("AppleColorEmoji"), size * scale, NULL);
    if (!font) return nil;
    
    NSAttributedString *str = [[NSAttributedString alloc] initWithString:emoji attributes:@{ (__bridge id)kCTFontAttributeName:(__bridge id)font, (__bridge id)kCTForegroundColorAttributeName:(__bridge id)[UIColor whiteColor].CGColor }];
    
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef ctx = CGBitmapContextCreate(NULL, size * scale, size * scale, 8, 0, colorSpace, kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedFirst);
    CGColorSpaceRelease(colorSpace);
    // Fix: the original never checked ctx; a NULL bitmap context would leak
    // `font` and silently produce a nil CGImage. Every other context-creating
    // method in this file guards this case.
    if (!ctx) {
        CFRelease(font);
        return nil;
    }
    CGContextSetInterpolationQuality(ctx, kCGInterpolationHigh);
    CTLineRef line = CTLineCreateWithAttributedString((__bridge CFAttributedStringRef)str);
    // Shift the baseline so the glyph's path bounds sit at the bitmap origin.
    CGRect bounds = CTLineGetBoundsWithOptions(line, kCTLineBoundsUseGlyphPathBounds);
    CGContextSetTextPosition(ctx, 0, -bounds.origin.y);
    CTLineDraw(line, ctx);
    CGImageRef imageRef = CGBitmapContextCreateImage(ctx);
    UIImage *image = [[UIImage alloc] initWithCGImage:imageRef scale:scale orientation:UIImageOrientationUp];
    
    CFRelease(font);
    CGContextRelease(ctx);
    if (line) CFRelease(line);
    if (imageRef) CFRelease(imageRef);
    
    return image;
}
|
|
/// Shared PDF rendering backend for +imageWithPDF: and +imageWithPDF:size:.
/// @param dataOrPath NSData with PDF bytes, or an NSString file path; anything
///                   else yields nil.
/// @param resize     When YES, render at `size` points; when NO, render at the
///                   page's crop-box size.
/// @param size       Target size in points (ignored when resize is NO).
/// @return Page 1 rendered at the main screen's scale, or nil on any failure.
+ (UIImage *)_yy_imageWithPDF:(id)dataOrPath resize:(BOOL)resize size:(CGSize)size {
    CGPDFDocumentRef pdf = NULL;
    if ([dataOrPath isKindOfClass:[NSData class]]) {
        CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)dataOrPath);
        pdf = CGPDFDocumentCreateWithProvider(provider);
        CGDataProviderRelease(provider);
    } else if ([dataOrPath isKindOfClass:[NSString class]]) {
        pdf = CGPDFDocumentCreateWithURL((__bridge CFURLRef)[NSURL fileURLWithPath:dataOrPath]);
    }
    if (!pdf) return nil;
    
    // PDF pages are 1-indexed; only the first page is rendered.
    CGPDFPageRef page = CGPDFDocumentGetPage(pdf, 1);
    if (!page) {
        CGPDFDocumentRelease(pdf);
        return nil;
    }
    
    CGRect pdfRect = CGPDFPageGetBoxRect(page, kCGPDFCropBox);
    CGSize pdfSize = resize ? size : pdfRect.size;
    CGFloat scale = [UIScreen mainScreen].scale;
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef ctx = CGBitmapContextCreate(NULL, pdfSize.width * scale, pdfSize.height * scale, 8, 0, colorSpace, kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedFirst);
    if (!ctx) {
        CGColorSpaceRelease(colorSpace);
        CGPDFDocumentRelease(pdf);
        return nil;
    }
    
    // Scale to device pixels, then shift so the crop box's origin maps to (0,0).
    CGContextScaleCTM(ctx, scale, scale);
    CGContextTranslateCTM(ctx, -pdfRect.origin.x, -pdfRect.origin.y);
    CGContextDrawPDFPage(ctx, page);
    CGPDFDocumentRelease(pdf);
    
    CGImageRef image = CGBitmapContextCreateImage(ctx);
    UIImage *pdfImage = [[UIImage alloc] initWithCGImage:image scale:scale orientation:UIImageOrientationUp];
    CGImageRelease(image);
    CGContextRelease(ctx);
    CGColorSpaceRelease(colorSpace);
    
    return pdfImage;
}
|
|
/// Convenience: a 1×1-point solid-color image (cheap to stretch over any area).
+ (UIImage *)imageWithColor:(UIColor *)color {
    return [self imageWithColor:color size:CGSizeMake(1, 1)];
}
|
|
/// Creates an opaque-filled image of the given color and size.
/// @param color The fill color (returns nil when nil).
/// @param size  Size in points; both dimensions must be > 0.
/// @return The filled image, or nil on invalid input or context failure.
+ (UIImage *)imageWithColor:(UIColor *)color size:(CGSize)size {
    if (!color || size.width <= 0 || size.height <= 0) return nil;
    CGRect rect = CGRectMake(0.0f, 0.0f, size.width, size.height);
    UIGraphicsBeginImageContextWithOptions(rect.size, NO, 0);
    CGContextRef context = UIGraphicsGetCurrentContext();
    // Consistency/robustness: guard a nil context like +imageWithSize:drawBlock:
    // does, keeping Begin/End balanced on the failure path.
    if (!context) {
        UIGraphicsEndImageContext();
        return nil;
    }
    CGContextSetFillColorWithColor(context, color.CGColor);
    CGContextFillRect(context, rect);
    UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return image;
}
|
|
/// Creates an image by running `drawBlock` inside a fresh image context.
/// @param size      The canvas size in points.
/// @param drawBlock Drawing code; receives the current CGContext. Must not be nil.
/// @return The rendered image, or nil when the block is nil or no context
///         could be created.
+ (UIImage *)imageWithSize:(CGSize)size drawBlock:(void (^)(CGContextRef context))drawBlock {
    if (!drawBlock) return nil;
    UIGraphicsBeginImageContextWithOptions(size, NO, 0);
    CGContextRef context = UIGraphicsGetCurrentContext();
    if (!context) {
        // Fix: the original returned here without popping the context pushed by
        // UIGraphicsBeginImageContextWithOptions, unbalancing the context stack.
        UIGraphicsEndImageContext();
        return nil;
    }
    drawBlock(context);
    UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return image;
}
|
|
/// Whether the underlying CGImage carries an alpha channel
/// (alpha-first/last, straight or premultiplied). NO when there is no CGImage.
- (BOOL)hasAlphaChannel {
    CGImageRef cgImage = self.CGImage;
    if (cgImage == NULL) return NO;
    switch (CGImageGetAlphaInfo(cgImage) & kCGBitmapAlphaInfoMask) {
        case kCGImageAlphaFirst:
        case kCGImageAlphaLast:
        case kCGImageAlphaPremultipliedFirst:
        case kCGImageAlphaPremultipliedLast:
            return YES;
        default:
            return NO;
    }
}
|
|
/// Draws the image into `rect` positioned/scaled by `contentMode`, optionally
/// clipping the drawing to `rect`.
- (void)drawInRect:(CGRect)rect withContentMode:(UIViewContentMode)contentMode clipsToBounds:(BOOL)clips {
    // Fit the image's natural size into `rect` per the content mode.
    CGRect targetRect = YYCGRectFitWithContentMode(rect, self.size, contentMode);
    if (targetRect.size.width == 0 || targetRect.size.height == 0) return;
    
    if (!clips) {
        [self drawInRect:targetRect];
        return;
    }
    
    CGContextRef context = UIGraphicsGetCurrentContext();
    if (!context) return; // nothing to clip against; skip drawing (matches original)
    CGContextSaveGState(context);
    CGContextAddRect(context, rect);
    CGContextClip(context);
    [self drawInRect:targetRect];
    CGContextRestoreGState(context);
}
|
|
/// Returns a copy stretched to `size` (points, at self.scale), or nil when
/// either dimension is non-positive.
- (UIImage *)imageByResizeToSize:(CGSize)size {
    if (size.width <= 0 || size.height <= 0) return nil;
    UIGraphicsBeginImageContextWithOptions(size, NO, self.scale);
    [self drawInRect:(CGRect){CGPointZero, size}];
    UIImage *resized = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return resized;
}
|
|
/// Returns a copy drawn into a `size` canvas using `contentMode` for
/// positioning/scaling, or nil when either dimension is non-positive.
- (UIImage *)imageByResizeToSize:(CGSize)size contentMode:(UIViewContentMode)contentMode {
    if (size.width <= 0 || size.height <= 0) return nil;
    UIGraphicsBeginImageContextWithOptions(size, NO, self.scale);
    [self drawInRect:(CGRect){CGPointZero, size} withContentMode:contentMode clipsToBounds:NO];
    UIImage *resized = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return resized;
}
|
|
/// Returns the sub-image inside `rect` (given in points), or nil on failure.
/// @param rect Crop rectangle in points; converted to pixels via self.scale.
- (UIImage *)imageByCropToRect:(CGRect)rect {
    // Convert from points to pixel coordinates.
    rect.origin.x *= self.scale;
    rect.origin.y *= self.scale;
    rect.size.width *= self.scale;
    rect.size.height *= self.scale;
    if (rect.size.width <= 0 || rect.size.height <= 0) return nil;
    CGImageRef imageRef = CGImageCreateWithImageInRect(self.CGImage, rect);
    // Fix: CGImageCreateWithImageInRect returns NULL when the rect does not
    // intersect the image bounds (or when self has no CGImage); the original
    // passed that NULL straight into +imageWithCGImage:.
    if (!imageRef) return nil;
    UIImage *image = [UIImage imageWithCGImage:imageRef scale:self.scale orientation:self.imageOrientation];
    CGImageRelease(imageRef);
    return image;
}
|
|
/// Returns a copy inset by `insets` (negative insets extend the canvas),
/// with the exposed border region optionally filled with `color`.
/// Returns nil when the resulting canvas would be empty.
- (UIImage *)imageByInsetEdge:(UIEdgeInsets)insets withColor:(UIColor *)color {
    // The canvas shrinks (or grows, for negative insets) by the inset amounts.
    CGSize canvasSize = self.size;
    canvasSize.width -= insets.left + insets.right;
    canvasSize.height -= insets.top + insets.bottom;
    if (canvasSize.width <= 0 || canvasSize.height <= 0) return nil;
    
    // The image keeps its own size, offset by the top/left insets.
    CGRect imageRect = CGRectMake(-insets.left, -insets.top, self.size.width, self.size.height);
    UIGraphicsBeginImageContextWithOptions(canvasSize, NO, self.scale);
    CGContextRef context = UIGraphicsGetCurrentContext();
    if (color) {
        // Fill only the border region (canvas minus image) using even-odd fill
        // of two nested rectangles.
        CGContextSetFillColorWithColor(context, color.CGColor);
        CGMutablePathRef borderPath = CGPathCreateMutable();
        CGPathAddRect(borderPath, NULL, CGRectMake(0, 0, canvasSize.width, canvasSize.height));
        CGPathAddRect(borderPath, NULL, imageRect);
        CGContextAddPath(context, borderPath);
        CGContextEOFillPath(context);
        CGPathRelease(borderPath);
    }
    [self drawInRect:imageRect];
    UIImage *result = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return result;
}
|
|
/// Rounds all corners with `radius`, no border.
- (UIImage *)imageByRoundCornerRadius:(CGFloat)radius {
    return [self imageByRoundCornerRadius:radius borderWidth:0 borderColor:nil];
}
|
|
/// Rounds all corners with `radius` and strokes an optional border
/// (miter line join).
- (UIImage *)imageByRoundCornerRadius:(CGFloat)radius
                          borderWidth:(CGFloat)borderWidth
                          borderColor:(UIColor *)borderColor {
    return [self imageByRoundCornerRadius:radius
                                  corners:UIRectCornerAllCorners
                              borderWidth:borderWidth
                              borderColor:borderColor
                           borderLineJoin:kCGLineJoinMiter];
}
|
|
/// Returns a copy with the selected corners rounded, optionally stroked with a
/// border of `borderWidth` in `borderColor` using `borderLineJoin`.
/// @param radius         Corner radius in points.
/// @param corners        Which corners to round (caller's visual orientation).
/// @param borderWidth    Stroke width; a width >= half the smaller dimension
///                       disables both clipping and stroking.
/// @param borderColor    Stroke color; nil means no border is drawn.
/// @param borderLineJoin Line join style for the border path.
- (UIImage *)imageByRoundCornerRadius:(CGFloat)radius
                              corners:(UIRectCorner)corners
                          borderWidth:(CGFloat)borderWidth
                          borderColor:(UIColor *)borderColor
                       borderLineJoin:(CGLineJoin)borderLineJoin {
    
    // The context below is flipped vertically before drawing, so mirror the
    // requested corners top<->bottom first; the visual result then matches the
    // caller's UIRectCorner values.
    if (corners != UIRectCornerAllCorners) {
        UIRectCorner tmp = 0;
        if (corners & UIRectCornerTopLeft) tmp |= UIRectCornerBottomLeft;
        if (corners & UIRectCornerTopRight) tmp |= UIRectCornerBottomRight;
        if (corners & UIRectCornerBottomLeft) tmp |= UIRectCornerTopLeft;
        if (corners & UIRectCornerBottomRight) tmp |= UIRectCornerTopRight;
        corners = tmp;
    }
    
    UIGraphicsBeginImageContextWithOptions(self.size, NO, self.scale);
    CGContextRef context = UIGraphicsGetCurrentContext();
    CGRect rect = CGRectMake(0, 0, self.size.width, self.size.height);
    // Flip the coordinate system so CGContextDrawImage renders upright.
    CGContextScaleCTM(context, 1, -1);
    CGContextTranslateCTM(context, 0, -rect.size.height);
    
    CGFloat minSize = MIN(self.size.width, self.size.height);
    if (borderWidth < minSize / 2) {
        // Clip to the rounded rect inset by the border, then draw the image.
        // NOTE(review): the vertical corner radius is `borderWidth` here, not
        // `radius` — matches upstream behavior but looks suspicious; confirm.
        UIBezierPath *path = [UIBezierPath bezierPathWithRoundedRect:CGRectInset(rect, borderWidth, borderWidth) byRoundingCorners:corners cornerRadii:CGSizeMake(radius, borderWidth)];
        [path closePath];
        
        CGContextSaveGState(context);
        [path addClip];
        CGContextDrawImage(context, rect, self.CGImage);
        CGContextRestoreGState(context);
    }
    
    if (borderColor && borderWidth < minSize / 2 && borderWidth > 0) {
        // Align the stroke center to a pixel boundary to avoid a blurry edge.
        CGFloat strokeInset = (floor(borderWidth * self.scale) + 0.5) / self.scale;
        CGRect strokeRect = CGRectInset(rect, strokeInset, strokeInset);
        CGFloat strokeRadius = radius > self.scale / 2 ? radius - self.scale / 2 : 0;
        UIBezierPath *path = [UIBezierPath bezierPathWithRoundedRect:strokeRect byRoundingCorners:corners cornerRadii:CGSizeMake(strokeRadius, borderWidth)];
        [path closePath];
        
        path.lineWidth = borderWidth;
        path.lineJoinStyle = borderLineJoin;
        [borderColor setStroke];
        [path stroke];
    }
    
    UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return image;
}
|
|
/// Returns a copy rotated by `radians` (counter-clockwise in UIKit's flipped
/// space — see the left/right-90 wrappers below).
/// @param radians Rotation angle in radians.
/// @param fitSize When YES the canvas expands to contain the rotated image;
///                when NO the canvas keeps the original pixel size and corners
///                may be clipped.
- (UIImage *)imageByRotate:(CGFloat)radians fitSize:(BOOL)fitSize {
    size_t width = (size_t)CGImageGetWidth(self.CGImage);
    size_t height = (size_t)CGImageGetHeight(self.CGImage);
    // Bounding box of the rotated image (identity when not fitting).
    CGRect newRect = CGRectApplyAffineTransform(CGRectMake(0., 0., width, height),
                                                fitSize ? CGAffineTransformMakeRotation(radians) : CGAffineTransformIdentity);
    
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(NULL,
                                                 (size_t)newRect.size.width,
                                                 (size_t)newRect.size.height,
                                                 8,
                                                 (size_t)newRect.size.width * 4,
                                                 colorSpace,
                                                 kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedFirst);
    CGColorSpaceRelease(colorSpace);
    if (!context) return nil; // also covers self.CGImage == NULL (0×0 canvas)
    
    CGContextSetShouldAntialias(context, true);
    CGContextSetAllowsAntialiasing(context, true);
    CGContextSetInterpolationQuality(context, kCGInterpolationHigh);
    
    // Rotate about the canvas center, then draw the image centered on it.
    CGContextTranslateCTM(context, +(newRect.size.width * 0.5), +(newRect.size.height * 0.5));
    CGContextRotateCTM(context, radians);
    
    CGContextDrawImage(context, CGRectMake(-(width * 0.5), -(height * 0.5), width, height), self.CGImage);
    CGImageRef imgRef = CGBitmapContextCreateImage(context);
    UIImage *img = [UIImage imageWithCGImage:imgRef scale:self.scale orientation:self.imageOrientation];
    CGImageRelease(imgRef);
    CGContextRelease(context);
    return img;
}
|
|
/// Returns a copy mirrored horizontally and/or vertically using vImage.
/// Backs both flips and the 180° rotation (horizontal + vertical together).
/// @param horizontal Mirror left<->right.
/// @param vertical   Mirror top<->bottom.
/// @return The flipped image, or nil when self has no CGImage or rendering fails.
- (UIImage *)_yy_flipHorizontal:(BOOL)horizontal vertical:(BOOL)vertical {
    if (!self.CGImage) return nil;
    size_t width = (size_t)CGImageGetWidth(self.CGImage);
    size_t height = (size_t)CGImageGetHeight(self.CGImage);
    size_t bytesPerRow = width * 4; // ARGB8888, 4 bytes per pixel
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(NULL, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedFirst);
    CGColorSpaceRelease(colorSpace);
    if (!context) return nil;
    
    CGContextDrawImage(context, CGRectMake(0, 0, width, height), self.CGImage);
    UInt8 *data = (UInt8 *)CGBitmapContextGetData(context);
    if (!data) {
        CGContextRelease(context);
        return nil;
    }
    // src and dest deliberately alias the same buffer: the reflects are
    // performed in place on the context's backing store.
    vImage_Buffer src = { data, height, width, bytesPerRow };
    vImage_Buffer dest = { data, height, width, bytesPerRow };
    if (vertical) {
        vImageVerticalReflect_ARGB8888(&src, &dest, kvImageBackgroundColorFill);
    }
    if (horizontal) {
        vImageHorizontalReflect_ARGB8888(&src, &dest, kvImageBackgroundColorFill);
    }
    CGImageRef imgRef = CGBitmapContextCreateImage(context);
    CGContextRelease(context);
    UIImage *img = [UIImage imageWithCGImage:imgRef scale:self.scale orientation:self.imageOrientation];
    CGImageRelease(imgRef);
    return img;
}
|
|
/// Returns a copy rotated a quarter turn counter-clockwise, expanded to fit.
- (UIImage *)imageByRotateLeft90 {
    return [self imageByRotate:DegreesToRadians(90) fitSize:YES];
}
|
|
/// Returns a copy rotated a quarter turn clockwise, expanded to fit.
- (UIImage *)imageByRotateRight90 {
    return [self imageByRotate:DegreesToRadians(-90) fitSize:YES];
}
|
|
/// Returns a copy rotated a half turn, implemented as a double mirror
/// (horizontal + vertical flip).
- (UIImage *)imageByRotate180 {
    return [self _yy_flipHorizontal:YES vertical:YES];
}
|
|
/// Returns a copy mirrored top<->bottom.
- (UIImage *)imageByFlipVertical {
    return [self _yy_flipHorizontal:NO vertical:YES];
}
|
|
/// Returns a copy mirrored left<->right.
- (UIImage *)imageByFlipHorizontal {
    return [self _yy_flipHorizontal:YES vertical:NO];
}
|
|
/// Returns a copy filled with `color`, keeping only the original alpha channel
/// (i.e. a silhouette tint).
- (UIImage *)imageByTintColor:(UIColor *)color {
    CGRect bounds = (CGRect){CGPointZero, self.size};
    UIGraphicsBeginImageContextWithOptions(self.size, NO, self.scale);
    // Fill the whole canvas with the tint, then use DestinationIn so only the
    // pixels covered by the image's alpha survive.
    [color set];
    UIRectFill(bounds);
    [self drawAtPoint:CGPointZero blendMode:kCGBlendModeDestinationIn alpha:1];
    UIImage *tinted = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return tinted;
}
|
|
/// Returns a grayscale copy: zero saturation with no blur, tint, or mask.
- (UIImage *)imageByGrayscale {
    return [self imageByBlurRadius:0 tintColor:nil tintMode:kCGBlendModeNormal saturation:0 maskImage:nil];
}
|
|
/// Soft blur preset: large radius with a faint translucent light-gray tint.
- (UIImage *)imageByBlurSoft {
    UIColor *tint = [UIColor colorWithWhite:0.84 alpha:0.36];
    return [self imageByBlurRadius:60 tintColor:tint tintMode:kCGBlendModeNormal saturation:1.8 maskImage:nil];
}
|
|
/// "Light" blur preset (resembles UIBlurEffectStyleLight).
- (UIImage *)imageByBlurLight {
    UIColor *tint = [UIColor colorWithWhite:1.0 alpha:0.3];
    return [self imageByBlurRadius:60 tintColor:tint tintMode:kCGBlendModeNormal saturation:1.8 maskImage:nil];
}
|
|
/// "Extra light" blur preset (resembles UIBlurEffectStyleExtraLight).
- (UIImage *)imageByBlurExtraLight {
    UIColor *tint = [UIColor colorWithWhite:0.97 alpha:0.82];
    return [self imageByBlurRadius:40 tintColor:tint tintMode:kCGBlendModeNormal saturation:1.8 maskImage:nil];
}
|
|
/// "Dark" blur preset (resembles UIBlurEffectStyleDark).
- (UIImage *)imageByBlurDark {
    UIColor *tint = [UIColor colorWithWhite:0.11 alpha:0.73];
    return [self imageByBlurRadius:40 tintColor:tint tintMode:kCGBlendModeNormal saturation:1.8 maskImage:nil];
}
|
|
/// Returns a blurred copy tinted with `tintColor` at a fixed 0.6 alpha,
/// desaturated (saturation -1.0).
- (UIImage *)imageByBlurWithTint:(UIColor *)tintColor {
    const CGFloat EffectColorAlpha = 0.6;
    UIColor *effectColor = tintColor;
    // Re-derive the tint at the effect alpha, handling both grayscale
    // (2-component) and RGB color spaces.
    if (CGColorGetNumberOfComponents(tintColor.CGColor) == 2) {
        CGFloat white;
        if ([tintColor getWhite:&white alpha:NULL]) {
            effectColor = [UIColor colorWithWhite:white alpha:EffectColorAlpha];
        }
    } else {
        CGFloat red, green, blue;
        if ([tintColor getRed:&red green:&green blue:&blue alpha:NULL]) {
            effectColor = [UIColor colorWithRed:red green:green blue:blue alpha:EffectColorAlpha];
        }
    }
    return [self imageByBlurRadius:20 tintColor:effectColor tintMode:kCGBlendModeNormal saturation:-1.0 maskImage:nil];
}
|
|
/// Core effect pipeline: Gaussian-approximating box blur + saturation matrix
/// via Accelerate/vImage, then tint and mask via _yy_mergeImageRef:.
/// Derived from Apple's UIImage+ImageEffects sample.
/// @param blurRadius    Blur radius in points; <= FLT_EPSILON disables blur.
/// @param tintColor     Optional tint drawn over the result with tintBlendMode.
/// @param tintBlendMode Blend mode for the tint fill.
/// @param saturation    1.0 leaves saturation unchanged; 0 = grayscale.
/// @param maskImage     Optional mask; the effect is composited through it.
/// @return The processed image, or nil on invalid input / vImage failure.
- (UIImage *)imageByBlurRadius:(CGFloat)blurRadius
                     tintColor:(UIColor *)tintColor
                      tintMode:(CGBlendMode)tintBlendMode
                    saturation:(CGFloat)saturation
                     maskImage:(UIImage *)maskImage {
    if (self.size.width < 1 || self.size.height < 1) {
        NSLog(@"UIImage+YYAdd error: invalid size: (%.2f x %.2f). Both dimensions must be >= 1: %@", self.size.width, self.size.height, self);
        return nil;
    }
    if (!self.CGImage) {
        NSLog(@"UIImage+YYAdd error: inputImage must be backed by a CGImage: %@", self);
        return nil;
    }
    if (maskImage && !maskImage.CGImage) {
        NSLog(@"UIImage+YYAdd error: effectMaskImage must be backed by a CGImage: %@", maskImage);
        return nil;
    }
    
    // iOS7 and above can use new func.
    // Weakly-linked symbols resolve to NULL when unavailable, so comparing the
    // function pointers against 0 is a runtime availability check.
    BOOL hasNewFunc = (long)vImageBuffer_InitWithCGImage != 0 && (long)vImageCreateCGImageFromBuffer != 0;
    BOOL hasBlur = blurRadius > __FLT_EPSILON__;
    BOOL hasSaturation = fabs(saturation - 1.0) > __FLT_EPSILON__;
    
    CGSize size = self.size;
    CGRect rect = { CGPointZero, size };
    CGFloat scale = self.scale;
    CGImageRef imageRef = self.CGImage;
    BOOL opaque = NO;
    
    // No pixel-level work needed: just tint/mask and return.
    if (!hasBlur && !hasSaturation) {
        return [self _yy_mergeImageRef:imageRef tintColor:tintColor tintBlendMode:tintBlendMode maskImage:maskImage opaque:opaque];
    }
    
    // Two buffers are ping-ponged through the blur/saturation passes via
    // YY_SWAP; `input` always holds the latest result.
    vImage_Buffer effect = { 0 }, scratch = { 0 };
    vImage_Buffer *input = NULL, *output = NULL;
    
    vImage_CGImageFormat format = {
        .bitsPerComponent = 8,
        .bitsPerPixel = 32,
        .colorSpace = NULL,
        .bitmapInfo = kCGImageAlphaPremultipliedFirst | kCGBitmapByteOrder32Little, //requests a BGRA buffer.
        .version = 0,
        .decode = NULL,
        .renderingIntent = kCGRenderingIntentDefault
    };
    
    if (hasNewFunc) {
        // Modern path: let vImage decode the CGImage directly into a buffer.
        vImage_Error err;
        err = vImageBuffer_InitWithCGImage(&effect, &format, NULL, imageRef, kvImagePrintDiagnosticsToConsole);
        if (err != kvImageNoError) {
            NSLog(@"UIImage+YYAdd error: vImageBuffer_InitWithCGImage returned error code %zi for inputImage: %@", err, self);
            return nil;
        }
        err = vImageBuffer_Init(&scratch, effect.height, effect.width, format.bitsPerPixel, kvImageNoFlags);
        if (err != kvImageNoError) {
            NSLog(@"UIImage+YYAdd error: vImageBuffer_Init returned error code %zi for inputImage: %@", err, self);
            return nil;
        }
    } else {
        // Legacy path (pre-iOS7): use two UIGraphics image contexts as the
        // buffers; their backing stores are handed to vImage directly.
        UIGraphicsBeginImageContextWithOptions(size, opaque, scale);
        CGContextRef effectCtx = UIGraphicsGetCurrentContext();
        // Flip so CGContextDrawImage renders upright.
        CGContextScaleCTM(effectCtx, 1.0, -1.0);
        CGContextTranslateCTM(effectCtx, 0, -size.height);
        CGContextDrawImage(effectCtx, rect, imageRef);
        effect.data = CGBitmapContextGetData(effectCtx);
        effect.width = CGBitmapContextGetWidth(effectCtx);
        effect.height = CGBitmapContextGetHeight(effectCtx);
        effect.rowBytes = CGBitmapContextGetBytesPerRow(effectCtx);
        
        UIGraphicsBeginImageContextWithOptions(size, opaque, scale);
        CGContextRef scratchCtx = UIGraphicsGetCurrentContext();
        scratch.data = CGBitmapContextGetData(scratchCtx);
        scratch.width = CGBitmapContextGetWidth(scratchCtx);
        scratch.height = CGBitmapContextGetHeight(scratchCtx);
        scratch.rowBytes = CGBitmapContextGetBytesPerRow(scratchCtx);
    }
    
    input = &effect;
    output = &scratch;
    
    if (hasBlur) {
        // A description of how to compute the box kernel width from the Gaussian
        // radius (aka standard deviation) appears in the SVG spec:
        // http://www.w3.org/TR/SVG/filters.html#feGaussianBlurElement
        //
        // For larger values of 's' (s >= 2.0), an approximation can be used: Three
        // successive box-blurs build a piece-wise quadratic convolution kernel, which
        // approximates the Gaussian kernel to within roughly 3%.
        //
        // let d = floor(s * 3*sqrt(2*pi)/4 + 0.5)
        //
        // ... if d is odd, use three box-blurs of size 'd', centered on the output pixel.
        //
        CGFloat inputRadius = blurRadius * scale;
        if (inputRadius - 2.0 < __FLT_EPSILON__) inputRadius = 2.0; // clamp to a minimum effective radius
        uint32_t radius = floor((inputRadius * 3.0 * sqrt(2 * M_PI) / 4 + 0.5) / 2);
        radius |= 1; // force radius to be odd so that the three box-blur methodology works.
        int iterations;
        if (blurRadius * scale < 0.5) iterations = 1;
        else if (blurRadius * scale < 1.5) iterations = 2;
        else iterations = 3;
        // First call only queries the required temp-buffer size.
        // NOTE(review): on failure this returns a negative vImage_Error which
        // would flow into malloc; assumed not to happen for valid buffers — confirm.
        NSInteger tempSize = vImageBoxConvolve_ARGB8888(input, output, NULL, 0, 0, radius, radius, NULL, kvImageGetTempBufferSize | kvImageEdgeExtend);
        void *temp = malloc(tempSize);
        for (int i = 0; i < iterations; i++) {
            vImageBoxConvolve_ARGB8888(input, output, temp, 0, 0, radius, radius, NULL, kvImageEdgeExtend);
            YY_SWAP(input, output); // latest result is now in `input`
        }
        free(temp);
    }
    
    
    if (hasSaturation) {
        // These values appear in the W3C Filter Effects spec:
        // https://dvcs.w3.org/hg/FXTF/raw-file/default/filters/Publish.html#grayscaleEquivalent
        CGFloat s = saturation;
        CGFloat matrixFloat[] = {
            0.0722 + 0.9278 * s, 0.0722 - 0.0722 * s, 0.0722 - 0.0722 * s, 0,
            0.7152 - 0.7152 * s, 0.7152 + 0.2848 * s, 0.7152 - 0.7152 * s, 0,
            0.2126 - 0.2126 * s, 0.2126 - 0.2126 * s, 0.2126 + 0.7873 * s, 0,
            0, 0, 0, 1,
        };
        // vImage wants a fixed-point int16 matrix; scale by the divisor.
        const int32_t divisor = 256;
        NSUInteger matrixSize = sizeof(matrixFloat) / sizeof(matrixFloat[0]);
        int16_t matrix[matrixSize];
        for (NSUInteger i = 0; i < matrixSize; ++i) {
            matrix[i] = (int16_t)roundf(matrixFloat[i] * divisor);
        }
        vImageMatrixMultiply_ARGB8888(input, output, matrix, divisor, NULL, NULL, kvImageNoFlags);
        YY_SWAP(input, output);
    }
    
    UIImage *outputImage = nil;
    if (hasNewFunc) {
        CGImageRef effectCGImage = NULL;
        // kvImageNoAllocate hands buffer ownership to the CGImage; the cleanup
        // callback frees it when the image is destroyed.
        effectCGImage = vImageCreateCGImageFromBuffer(input, &format, &_yy_cleanupBuffer, NULL, kvImageNoAllocate, NULL);
        if (effectCGImage == NULL) {
            // Fallback: let vImage copy the buffer, then free ours manually.
            effectCGImage = vImageCreateCGImageFromBuffer(input, &format, NULL, NULL, kvImageNoFlags, NULL);
            free(input->data);
        }
        free(output->data); // scratch side is always ours to free
        outputImage = [self _yy_mergeImageRef:effectCGImage tintColor:tintColor tintBlendMode:tintBlendMode maskImage:maskImage opaque:opaque];
        CGImageRelease(effectCGImage);
    } else {
        // Legacy path: pop the two image contexts in LIFO order, snapshotting
        // whichever context holds the final result (`input`) before its pop.
        CGImageRef effectCGImage;
        UIImage *effectImage;
        if (input != &effect) effectImage = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();
        if (input == &effect) effectImage = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();
        effectCGImage = effectImage.CGImage;
        outputImage = [self _yy_mergeImageRef:effectCGImage tintColor:tintColor tintBlendMode:tintBlendMode maskImage:maskImage opaque:opaque];
    }
    return outputImage;
}
|
|
// Helper function to handle deferred cleanup of a buffer.
// Passed to vImageCreateCGImageFromBuffer (kvImageNoAllocate) so the pixel
// buffer is freed when the wrapping CGImage is destroyed. `userData` is unused.
static void _yy_cleanupBuffer(void *userData, void *buf_data) {
    free(buf_data);
}
|
|
// Helper function to add tint and mask.
/// Composites the effect CGImage with an optional tint fill and mask, sized
/// to self's point size and scale.
/// @param effectCGImage The processed pixels to composite.
/// @param tintColor     Optional tint; skipped when nil or fully transparent.
/// @param tintBlendMode Blend mode used to fill the tint over the effect.
/// @param maskImage     Optional mask; when present, self is drawn first and
///                      the effect is clipped through the mask's alpha.
/// @param opaque        Whether the output context is opaque.
/// @return The merged image. With no tint and no mask, the CGImage is wrapped
///         directly (at default scale/orientation) without redrawing.
- (UIImage *)_yy_mergeImageRef:(CGImageRef)effectCGImage
                     tintColor:(UIColor *)tintColor
                 tintBlendMode:(CGBlendMode)tintBlendMode
                     maskImage:(UIImage *)maskImage
                        opaque:(BOOL)opaque {
    BOOL hasTint = tintColor != nil && CGColorGetAlpha(tintColor.CGColor) > __FLT_EPSILON__;
    BOOL hasMask = maskImage != nil;
    CGSize size = self.size;
    CGRect rect = { CGPointZero, size };
    CGFloat scale = self.scale;
    
    if (!hasTint && !hasMask) {
        return [UIImage imageWithCGImage:effectCGImage];
    }
    
    UIGraphicsBeginImageContextWithOptions(size, opaque, scale);
    CGContextRef context = UIGraphicsGetCurrentContext();
    // Flip the coordinate system so CGContextDrawImage renders upright.
    CGContextScaleCTM(context, 1.0, -1.0);
    CGContextTranslateCTM(context, 0, -size.height);
    if (hasMask) {
        // Draw the untouched original underneath, then clip subsequent drawing
        // (the effect + tint) through the mask. The matching restore is below.
        CGContextDrawImage(context, rect, self.CGImage);
        CGContextSaveGState(context);
        CGContextClipToMask(context, rect, maskImage.CGImage);
    }
    CGContextDrawImage(context, rect, effectCGImage);
    if (hasTint) {
        CGContextSaveGState(context);
        CGContextSetBlendMode(context, tintBlendMode);
        CGContextSetFillColorWithColor(context, tintColor.CGColor);
        CGContextFillRect(context, rect);
        CGContextRestoreGState(context);
    }
    if (hasMask) {
        CGContextRestoreGState(context); // pairs with the save before ClipToMask
    }
    UIImage *outputImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return outputImage;
}
|
|
@end
|