//
//  UIImage+RQExtension.m
//  RQCommon
//
//  Created by 张嵘 on 2018/11/16.
//  Copyright © 2018 张嵘. All rights reserved.
//

#import "UIImage+RQExtension.h"
#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>

@implementation UIImage (RQExtension)

/**
 *  Returns an image for the given image name that can be freely stretched (stretched from its center).
 */
+ (UIImage *)rq_resizableImage:(NSString *)imgName {
    UIImage *image = [UIImage imageNamed:imgName];
    return [self rq_resizableImage:imgName
                         capInsets:UIEdgeInsetsMake(image.size.height * .5f,
                                                    image.size.width * .5f,
                                                    image.size.height * .5f,
                                                    image.size.width * .5f)];
}

+ (UIImage *)rq_resizableImage:(NSString *)imgName capInsets:(UIEdgeInsets)capInsets {
    UIImage *image = [UIImage imageNamed:imgName];
    return [image resizableImageWithCapInsets:capInsets];
}

+ (UIImage *)rq_imageAlwaysShowOriginalImageWithImageName:(NSString *)imageName {
    UIImage *image = [UIImage imageNamed:imageName];
    if ([image respondsToSelector:@selector(imageWithRenderingMode:)]) {
        // iOS 7.0+
        return [image imageWithRenderingMode:UIImageRenderingModeAlwaysOriginal];
    } else {
        return image;
    }
}

+ (UIImage *)rq_thumbnailImageForVideo:(NSURL *)videoURL atTime:(NSTimeInterval)time {
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:videoURL options:nil];
    NSParameterAssert(asset);
    AVAssetImageGenerator *assetImageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
    assetImageGenerator.appliesPreferredTrackTransform = YES;
    assetImageGenerator.apertureMode = AVAssetImageGeneratorApertureModeEncodedPixels;

    CGImageRef thumbnailImageRef = NULL;
    CFTimeInterval thumbnailImageTime = time;
    NSError *thumbnailImageGenerationError = nil;
    // `time` is expressed in seconds, so build the CMTime from seconds.
    thumbnailImageRef = [assetImageGenerator copyCGImageAtTime:CMTimeMakeWithSeconds(thumbnailImageTime, 60)
                                                    actualTime:NULL
                                                         error:&thumbnailImageGenerationError];
    if (!thumbnailImageRef) {
        NSLog(@"thumbnailImageGenerationError %@", thumbnailImageGenerationError);
    }
    UIImage *thumbnailImage = thumbnailImageRef ? [[UIImage alloc] initWithCGImage:thumbnailImageRef] : nil;
    if (thumbnailImageRef) {
        // copyCGImageAtTime: returns a +1 CGImageRef; release it once the UIImage owns the bitmap.
        CGImageRelease(thumbnailImageRef);
    }
    return thumbnailImage;
}

/// Takes a screenshot of the key window.
///
/// @return The screenshot image.
+ (UIImage *)rq_screenShot {
    // 1. Get the key window.
    UIWindow *window = [UIApplication sharedApplication].keyWindow;
    // 2. Open an image context.
    UIGraphicsBeginImageContextWithOptions(window.bounds.size, YES, 0);
    // 3. Render the window's view hierarchy into the current context.
    [window drawViewHierarchyInRect:window.bounds afterScreenUpdates:NO];
    // 4. Grab the image.
    UIImage *screenShot = UIGraphicsGetImageFromCurrentImageContext();
    // 5. Close the context.
    UIGraphicsEndImageContext();
    return screenShot;
}
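// Usage sketch for the two helpers above (illustration only; `path` and `coverImageView` are
// hypothetical caller-side names, not part of this category): grab a frame one second into a
// local video and display it.
//
//     NSURL *videoURL = [NSURL fileURLWithPath:path];
//     UIImage *cover  = [UIImage rq_thumbnailImageForVideo:videoURL atTime:1.0];
//     coverImageView.image = cover;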
- (UIImage *)rq_fixOrientation {
    // No-op if the orientation is already correct.
    if (self.imageOrientation == UIImageOrientationUp) return self;

    // We need to calculate the proper transformation to make the image upright.
    // We do it in 2 steps: rotate if Left/Right/Down, and then flip if Mirrored.
    CGAffineTransform transform = CGAffineTransformIdentity;

    switch (self.imageOrientation) {
        case UIImageOrientationDown:
        case UIImageOrientationDownMirrored:
            transform = CGAffineTransformTranslate(transform, self.size.width, self.size.height);
            transform = CGAffineTransformRotate(transform, M_PI);
            break;

        case UIImageOrientationLeft:
        case UIImageOrientationLeftMirrored:
            transform = CGAffineTransformTranslate(transform, self.size.width, 0);
            transform = CGAffineTransformRotate(transform, M_PI_2);
            break;

        case UIImageOrientationRight:
        case UIImageOrientationRightMirrored:
            transform = CGAffineTransformTranslate(transform, 0, self.size.height);
            transform = CGAffineTransformRotate(transform, -M_PI_2);
            break;

        case UIImageOrientationUp:
        case UIImageOrientationUpMirrored:
            break;
    }

    switch (self.imageOrientation) {
        case UIImageOrientationUpMirrored:
        case UIImageOrientationDownMirrored:
            transform = CGAffineTransformTranslate(transform, self.size.width, 0);
            transform = CGAffineTransformScale(transform, -1, 1);
            break;

        case UIImageOrientationLeftMirrored:
        case UIImageOrientationRightMirrored:
            transform = CGAffineTransformTranslate(transform, self.size.height, 0);
            transform = CGAffineTransformScale(transform, -1, 1);
            break;

        case UIImageOrientationUp:
        case UIImageOrientationDown:
        case UIImageOrientationLeft:
        case UIImageOrientationRight:
            break;
    }

    // Now we draw the underlying CGImage into a new context, applying the transform
    // calculated above.
    CGContextRef ctx = CGBitmapContextCreate(NULL, self.size.width, self.size.height,
                                             CGImageGetBitsPerComponent(self.CGImage), 0,
                                             CGImageGetColorSpace(self.CGImage),
                                             CGImageGetBitmapInfo(self.CGImage));
    CGContextConcatCTM(ctx, transform);
    switch (self.imageOrientation) {
        case UIImageOrientationLeft:
        case UIImageOrientationLeftMirrored:
        case UIImageOrientationRight:
        case UIImageOrientationRightMirrored:
            // Width and height are swapped for left/right orientations.
            CGContextDrawImage(ctx, CGRectMake(0, 0, self.size.height, self.size.width), self.CGImage);
            break;

        default:
            CGContextDrawImage(ctx, CGRectMake(0, 0, self.size.width, self.size.height), self.CGImage);
            break;
    }

    // And now we just create a new UIImage from the drawing context.
    CGImageRef cgimg = CGBitmapContextCreateImage(ctx);
    UIImage *img = [UIImage imageWithCGImage:cgimg];
    CGContextRelease(ctx);
    CGImageRelease(cgimg);
    return img;
}

// Re-encode the image as JPEG at the given compression quality to reduce its data size
// (the pixel dimensions are unchanged).
- (UIImage *)reduceImage:(UIImage *)image percent:(float)percent {
    NSData *imageData = UIImageJPEGRepresentation(image, percent);
    UIImage *newImage = [UIImage imageWithData:imageData];
    return newImage;
}

// Scale the image down so it fits within the given size (aspect-fit; the unused dimension is
// simply left blank).
- (UIImage *)scaledToSize:(CGSize)newSize {
    // Find the scale factor for the longer side; the shorter side may not fill the target.
    CGFloat rate = newSize.width * 1.0 / self.size.width;
    if (self.size.height * rate > newSize.height) {
        // Too tall; scale by height instead.
        rate = newSize.height * 1.0 / self.size.height;
    }
    CGSize size = CGSizeMake(self.size.width * rate, self.size.height * rate);

    // Create a graphics image context with the new size.
    UIGraphicsBeginImageContext(size);
    [self drawInRect:CGRectMake(0, 0, size.width, size.height)];
    // Get the new image from the context.
    UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
    // End the context.
    UIGraphicsEndImageContext();
    // Return the new image.
    return newImage;
}
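// Usage sketch (illustration only; assumes a UIImagePickerControllerDelegate callback, where
// `info` is the delegate's dictionary and is not defined in this category): normalize the
// orientation of a camera photo, then scale it down for display.
//
//     UIImage *picked  = info[UIImagePickerControllerOriginalImage];
//     UIImage *upright = [picked rq_fixOrientation];
//     UIImage *thumb   = [upright scaledToSize:CGSizeMake(200, 200)];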
// Scale so the shorter side fits the target, then crop the excess (aspect-fill).
- (UIImage *)scaledAndCutToSize:(CGSize)newSize {
    // Find the scale factor for the shorter side; the longer side is cropped away.
    CGFloat rate = newSize.width * 1.0 / self.size.width;
    if (self.size.height * rate < newSize.height) {
        // Too short; scale by height instead.
        rate = newSize.height * 1.0 / self.size.height;
    }
    CGSize scaledSize = CGSizeMake(self.size.width * rate, self.size.height * rate);

    // Draw into a context of the target size with the scaled image centered, so anything
    // falling outside the target rect is cropped.
    UIGraphicsBeginImageContext(newSize);
    [self drawInRect:CGRectMake((newSize.width - scaledSize.width) / 2,
                                (newSize.height - scaledSize.height) / 2,
                                scaledSize.width,
                                scaledSize.height)];
    // Get the new image from the context.
    UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
    // End the context.
    UIGraphicsEndImageContext();
    // Return the new image.
    return newImage;
}

- (UIImage *)imageRotatedByDegrees:(CGFloat)degrees {
    CGFloat width = CGImageGetWidth(self.CGImage);
    CGFloat height = CGImageGetHeight(self.CGImage);

    CGSize rotatedSize;
    rotatedSize.width = width;
    rotatedSize.height = height;

    UIGraphicsBeginImageContext(rotatedSize);
    CGContextRef bitmap = UIGraphicsGetCurrentContext();
    // Rotate around the center of the canvas.
    CGContextTranslateCTM(bitmap, rotatedSize.width / 2, rotatedSize.height / 2);
    CGContextRotateCTM(bitmap, degrees * M_PI / 180);
    // The extra half-turn plus horizontal flip compensates for Core Graphics' flipped
    // coordinate system when drawing with CGContextDrawImage.
    CGContextRotateCTM(bitmap, M_PI);
    CGContextScaleCTM(bitmap, -1.0, 1.0);
    CGContextDrawImage(bitmap,
                       CGRectMake(-rotatedSize.width / 2, -rotatedSize.height / 2,
                                  rotatedSize.width, rotatedSize.height),
                       self.CGImage);

    UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return newImage;
}

#pragma mark - Compress an image down to a target data size (in KB)

+ (NSData *)resetSizeOfImageData:(UIImage *)sourceImage maxSize:(NSInteger)maxSize {
    // First check whether the image already fits at full quality; only compress if it doesn't.
    __block NSData *finallImageData = UIImageJPEGRepresentation(sourceImage, 1.0);
    NSUInteger sizeOrigin = finallImageData.length;
    NSUInteger sizeOriginKB = sizeOrigin / 1000;
    if (sizeOriginKB <= maxSize) {
        return finallImageData;
    }

    // The original image's aspect ratio.
    CGFloat sourceImageAspectRatio = sourceImage.size.width / sourceImage.size.height;
    // Step 1: bring the resolution down first.
    CGSize defaultSize = CGSizeMake(1024, 1024 / sourceImageAspectRatio);
    UIImage *newImage = [self newSizeImage:defaultSize image:sourceImage];
    finallImageData = UIImageJPEGRepresentation(newImage, 1.0);

    // Build the table of compression qualities, stored from largest to smallest.
    NSMutableArray *compressionQualityArr = [NSMutableArray array];
    CGFloat avg = 1.0 / 250;
    CGFloat value = avg;
    for (int i = 250; i >= 1; i--) {
        value = i * avg;
        [compressionQualityArr addObject:@(value)];
    }

    /*
     Step 2: adjust the compression quality.
     Note: compressionQualityArr is ordered from largest to smallest and is searched with the
     binary-search helper below.
     */
    // When the target size cannot be reached, this holds the smallest data that could be produced.
    __block NSData *canCompressMinData = [NSData data];
    [self halfFuntion:compressionQualityArr image:newImage sourceData:finallImageData maxSize:maxSize resultBlock:^(NSData *finallData, NSData *tempData) {
        finallImageData = finallData;
        canCompressMinData = tempData;
    }];

    // Step 3: if the target size still wasn't reached, keep lowering the resolution.
    while (finallImageData.length == 0) {
        // Reduce the width by 100 points per pass; the height follows the aspect ratio.
        CGFloat reduceWidth = 100.0;
        CGFloat reduceHeight = 100.0 / sourceImageAspectRatio;
        if (defaultSize.width - reduceWidth <= 0 || defaultSize.height - reduceHeight <= 0) {
            break;
        }
        defaultSize = CGSizeMake(defaultSize.width - reduceWidth, defaultSize.height - reduceHeight);
        UIImage *image = [self newSizeImage:defaultSize
                                      image:[UIImage imageWithData:UIImageJPEGRepresentation(newImage, [[compressionQualityArr lastObject] floatValue])]];
        [self halfFuntion:compressionQualityArr image:image sourceData:UIImageJPEGRepresentation(image, 1.0) maxSize:maxSize resultBlock:^(NSData *finallData, NSData *tempData) {
            finallImageData = finallData;
            canCompressMinData = tempData;
        }];
    }

    // If the resolution cannot be lowered any further, fall back to the smallest data we could produce.
    if (finallImageData.length == 0) {
        finallImageData = canCompressMinData;
    }
    return finallImageData;
}
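// Usage sketch (illustration only; `photo` is a hypothetical caller-side image): compress a
// photo so its JPEG payload is at most roughly 300 KB before uploading.
//
//     NSData *upload = [UIImage resetSizeOfImageData:photo maxSize:300];
//     // upload.length / 1000 should now be <= 300, unless the image cannot be compressed that far.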
#pragma mark Resize an image (proportional scaling)

/// Resizes an image so it fits within `size`, preserving its aspect ratio.
+ (UIImage *)newSizeImage:(CGSize)size image:(UIImage *)sourceImage {
    CGSize newSize = CGSizeMake(sourceImage.size.width, sourceImage.size.height);
    CGFloat tempHeight = newSize.height / size.height;
    CGFloat tempWidth = newSize.width / size.width;

    if (tempWidth > 1.0 && tempWidth > tempHeight) {
        newSize = CGSizeMake(sourceImage.size.width / tempWidth, sourceImage.size.height / tempWidth);
    } else if (tempHeight > 1.0 && tempWidth < tempHeight) {
        newSize = CGSizeMake(sourceImage.size.width / tempHeight, sourceImage.size.height / tempHeight);
    }

    // UIGraphicsBeginImageContext(newSize);
    UIGraphicsBeginImageContextWithOptions(newSize, NO, 1);
    [sourceImage drawInRect:CGRectMake(0, 0, newSize.width, newSize.height)];
    UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return newImage;
}

#pragma mark Binary search over compression qualities

/// Binary search. In the result block, a non-empty `finallData` means the image was compressed
/// down to the requested size; an empty `finallData` means the target could not be reached, in
/// which case `tempData` carries the smallest data that could be produced.
+ (void)halfFuntion:(NSArray *)arr image:(UIImage *)image sourceData:(NSData *)finallImageData maxSize:(NSInteger)maxSize resultBlock:(void (^)(NSData *finallData, NSData *tempData))block {
    NSData *tempData = [NSData data];
    NSUInteger start = 0;
    NSUInteger end = arr.count - 1;
    NSUInteger index = 0;

    NSUInteger difference = NSIntegerMax;
    while (start <= end) {
        index = start + (end - start) / 2;

        finallImageData = UIImageJPEGRepresentation(image, [arr[index] floatValue]);

        NSUInteger sizeOrigin = finallImageData.length;
        NSUInteger sizeOriginKB = sizeOrigin / 1000;
        NSLog(@"current compressed size: %ld KB", (unsigned long)sizeOriginKB);
        // NSLog(@"\nstart:%zd\nend:%zd\nindex:%zd\nquality:%lf", start, end, (unsigned long)index, [arr[index] floatValue]);

        if (sizeOriginKB > maxSize) {
            start = index + 1;
        } else if (sizeOriginKB < maxSize) {
            // Remember the best result seen so far (the one closest to maxSize from below).
            if (maxSize - sizeOriginKB < difference) {
                difference = maxSize - sizeOriginKB;
                tempData = finallImageData;
            }
            if (index <= 0) {
                break;
            }
            end = index - 1;
        } else {
            break;
        }
    }
    NSData *d = [NSData data];
    if (tempData.length == 0) {
        // The target size was never reached; report the smallest data produced instead.
        d = finallImageData;
    }
    if (block) {
        block(tempData, d);
    }
    // return tempData;
}
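// Usage sketch for the helper above, from within this file (illustration only; `qualities`,
// `img`, and `maxKB` are hypothetical locals): a non-empty first block argument means the
// target size was met.
//
//     [UIImage halfFuntion:qualities image:img sourceData:UIImageJPEGRepresentation(img, 1.0)
//                  maxSize:maxKB resultBlock:^(NSData *finallData, NSData *tempData) {
//         NSData *result = finallData.length > 0 ? finallData : tempData;
//     }];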
+ (UIImage *)fixOrientation:(UIImage *)image {
    // No-op if the orientation is already correct.
    if (image.imageOrientation == UIImageOrientationUp) return image;

    // We need to calculate the proper transformation to make the image upright.
    // We do it in 2 steps: rotate if Left/Right/Down, and then flip if Mirrored.
    CGAffineTransform transform = CGAffineTransformIdentity;

    switch (image.imageOrientation) {
        case UIImageOrientationDown:
        case UIImageOrientationDownMirrored:
            transform = CGAffineTransformTranslate(transform, image.size.width, image.size.height);
            transform = CGAffineTransformRotate(transform, M_PI);
            break;

        case UIImageOrientationLeft:
        case UIImageOrientationLeftMirrored:
            transform = CGAffineTransformTranslate(transform, image.size.width, 0);
            transform = CGAffineTransformRotate(transform, M_PI_2);
            break;

        case UIImageOrientationRight:
        case UIImageOrientationRightMirrored:
            transform = CGAffineTransformTranslate(transform, 0, image.size.height);
            transform = CGAffineTransformRotate(transform, -M_PI_2);
            break;

        default:
            break;
    }

    switch (image.imageOrientation) {
        case UIImageOrientationUpMirrored:
        case UIImageOrientationDownMirrored:
            transform = CGAffineTransformTranslate(transform, image.size.width, 0);
            transform = CGAffineTransformScale(transform, -1, 1);
            break;

        case UIImageOrientationLeftMirrored:
        case UIImageOrientationRightMirrored:
            transform = CGAffineTransformTranslate(transform, image.size.height, 0);
            transform = CGAffineTransformScale(transform, -1, 1);
            break;

        default:
            break;
    }

    // Now we draw the underlying CGImage into a new context, applying the transform
    // calculated above.
    CGContextRef ctx = CGBitmapContextCreate(NULL, image.size.width, image.size.height,
                                             CGImageGetBitsPerComponent(image.CGImage), 0,
                                             CGImageGetColorSpace(image.CGImage),
                                             CGImageGetBitmapInfo(image.CGImage));
    CGContextConcatCTM(ctx, transform);
    switch (image.imageOrientation) {
        case UIImageOrientationLeft:
        case UIImageOrientationLeftMirrored:
        case UIImageOrientationRight:
        case UIImageOrientationRightMirrored:
            // Width and height are swapped for left/right orientations.
            CGContextDrawImage(ctx, CGRectMake(0, 0, image.size.height, image.size.width), image.CGImage);
            break;

        default:
            CGContextDrawImage(ctx, CGRectMake(0, 0, image.size.width, image.size.height), image.CGImage);
            break;
    }

    // And now we just create a new UIImage from the drawing context.
    CGImageRef cgimg = CGBitmapContextCreateImage(ctx);
    UIImage *img = [UIImage imageWithCGImage:cgimg];
    CGContextRelease(ctx);
    CGImageRelease(cgimg);
    return img;
}

- (UIImage *)imageWaterMarkWithImage:(UIImage *)image imageRect:(CGRect)imgRect alpha:(CGFloat)alpha {
    return [self imageWaterMarkWithString:nil rect:CGRectZero attribute:nil image:image imageRect:imgRect alpha:alpha];
}

- (UIImage *)imageWaterMarkWithImage:(UIImage *)image imagePoint:(CGPoint)imgPoint alpha:(CGFloat)alpha {
    return [self imageWaterMarkWithString:nil point:CGPointZero attribute:nil image:image imagePoint:imgPoint alpha:alpha];
}

- (UIImage *)imageWaterMarkWithString:(NSString *)str rect:(CGRect)strRect attribute:(NSDictionary *)attri {
    return [self imageWaterMarkWithString:str rect:strRect attribute:attri image:nil imageRect:CGRectZero alpha:0];
}

- (UIImage *)imageWaterMarkWithString:(NSString *)str point:(CGPoint)strPoint attribute:(NSDictionary *)attri {
    return [self imageWaterMarkWithString:str point:strPoint attribute:attri image:nil imagePoint:CGPointZero alpha:0];
}

- (UIImage *)imageWaterMarkWithString:(NSString *)str point:(CGPoint)strPoint attribute:(NSDictionary *)attri image:(UIImage *)image imagePoint:(CGPoint)imgPoint alpha:(CGFloat)alpha {
    UIGraphicsBeginImageContext(self.size);
    // Draw the receiver first, then the optional image and string watermarks on top.
    [self drawAtPoint:CGPointMake(0, 0) blendMode:kCGBlendModeNormal alpha:1.0];
    if (image) {
        [image drawAtPoint:imgPoint blendMode:kCGBlendModeNormal alpha:alpha];
    }
    if (str) {
        [str drawAtPoint:strPoint withAttributes:attri];
    }
    UIImage *resultImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return resultImage;
}
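// Usage sketch (illustration only; `photo`, `logo`, and the attribute values are hypothetical):
// stamp a semi-transparent logo and a caption onto an image.
//
//     NSDictionary *attrs = @{NSFontAttributeName: [UIFont systemFontOfSize:14],
//                             NSForegroundColorAttributeName: [UIColor whiteColor]};
//     UIImage *marked = [photo imageWaterMarkWithString:@"© RQCommon"
//                                                 point:CGPointMake(10, 10)
//                                             attribute:attrs
//                                                 image:logo
//                                            imagePoint:CGPointMake(10, 40)
//                                                 alpha:0.5];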
- (UIImage *)imageWaterMarkWithString:(NSString *)str rect:(CGRect)strRect attribute:(NSDictionary *)attri image:(UIImage *)image imageRect:(CGRect)imgRect alpha:(CGFloat)alpha {
    UIGraphicsBeginImageContext(self.size);
    // Draw the receiver first, then the optional image and string watermarks on top.
    [self drawInRect:CGRectMake(0, 0, self.size.width, self.size.height) blendMode:kCGBlendModeNormal alpha:1.0];
    if (image) {
        [image drawInRect:imgRect blendMode:kCGBlendModeNormal alpha:alpha];
    }
    if (str) {
        [str drawInRect:strRect withAttributes:attri];
    }
    UIImage *resultImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return resultImage;
}

- (UIImage *)imageAddCornerWithCornerRadiusArray:(NSArray *)cornerRadius lineWidth:(CGFloat)lineWidth borderPosition:(QMUIImageBorderPosition)borderPosition lineColor:(NSString *)lineColor andSize:(CGSize)size {
    CGRect rect = CGRectMake(0, 0, size.width, size.height);
    UIGraphicsBeginImageContextWithOptions(size, NO, [UIScreen mainScreen].scale);
    CGContextRef contextRef = UIGraphicsGetCurrentContext();
    UIBezierPath *path = [UIBezierPath qmui_bezierPathWithRoundedRect:rect cornerRadiusArray:cornerRadius lineWidth:lineWidth];
    if (borderPosition == QMUIImageBorderPositionAll) {
        CGContextSetStrokeColorWithColor(contextRef, RQColorFromHexString(lineColor).CGColor);
        [path setLineWidth:lineWidth];
        [path stroke];
    } else {
        // TODO: use the system API bezierPathWithRoundedRect:byRoundingCorners:cornerRadii: instead.
        if ((QMUIImageBorderPositionBottom & borderPosition) == QMUIImageBorderPositionBottom) {
            [path moveToPoint:CGPointMake(0, size.height - lineWidth / 2)];
            [path addLineToPoint:CGPointMake(size.width, size.height - lineWidth / 2)];
        }
        if ((QMUIImageBorderPositionTop & borderPosition) == QMUIImageBorderPositionTop) {
            [path moveToPoint:CGPointMake(0, lineWidth / 2)];
            [path addLineToPoint:CGPointMake(size.width, lineWidth / 2)];
        }
        if ((QMUIImageBorderPositionLeft & borderPosition) == QMUIImageBorderPositionLeft) {
            [path moveToPoint:CGPointMake(lineWidth / 2, 0)];
            [path addLineToPoint:CGPointMake(lineWidth / 2, size.height)];
        }
        if ((QMUIImageBorderPositionRight & borderPosition) == QMUIImageBorderPositionRight) {
            [path moveToPoint:CGPointMake(size.width - lineWidth / 2, 0)];
            [path addLineToPoint:CGPointMake(size.width - lineWidth / 2, size.height)];
        }
        [path setLineWidth:lineWidth];
        [path closePath];
    }
    // Clip to the rounded path, draw the receiver inside it, then stroke/fill the path.
    CGContextAddPath(contextRef, path.CGPath);
    CGContextClip(contextRef);
    [self drawInRect:rect];
    CGContextDrawPath(contextRef, kCGPathFillStroke);
    UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return image;
}

@end