

Commonly Used Category Methods in iOS Development --- UIImage+Category

2019-11-14 18:03:52

Using a category to add methods to an existing system class is a common way to extend its functionality in development. The UIImage category below collects a few helpers for stretching, tinting, blurring, and cropping images.
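For context, these methods would normally be declared in a category header before being implemented. A minimal sketch of such a header is shown here; the file name and category name are assumed from the article title, and the declarations are taken from the implementation that follows:

    // UIImage+Category.h -- hypothetical header for the category below
    #import <UIKit/UIKit.h>

    @interface UIImage (Category)

    // Stretchable image built from a named resource and cap ratios (0-1)
    + (UIImage *)resizedImageWithName:(NSString *)name left:(CGFloat)left top:(CGFloat)top;
    // Solid-color image of a given size
    + (UIImage *)imageWithColor:(UIColor *)color size:(CGSize)size;
    // Copy of an image multiplied with a tint color
    + (UIImage *)colorizeImage:(UIImage *)baseImage withColor:(UIColor *)theColor;
    // Box-blurred copy of the receiver (blur in 0-1)
    - (UIImage *)boxblurImageWithBlur:(CGFloat)blur;
    // Center crop of the receiver to the target size's aspect ratio
    - (UIImage *)cropImageWithSize:(CGSize)size;

    @end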

#import <Accelerate/Accelerate.h> // required for the vImage box-blur below

/**
 *  Freely stretch an image
 *
 *  @param name image name
 *  @param left left cap position as a ratio, in the range 0-1
 *  @param top  top cap position as a ratio, in the range 0-1
 *
 *  @return the stretchable image
 */
+ (UIImage *)resizedImageWithName:(NSString *)name left:(CGFloat)left top:(CGFloat)top {
    UIImage *image = [UIImage imageNamed:name];
    return [image stretchableImageWithLeftCapWidth:image.size.width * left
                                      topCapHeight:image.size.height * top];
}

/**
 *  Create an image from a color and a size
 *
 *  @param color the fill color
 *  @param size  the image size
 */
+ (UIImage *)imageWithColor:(UIColor *)color size:(CGSize)size {
    UIGraphicsBeginImageContextWithOptions(size, NO, [UIScreen mainScreen].scale);
    [color set];
    UIRectFill(CGRectMake(0, 0, size.width, size.height));
    UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return image;
}

/**
 *  Return a copy of an image with the given color multiplied over it
 */
+ (UIImage *)colorizeImage:(UIImage *)baseImage withColor:(UIColor *)theColor {
    UIGraphicsBeginImageContext(CGSizeMake(baseImage.size.width * 2, baseImage.size.height * 2));

    CGContextRef ctx = UIGraphicsGetCurrentContext();
    CGRect area = CGRectMake(0, 0, baseImage.size.width * 2, baseImage.size.height * 2);

    // Flip the coordinate system so the CGImage is not drawn upside down
    CGContextScaleCTM(ctx, 1, -1);
    CGContextTranslateCTM(ctx, 0, -area.size.height);

    CGContextSaveGState(ctx);
    CGContextClipToMask(ctx, area, baseImage.CGImage);

    [theColor set];
    CGContextFillRect(ctx, area);

    CGContextRestoreGState(ctx);

    CGContextSetBlendMode(ctx, kCGBlendModeMultiply);
    CGContextDrawImage(ctx, area, baseImage.CGImage);

    UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();

    return newImage;
}

/**
 *  Return a box-blurred (Gaussian-like) copy of the image
 *
 *  @param blur blur factor in the range 0-1
 *
 *  @return the blurred image
 */
- (UIImage *)boxblurImageWithBlur:(CGFloat)blur {
    NSData *imageData = UIImageJPEGRepresentation(self, 1); // convert to JPEG to flatten the pixel format
    UIImage *destImage = [UIImage imageWithData:imageData];

    if (blur < 0.f || blur > 1.f) {
        blur = 0.5f;
    }
    int boxSize = (int)(blur * 40);
    boxSize = boxSize - (boxSize % 2) + 1; // the convolution kernel size must be odd

    CGImageRef img = destImage.CGImage;

    vImage_Buffer inBuffer, outBuffer;
    vImage_Error error;
    void *pixelBuffer;

    // Create a vImage_Buffer with the data from the CGImageRef
    CGDataProviderRef inProvider = CGImageGetDataProvider(img);
    CFDataRef inBitmapData = CGDataProviderCopyData(inProvider);

    inBuffer.width = CGImageGetWidth(img);
    inBuffer.height = CGImageGetHeight(img);
    inBuffer.rowBytes = CGImageGetBytesPerRow(img);
    inBuffer.data = (void *)CFDataGetBytePtr(inBitmapData);

    // Create a vImage_Buffer for the output
    pixelBuffer = malloc(CGImageGetBytesPerRow(img) * CGImageGetHeight(img));
    if (pixelBuffer == NULL) {
        NSLog(@"No pixelbuffer");
    }

    outBuffer.data = pixelBuffer;
    outBuffer.width = CGImageGetWidth(img);
    outBuffer.height = CGImageGetHeight(img);
    outBuffer.rowBytes = CGImageGetBytesPerRow(img);

    // Create a third buffer for intermediate processing
    void *pixelBuffer2 = malloc(CGImageGetBytesPerRow(img) * CGImageGetHeight(img));
    vImage_Buffer outBuffer2;
    outBuffer2.data = pixelBuffer2;
    outBuffer2.width = CGImageGetWidth(img);
    outBuffer2.height = CGImageGetHeight(img);
    outBuffer2.rowBytes = CGImageGetBytesPerRow(img);

    // Run the box convolution three times to approximate a Gaussian blur
    error = vImageBoxConvolve_ARGB8888(&inBuffer, &outBuffer2, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend);
    if (error) {
        NSLog(@"error from convolution %ld", error);
    }
    error = vImageBoxConvolve_ARGB8888(&outBuffer2, &inBuffer, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend);
    if (error) {
        NSLog(@"error from convolution %ld", error);
    }
    error = vImageBoxConvolve_ARGB8888(&inBuffer, &outBuffer, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend);
    if (error) {
        NSLog(@"error from convolution %ld", error);
    }

    // Rebuild a UIImage from the blurred pixel buffer
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef ctx = CGBitmapContextCreate(outBuffer.data,
                                             outBuffer.width,
                                             outBuffer.height,
                                             8,
                                             outBuffer.rowBytes,
                                             colorSpace,
                                             (CGBitmapInfo)kCGImageAlphaNoneSkipLast);
    CGImageRef imageRef = CGBitmapContextCreateImage(ctx);
    UIImage *returnImage = [UIImage imageWithCGImage:imageRef];

    // Clean up
    CGContextRelease(ctx);
    CGColorSpaceRelease(colorSpace);

    free(pixelBuffer);
    free(pixelBuffer2);
    CFRelease(inBitmapData);

    CGImageRelease(imageRef);

    return returnImage;
}

/**
 *  Crop an image to match a target size's aspect ratio
 *
 *  @param size the target size
 *
 *  @return the cropped image
 */
- (UIImage *)cropImageWithSize:(CGSize)size {
    float scale = self.size.width / self.size.height;
    CGRect rect = CGRectMake(0, 0, 0, 0);

    if (scale > size.width / size.height) {
        // Source is wider than the target: trim the left and right edges
        rect.origin.x = (self.size.width - self.size.height * size.width / size.height) / 2;
        rect.size.width  = self.size.height * size.width / size.height;
        rect.size.height = self.size.height;
    } else {
        // Source is taller than the target: trim the top and bottom edges
        rect.origin.y = (self.size.height - self.size.width / size.width * size.height) / 2;
        rect.size.width  = self.size.width;
        rect.size.height = self.size.width / size.width * size.height;
    }

    CGImageRef imageRef = CGImageCreateWithImageInRect(self.CGImage, rect);
    UIImage *croppedImage = [UIImage imageWithCGImage:imageRef];
    CGImageRelease(imageRef);

    return croppedImage;
}
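As a quick usage sketch, the category methods could be called like this; the image names and sizes here are made-up values for illustration only:

    // Hypothetical usage of the category above (resource names and sizes are examples)
    UIImage *bubble  = [UIImage resizedImageWithName:@"chat_bubble" left:0.5 top:0.5];
    UIImage *patch   = [UIImage imageWithColor:[UIColor redColor] size:CGSizeMake(44, 44)];
    UIImage *tinted  = [UIImage colorizeImage:[UIImage imageNamed:@"icon"] withColor:[UIColor blueColor]];
    UIImage *blurred = [[UIImage imageNamed:@"photo"] boxblurImageWithBlur:0.6];
    UIImage *cropped = [[UIImage imageNamed:@"photo"] cropImageWithSize:CGSizeMake(100, 100)];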
