//
//  CTImageProcessing.m
//  ConstellationTalk_0.1
//
//  Created by riley on 15/3/23.
//  Copyright (c) 2015年 Suberverter. All rights reserved.
//

#import "CTImageProcessing.h"
#import <Accelerate/Accelerate.h>

// NOTE(review): file-scope image slot that is never read or written by any
// method in this file — appears to be dead code; candidate for removal.
static UIImage * iiimg;

@implementation CTImageProcessing

/// Redraws an image into a canvas of the given size (used to shrink images,
/// e.g. before upload).
///
/// Rendering is done at an explicit 1x scale — identical to the old plain
/// `UIGraphicsBeginImageContext` behavior — so the pixel dimensions of the
/// result equal `size`, which is what a "compression" helper wants.
///
/// @param img  The source image. A nil image yields a blank canvas of `size`.
/// @param size The target size in pixels.
/// @return The redrawn image.
+(UIImage *)imageCompressionImg:(UIImage *)img
                       WithSize:(CGSize)size
{
    // Opaque = NO and scale = 1.0 reproduce UIGraphicsBeginImageContext(size)
    // exactly, but make both choices explicit.
    UIGraphicsBeginImageContextWithOptions(size, NO, 1.0);
    CGRect targetRect = CGRectMake(0, 0, size.width, size.height);
    [img drawInRect:targetRect];
    UIImage *resizedImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return resizedImage;
}

/// Applies a three-pass vImage box blur (approximating a Gaussian blur).
///
/// Fixes over the previous version:
///  - the DeviceRGB color space was created but never released (the release
///    calls were commented out), leaking on every invocation;
///  - only the last of the three convolution passes had its error checked;
///  - NULL results from CGDataProviderCopyData / malloc / CGBitmapContextCreate
///    were not handled;
///  - cleanup lived inside @try, so an exception leaked every buffer. All
///    cleanup now happens on a single @finally path.
///
/// @param image The source image. May be nil, in which case nil is returned.
/// @param blur  Blur strength; values outside [0.02, 2.0] are clamped to 0.02.
/// @return The blurred image, or the original image if blurring failed.
+(UIImage *)blurryImage:(UIImage *)image withBlurLevel:(CGFloat)blur
{
    if (image == nil) return image;

    UIImage *returnImage = image;

    // All resources NULL-initialized so the @finally block can release
    // unconditionally regardless of where the work stopped.
    CFDataRef inBitmapData = NULL;
    void *pixelBuffer = NULL;
    void *pixelBuffer2 = NULL;
    CGColorSpaceRef colorSpace = NULL;
    CGContextRef ctx = NULL;
    CGImageRef imageRef = NULL;

    @try {
        // Clamp the blur level to the supported range.
        if ((blur < 0.02f) || (blur > 2.0f)) {
            blur = 0.02f;
        }

        // The box kernel size must be a positive odd integer:
        // even values become size-1, odd values size-2 (both odd, minimum 1).
        int boxSize = (int)(blur * 100);
        boxSize -= (boxSize % 2) + 1;
        if (boxSize < 1) boxSize = 1;

        CGImageRef img = image.CGImage;
        if (img == NULL) return image;

        size_t width = CGImageGetWidth(img);
        size_t height = CGImageGetHeight(img);
        size_t rowBytes = CGImageGetBytesPerRow(img);

        // Copy of the backing bitmap; serves as convolution input and as
        // scratch space between passes.
        inBitmapData = CGDataProviderCopyData(CGImageGetDataProvider(img));
        if (inBitmapData == NULL) return image;

        pixelBuffer = malloc(rowBytes * height);   // final output pixels
        pixelBuffer2 = malloc(rowBytes * height);  // intermediate pass pixels
        if (pixelBuffer == NULL || pixelBuffer2 == NULL) return image;

        // NOTE(review): writing through CFDataGetBytePtr's (const) pointer in
        // pass 2 mirrors the original implementation; the data is our own copy.
        vImage_Buffer inBuffer = {
            .data = (void *)CFDataGetBytePtr(inBitmapData),
            .height = height, .width = width, .rowBytes = rowBytes
        };
        vImage_Buffer midBuffer = {
            .data = pixelBuffer2,
            .height = height, .width = width, .rowBytes = rowBytes
        };
        vImage_Buffer outBuffer = {
            .data = pixelBuffer,
            .height = height, .width = width, .rowBytes = rowBytes
        };

        // Three successive box blurs approximate a Gaussian blur; each pass'
        // error is checked before the next runs.
        vImage_Error error = vImageBoxConvolve_ARGB8888(&inBuffer, &midBuffer, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend);
        if (error == kvImageNoError) {
            error = vImageBoxConvolve_ARGB8888(&midBuffer, &inBuffer, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend);
        }
        if (error == kvImageNoError) {
            error = vImageBoxConvolve_ARGB8888(&inBuffer, &outBuffer, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend);
        }
        if (error != kvImageNoError) {
            NSLog(@"error from convolution %ld", error);
            return image;
        }

        // Rebuild a CGImage from the blurred pixel data, preserving the
        // source image's bitmap layout.
        colorSpace = CGColorSpaceCreateDeviceRGB();
        ctx = CGBitmapContextCreate(outBuffer.data,
                                    outBuffer.width,
                                    outBuffer.height,
                                    8,
                                    outBuffer.rowBytes,
                                    colorSpace,
                                    CGImageGetBitmapInfo(image.CGImage));
        if (ctx == NULL) return image;

        imageRef = CGBitmapContextCreateImage(ctx);
        if (imageRef != NULL) {
            returnImage = [UIImage imageWithCGImage:imageRef];
        }
    }
    @catch (NSException *exception) {
        // vImage/CoreGraphics raised; fall back to the unmodified input.
        returnImage = image;
    }
    @finally {
        // Single cleanup path — runs on success, early return, and exception.
        if (ctx) CGContextRelease(ctx);
        if (colorSpace) CGColorSpaceRelease(colorSpace);  // previously leaked
        if (imageRef) CGImageRelease(imageRef);
        free(pixelBuffer);
        free(pixelBuffer2);
        if (inBitmapData) CFRelease(inBitmapData);
    }

    return returnImage;
}


/// Asynchronous variant of +blurryImage:withBlurLevel:.
///
/// The blur runs on a default-priority global queue; the completion block is
/// invoked on the main queue with the result.
///
/// Fixes over the previous version:
///  - the old inline copy computed the kernel size from the *unclamped*
///    `blur` parameter (the clamped `bb` variable was computed and then
///    ignored), so out-of-range levels produced invalid kernel sizes;
///  - the ~90 duplicated lines (including the color-space leak) are replaced
///    by a call to the synchronous implementation;
///  - the completion block is nil-checked before being invoked.
///
/// @param image The source image; if nil the method returns immediately and
///              the block is never called (same as before).
/// @param blur  Blur strength; values outside [0.02, 2.0] are clamped.
/// @param block Completion handler, called on the main queue with the blurred
///              image (or the original image if blurring failed).
+(void)blurryImage:(UIImage *)image withBlurLevel:(CGFloat)blur withCompletionHandeler:(blurryImgBlock)block
{
    if (image == nil) return;

    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        // Delegate to the synchronous implementation instead of duplicating it.
        UIImage *blurred = [self blurryImage:image withBlurLevel:blur];

        dispatch_async(dispatch_get_main_queue(), ^{
            if (block) {
                block(blurred);
            }
        });
    });
}

@end
