/*
 * (C) 2014-2015 Alibaba Group Holding Limited
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 *
 *     __  ___   ______        ___       __          ___
 *    |  |/  /  /  __  \      /   \     |  |        /   \
 *    |  '  /  |  |  |  |    /  ^  \    |  |       /  ^  \
 *    |    <   |  |  |  |   /  /_\  \   |  |      /  /_\  \
 *    |  .  \  |  `--'  |  /  _____  \  |  `----./  _____  \
 *    |__|\__\  \______/  /__/     \__\ |_______/__/     \__\
 *
 *
 *
 */
//
// Created by KOALA TEAM on 12/19/14.
//

#import "UIImage+KLConvert.h"


@implementation UIImage (KLConvert)
//http://teh-1337.studiobebop.net/blog.py?post=198


// Method that processes a CVPixelBuffer representation of a preview frame
//+ (void)processPixelBuffer: (CVImageBufferRef)pixelBuffer
//{
//    
//    int BYTES_PER_PIXEL = 4;
//    // lock the pixel buffer into place in memory
//    CVPixelBufferLockBaseAddress( pixelBuffer, 0 );
//    
//    // Get the dimensions of the preview frame
//    int bufferWidth = CVPixelBufferGetWidth(pixelBuffer);
//    int bufferHeight = CVPixelBufferGetHeight(pixelBuffer);
//    
//    // Turn the CVPixelBuffer into something the intrinsic function can process
//    uint8_t *pixel = CVPixelBufferGetBaseAddress(pixelBuffer);
//    
//    // Allocate some memory for the grayscale values that the intrinsic function will create
//    uint8_t * baseAddressGray = (uint8_t *) malloc(bufferWidth*bufferHeight);
//    TIME_START
//    // Convert BGRA values to grayscale values
//    neon_convert(baseAddressGray, pixel, bufferWidth*bufferHeight);
//    LOG_TIME(@"processPixelBuffer");
//    // Iterate through each pixel in the preview frame, and apply the weighted value of that pixel's RGB color channels
//    for (int i = 0; i < (bufferWidth * bufferHeight); i++) {
//        pixel[0] = baseAddressGray[i];
//        pixel[1] = baseAddressGray[i];
//        pixel[2] = baseAddressGray[i];
//        pixel += BYTES_PER_PIXEL;
//    }
//    
//    // Release the grayscale values buffer
//    free(baseAddressGray);
//    
//    // Unlock the pixel buffer, we're done processing it
//    CVPixelBufferUnlockBaseAddress( pixelBuffer, 0 );
//    
//    
//}

// Creates a UIImage from a kCVPixelFormatType_32BGRA pixel buffer by wrapping
// the buffer's base address in a CGBitmapContext and snapshotting it.
// When `gray` is YES the buffer bytes are interpreted through a device-gray
// color space (kCGImageAlphaNone); otherwise as premultiplied little-endian
// BGRA. Returns nil (and asserts in debug builds) for any other pixel format,
// or nil if the bitmap context cannot be created.
+ (UIImage*) _whf_imageFromCVImageBuffer_32BGRA:(CVImageBufferRef)imageBuffer gray:(BOOL)gray{

    OSType format = CVPixelBufferGetPixelFormatType(imageBuffer);
    if(kCVPixelFormatType_32BGRA != format){
        NSAssert(0, @"not 32BGRA format");
        return nil;
    }

    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    void* bufferAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);

    CGColorSpaceRef colorSpace = gray ? CGColorSpaceCreateDeviceGray() : CGColorSpaceCreateDeviceRGB();
    CGBitmapInfo bitmapInfo = gray
        ? (kCGImageAlphaNone | kCGBitmapByteOrderDefault)
        : (kCGImageAlphaPremultipliedFirst | kCGBitmapByteOrder32Little);

    UIImage* image = nil;
    // CGBitmapContextCreate can fail (e.g. unsupported parameter combination);
    // guard instead of passing NULL downstream.
    CGContextRef context = CGBitmapContextCreate(bufferAddress, width, height, 8, bytesPerRow, colorSpace, bitmapInfo);
    if(context){
        CGImageRef dstImage = CGBitmapContextCreateImage(context);
        if(dstImage){
            image = [UIImage imageWithCGImage:dstImage];
            CGImageRelease(dstImage);
        }
        CGContextRelease(context);
    }

    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    return image;
}

/// Convenience overload: converts the pixel buffer to a color (non-grayscale)
/// UIImage by forwarding to the designated variant with gray:NO.
+ (UIImage*) imageFromCVImageBuffer:(CVImageBufferRef)imageBuffer{
    UIImage* colorImage = [self imageFromCVImageBuffer:imageBuffer gray:NO];
    return colorImage;
}

// Converts a CVImageBuffer to a UIImage, optionally converting the result to
// grayscale. Only kCVPixelFormatType_32BGRA buffers are supported; any other
// format asserts in debug builds and returns nil.
// Grayscale conversion is performed on the finished UIImage (via -grayImage)
// rather than in the helper's bitmap context, so the helper is always asked
// for a color image (gray:NO).
+ (UIImage*) imageFromCVImageBuffer:(CVImageBufferRef)imageBuffer gray:(BOOL)gray{
    OSType format = CVPixelBufferGetPixelFormatType(imageBuffer);
    switch (format){
        case kCVPixelFormatType_32BGRA:{

            UIImage* image = [self _whf_imageFromCVImageBuffer_32BGRA:imageBuffer gray:NO];
            return gray?[image grayImage]:image;
        }
        default:
        {
            NSAssert(0, @"only support 32BGRA format");
            return nil;
        }
    }
}

// Returns a grayscale copy of the receiver, rendered through a device-gray
// bitmap context. Alpha is discarded (kCGImageAlphaNone). If the receiver is
// already monochrome it is returned unchanged. Returns nil if the receiver
// has no CGImage backing or the context cannot be created.
- (UIImage*)grayImage{

    CGImageRef cgImage = self.CGImage;
    if(!cgImage){
        // e.g. CIImage-backed images have no CGImage to redraw.
        return nil;
    }

    CGColorSpaceRef imageColorSpace =  CGImageGetColorSpace(cgImage);
    CGColorSpaceModel model = CGColorSpaceGetModel(imageColorSpace);

    if(model == kCGColorSpaceModelMonochrome){
        return self;
    }

    // Use pixel dimensions, not self.size (points): on @2x/@3x images the
    // point size is smaller than the backing bitmap, and drawing into a
    // point-sized context would silently halve/third the resolution.
    size_t width = CGImageGetWidth(cgImage);
    size_t height = CGImageGetHeight(cgImage);

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceGray();
    CGContextRef contextRef = CGBitmapContextCreate(nil,
                                                    width,
                                                    height,
                                                    8,
                                                    0,
                                                    colorSpace,
                                                    kCGImageAlphaNone |
                                                    kCGBitmapByteOrderDefault);
    CGColorSpaceRelease(colorSpace);
    if(!contextRef){
        return nil;
    }

    CGContextDrawImage(contextRef,
                       CGRectMake(0, 0, width, height),
                       cgImage);

    CGImageRef imageRef = CGBitmapContextCreateImage(contextRef);
    CGContextRelease(contextRef);

    // Preserve the receiver's scale and orientation; plain imageWithCGImage:
    // would reset both (scale 1.0, UIImageOrientationUp).
    UIImage *image = [UIImage imageWithCGImage:imageRef
                                         scale:self.scale
                                   orientation:self.imageOrientation];
    CGImageRelease(imageRef);

    return image;
}





@end
