//
//  Utility.m
//  Moo-O Touch
//
//  Created by Reisen on 11/17/11.
//  Copyright 2011 __MyCompanyName__. All rights reserved.
//

#import "Utility.h"
#import <mach/mach.h>
#import <CommonCrypto/CommonDigest.h>
#import "TalkDefine.h"

@implementation Utility


- (id)init
{
    // Designated initializer; the class is stateless (all API is class
    // methods), so there is nothing to set up.
    if ((self = [super init]) != nil) {
        // No instance state.
    }
    return self;
}

// Returns a new CGImage produced by mirroring `image` horizontally and
// rotating it 90 degrees counter-clockwise.
//
// NOTE(review): despite the name, the orientation parameter only affects the
// output dimensions (swapped for UIImageOrientationLeftMirrored); the drawing
// transform itself is the same for every orientation value — confirm whether
// other orientations were ever intended to be supported.
//
// Ownership: follows the CF Create rule — the caller must CGImageRelease
// the returned image.
+(CGImageRef) rotateCGImage:(CGImageRef) image toOrientation:(UIImageOrientation)orientation
{
    size_t width = CGImageGetWidth(image);
    size_t height = CGImageGetHeight(image);

    // Output dimensions: swapped when the capture was left-mirrored
    // (portrait frame from a landscape buffer).
    size_t resWidth = width;
    size_t resHeight = height;
    if (orientation == UIImageOrientationLeftMirrored)
    {
        resWidth = height;
        resHeight = width;
    }

    CGColorSpaceRef cSpace = CGColorSpaceCreateDeviceRGB();

    // BGRA with premultiplied alpha — matches the layout the other
    // sample-buffer helpers in this file assume.
    CGContextRef context = CGBitmapContextCreate(nil, resWidth, resHeight, 8, 0, cSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);

    // Mirror horizontally about the output width, then rotate -90 degrees
    // about the origin. Order matters: CTM operations compose.
    CGContextTranslateCTM(context, resWidth, 0);
    CGContextScaleCTM(context, -1, 1);
    CGContextTranslateCTM(context, 0, resHeight);
    CGContextRotateCTM(context, -M_PI / 2);

    CGContextDrawImage(context, CGRectMake(0, 0, width, height), image);

    CGImageRef retval = CGBitmapContextCreateImage(context);

    CGContextRelease(context);
    CGColorSpaceRelease(cSpace);

    return retval;
}

// Create a UIImage from sample buffer data
+(UIImage *) UIimageFromSampleBuffer:(CVImageBufferRef) imageBuffer withOrientation:(UIImageOrientation) orientation
{ 
    // Lock the base address of the pixel buffer
    CVPixelBufferLockBaseAddress(imageBuffer, 0); 
    
    // Get the number of bytes per row for the pixel buffer
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer); 
    
    // Get the number of bytes per row for the pixel buffer
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer); 
    // Get the pixel buffer width and height
    size_t width = CVPixelBufferGetWidth(imageBuffer); 
    size_t height = CVPixelBufferGetHeight(imageBuffer); 
    
    // Create a device-dependent RGB color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); 
    
    // Create a bitmap graphics context with the sample buffer data
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, 
                                                 bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst); 
    
    // Create a Quartz image from the pixel data in the bitmap graphics context
    CGImageRef quartzImage = CGBitmapContextCreateImage(context); 
    // Unlock the pixel buffer
    CVPixelBufferUnlockBaseAddress(imageBuffer,0);
    
    // Free up the context and color space
    CGContextRelease(context); 
    CGColorSpaceRelease(colorSpace);
    
    CGImageRef quartzImageRotated = [self rotateCGImage:quartzImage toOrientation:orientation];
    
    // Create an image object from the Quartz image
    UIImage *image = [UIImage imageWithCGImage:quartzImageRotated];
    
    // Release the Quartz image
    CGImageRelease(quartzImage);
    CGImageRelease(quartzImageRotated);
    
    return image;
}


// Create a UIImage from sample buffer data
+(UIImage *) UIimageFromSampleBuffer:(CVImageBufferRef) imageBuffer
{ 
    // Lock the base address of the pixel buffer
    CVPixelBufferLockBaseAddress(imageBuffer, 0); 
    
    // Get the number of bytes per row for the pixel buffer
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer); 
    
    // Get the number of bytes per row for the pixel buffer
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer); 
    // Get the pixel buffer width and height
    size_t width = CVPixelBufferGetWidth(imageBuffer); 
    size_t height = CVPixelBufferGetHeight(imageBuffer); 
    
    // Create a device-dependent RGB color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); 
    
    // Create a bitmap graphics context with the sample buffer data
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, 
                                                 bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst); 
    
    // Create a Quartz image from the pixel data in the bitmap graphics context
    CGImageRef quartzImage = CGBitmapContextCreateImage(context); 
    // Unlock the pixel buffer
    CVPixelBufferUnlockBaseAddress(imageBuffer,0);
    
    // Free up the context and color space
    CGContextRelease(context); 
    CGColorSpaceRelease(colorSpace);
    
    
    // Create an image object from the Quartz image
    UIImage *image = [UIImage imageWithCGImage:quartzImage];
    
    // Release the Quartz image
    CGImageRelease(quartzImage);
    
    return image;
}

// Renders `image` into a newly created 32ARGB CVPixelBuffer of the same size.
// Ownership: follows the CF Create rule — the caller must
// CVPixelBufferRelease the returned buffer.
// NOTE: NSParameterAssert compiles out in release builds, so callers should
// still be prepared for a NULL return if allocation fails there.
+(CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image
{
    CGSize size = CGSizeMake(CGImageGetWidth(image), CGImageGetHeight(image));

    // CG compatibility flags so the buffer can back a bitmap context.
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,nil];
    CVPixelBufferRef pxbuffer = NULL;

    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width,
                                          size.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options,&pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    // Lock while a CG context draws directly into the buffer's memory.
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
                                                 size.height, 8, CVPixelBufferGetBytesPerRow(pxbuffer), rgbColorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    NSParameterAssert(context);

    // FIX: removed CGContextConcatCTM(context, CGAffineTransformMakeRotation(0)),
    // which concatenated the identity matrix — a no-op.
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);

    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}

static double lastTimestamp;

+(void) startTiming
{
    // Snapshot the current time; endTimingWithTag: measures from here.
    NSTimeInterval now = [NSDate timeIntervalSinceReferenceDate];
    lastTimestamp = now;
}

// Logs and returns the seconds elapsed since the last startTiming /
// endTimingWithTag: call, then resets the reference point.
+(NSTimeInterval) endTimingWithTag:(NSString*)tag
{
    NSTimeInterval elapsed = [NSDate timeIntervalSinceReferenceDate] - lastTimestamp;

    // Logged in milliseconds for readability.
    TLDebugS(@"%@ : %f ms", tag, elapsed * 1000);

    // Reset so consecutive end calls measure back-to-back intervals.
    lastTimestamp = [NSDate timeIntervalSinceReferenceDate];

    return elapsed;
}

#pragma mark - Image Reflection

// Builds a vertical black-to-white gradient image in the gray colorspace,
// suitable for use as an image mask (masks must be grayscale).
// Ownership: Create rule — the caller must CGImageRelease the result.
CGImageRef CreateGradientImage(int pixelsWide, int pixelsHigh)
{
    CGColorSpaceRef grayColorSpace = CGColorSpaceCreateDeviceGray();

    // Gray bitmap context with no alpha channel.
    CGContextRef gradientContext = CGBitmapContextCreate(NULL, pixelsWide, pixelsHigh,
                                                         8, 0, grayColorSpace, kCGImageAlphaNone);

    // Start/end grayscale values with alpha — the context ignores alpha,
    // but CGGradient requires the component to be present.
    CGFloat grayComponents[] = {0.0, 1.0, 1.0, 1.0};
    CGGradientRef gradient = CGGradientCreateWithColorComponents(grayColorSpace, grayComponents, NULL, 2);
    CGColorSpaceRelease(grayColorSpace);

    // Draw straight down from the top-left corner to the bottom edge.
    CGContextDrawLinearGradient(gradientContext, gradient, CGPointZero,
                                CGPointMake(0, pixelsHigh), kCGGradientDrawsAfterEndLocation);
    CGGradientRelease(gradient);

    // Capture the rendered gradient as an image and tear down the context.
    CGImageRef gradientImage = CGBitmapContextCreateImage(gradientContext);
    CGContextRelease(gradientContext);

    return gradientImage;
}

// Creates an RGB bitmap context in BGRA byte order with premultiplied
// alpha — the device-optimal pixel layout.
// Ownership: Create rule — the caller must CGContextRelease the result.
CGContextRef MyCreateBitmapContext(int pixelsWide, int pixelsHigh)
{
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();

    uint32_t bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst;
    CGContextRef bitmapContext = CGBitmapContextCreate(NULL, pixelsWide, pixelsHigh, 8,
                                                       0, rgbColorSpace, bitmapInfo);
    CGColorSpaceRelease(rgbColorSpace);

    return bitmapContext;
}

// Builds an upside-down, fading "reflection" of the image view's content,
// `height` points tall. Returns nil for a zero height.
+ (UIImage *)reflectedImage:(UIImageView *)fromImage withHeight:(NSUInteger)height
{
    if (height == 0) {
        return nil;
    }

    // Bitmap context sized to the reflection strip.
    CGContextRef reflectionContext = MyCreateBitmapContext(fromImage.bounds.size.width, (int)height);

    // 1px-wide grayscale gradient used as a clip mask; CGContextClipToMask
    // stretches it horizontally, fading the reflection out toward the bottom.
    CGImageRef fadeMask = CreateGradientImage(1, (int)height);
    CGContextClipToMask(reflectionContext, CGRectMake(0.0, 0.0, fromImage.bounds.size.width, height), fadeMask);
    CGImageRelease(fadeMask);

    // Move the origin down by the capture height, then flip vertically so
    // the source image draws upside down (the mirror effect).
    CGContextTranslateCTM(reflectionContext, 0.0, height);
    CGContextScaleCTM(reflectionContext, 1.0, -1.0);

    CGContextDrawImage(reflectionContext, fromImage.bounds, fromImage.image.CGImage);

    // Snapshot the context and release it.
    CGImageRef reflectionCGImage = CGBitmapContextCreateImage(reflectionContext);
    CGContextRelease(reflectionContext);

    // UIImage retains the CGImage, so our reference can be dropped.
    UIImage *reflection = [UIImage imageWithCGImage:reflectionCGImage];
    CGImageRelease(reflectionCGImage);

    return reflection;
}

// Convenience overload of borderImage:withStroke: using a 2pt stroke.
+ (UIImage *)borderImage:(UIImageView *)fromImage
{
    return [Utility borderImage:fromImage withStroke:2.0f];
}

// Renders a transparent image the size of the view's bounds containing only
// a black rectangular outline of the given stroke width.
+ (UIImage *)borderImage:(UIImageView *)fromImage withStroke:(CGFloat)stroke
{
    // Bitmap context matching the image view's bounds.
    CGContextRef borderContext = MyCreateBitmapContext(fromImage.bounds.size.width, fromImage.bounds.size.height);

    // Crisp (non-antialiased) black outline.
    CGContextSetAllowsAntialiasing(borderContext, NO);
    CGContextSetRGBStrokeColor(borderContext, 0, 0, 0, 1);
    CGContextSetLineWidth(borderContext, stroke);
    CGContextStrokeRect(borderContext, fromImage.bounds);

    // Snapshot the context and release it.
    CGImageRef borderCGImage = CGBitmapContextCreateImage(borderContext);
    CGContextRelease(borderContext);

    // UIImage retains the CGImage, so our reference can be dropped.
    UIImage *borderImage = [UIImage imageWithCGImage:borderCGImage];
    CGImageRelease(borderCGImage);

    return borderImage;
}

// Renders a transparent image the size of `bound` containing only a black
// rectangular outline of the given stroke width.
+ (UIImage *)borderImageWithRect:(CGRect)bound withStroke:(CGFloat)stroke
{
    // Bitmap context matching the requested rectangle.
    CGContextRef borderContext = MyCreateBitmapContext(bound.size.width, bound.size.height);

    // Crisp (non-antialiased) black outline.
    CGContextSetAllowsAntialiasing(borderContext, NO);
    CGContextSetRGBStrokeColor(borderContext, 0, 0, 0, 1);
    CGContextSetLineWidth(borderContext, stroke);
    CGContextStrokeRect(borderContext, bound);

    // Snapshot the context and release it.
    CGImageRef borderCGImage = CGBitmapContextCreateImage(borderContext);
    CGContextRelease(borderContext);

    // UIImage retains the CGImage, so our reference can be dropped.
    UIImage *borderImage = [UIImage imageWithCGImage:borderCGImage];
    CGImageRelease(borderCGImage);

    return borderImage;
}

// Returns the app-specific Application Support directory
// (<Application Support>/<bundle id>), creating it on first use and
// excluding it from iCloud/iTunes backup. Returns nil if the directory
// cannot be determined.
+ (NSURL*)applicationDataDirectory {
    NSFileManager* sharedFM = [NSFileManager defaultManager];
    NSArray* possibleURLs = [sharedFM URLsForDirectory:NSApplicationSupportDirectory
                                             inDomains:NSUserDomainMask];
    if ([possibleURLs count] == 0) {
        return nil;
    }

    // Use the first directory (if multiple are returned).
    NSURL* appSupportDir = [possibleURLs objectAtIndex:0];

    // Append the bundle identifier so each app gets its own folder.
    // FIX: guard against a nil bundle identifier, which would previously
    // have been passed straight into URLByAppendingPathComponent:.
    NSString* appBundleID = [[NSBundle mainBundle] bundleIdentifier];
    if (appBundleID == nil) {
        return nil;
    }
    NSURL* appDirectory = [appSupportDir URLByAppendingPathComponent:appBundleID];

    if (![sharedFM fileExistsAtPath:appDirectory.path]) {
        [sharedFM createDirectoryAtURL:appDirectory withIntermediateDirectories:YES attributes:nil error:nil];
    }
    // FIX: apply the backup-exclusion flag unconditionally; the original
    // only set it when the directory was first created, so pre-existing
    // directories were never excluded.
    [appDirectory setResourceValue:[NSNumber numberWithBool:YES] forKey:NSURLIsExcludedFromBackupKey error:nil];

    return appDirectory;
}

// Logs the current resident memory size in MB. Compiled to an immediate
// return in non-DEBUG builds.
+(void) reportMemoryUsage
{
#ifndef DEBUG
    return;
#endif

    double megabytes = [self getMemoryUsage] / (1024.0 * 1024.0);
    TLDebugS(@"Memory usage = %.4lf MB", megabytes);
}

// Resident memory size of the current task in bytes.
// Always returns 0 in non-DEBUG builds, and now also on task_info failure.
+(vm_size_t) getMemoryUsage
{
#ifndef DEBUG
    return 0;
#endif

    struct task_basic_info info;
    // FIX: the count is expressed in natural_t units, not bytes;
    // use the TASK_BASIC_INFO_COUNT constant the Mach API defines.
    mach_msg_type_number_t size = TASK_BASIC_INFO_COUNT;

    // FIX: the kern_return_t was previously ignored, so a failed call
    // returned whatever happened to be in the uninitialized struct.
    kern_return_t kr = task_info(mach_task_self(), TASK_BASIC_INFO, (task_info_t)&info, &size);
    if (kr != KERN_SUCCESS) {
        return 0;
    }

    return info.resident_size;
}

static vm_size_t lastMemory;

+(void) startMemory
{
    // Snapshot the current resident size; endMemoryWithTag: diffs from here.
    vm_size_t current = [Utility getMemoryUsage];
    lastMemory = current;
}

// Logs the change in resident memory since the last startMemory /
// endMemoryWithTag: call, then resets the reference point.
+(void) endMemoryWithTag:(NSString*)tag
{
    vm_size_t newMemory = [Utility getMemoryUsage];

    // FIX: use signed arithmetic — usage can shrink, and the original
    // unsigned vm_size_t subtraction wrapped to a huge positive number
    // whenever memory was freed.
    long delta = (long)newMemory - (long)lastMemory;
    TLDebugS(@"%@ Memory delta = %ld bytes", tag, delta);

    lastMemory = newMemory;
}


// Loads a bundle image by file name (with extension), bypassing UIImage's
// in-memory cache by reading the file directly.
+(UIImage*)imageNamed:(NSString*)name
{
    NSString *resource = [name stringByDeletingPathExtension];
    NSString *extension = [name pathExtension];
    NSString *path = [[NSBundle mainBundle] pathForResource:resource ofType:extension];
    return [UIImage imageWithContentsOfFile:path];
}

// Scales `source` to `newSize` with medium-quality interpolation, keeping
// the source image's pixel format.
// FIX: returns nil when the source has no backing CGImage or when the
// bitmap context cannot be created, instead of issuing CG calls against a
// NULL context.
+ (UIImage *)resizedImage: (UIImage*)source
                     size:(CGSize)newSize
{
    CGRect newRect = CGRectIntegral(CGRectMake(0, 0, newSize.width, newSize.height));
    CGImageRef imageRef = source.CGImage;
    if (imageRef == NULL) {
        // nil source, or a CIImage/empty-backed UIImage.
        return nil;
    }

    // Build a context that's the same dimensions as the new size,
    // re-using the source image's bit depth, colorspace and bitmap info.
    CGContextRef bitmap = CGBitmapContextCreate(NULL,
                                                newRect.size.width,
                                                newRect.size.height,
                                                CGImageGetBitsPerComponent(imageRef),
                                                0,
                                                CGImageGetColorSpace(imageRef),
                                                CGImageGetBitmapInfo(imageRef));
    if (bitmap == NULL) {
        // Some bit-depth/colorspace/alpha combinations are not valid
        // bitmap-context configurations.
        return nil;
    }

    // Set the quality level to use when rescaling.
    CGContextSetInterpolationQuality(bitmap, kCGInterpolationMedium);

    // Draw into the context; this scales the image.
    CGContextDrawImage(bitmap, newRect, imageRef);

    // Get the resized image from the context and wrap it in a UIImage.
    CGImageRef newImageRef = CGBitmapContextCreateImage(bitmap);
    UIImage *newImage = [UIImage imageWithCGImage:newImageRef];

    // Clean up.
    CGContextRelease(bitmap);
    CGImageRelease(newImageRef);

    return newImage;
}

// Loads a bundle image (bypassing the UIImage cache) and resizes it.
// FIX: removed the stray semicolon that followed the method signature.
+(UIImage*)imageNamed: (NSString*)name size:(CGSize)newSize
{
    return [Utility resizedImage:[Utility imageNamed:name] size:newSize];
}


// Lowercase hex MD5 digest of the UTF-8 bytes of `string`.
// FIX: returns nil for a nil input (previously crashed in strlen(NULL));
// length is explicitly cast to CC_LONG as CC_MD5 requires.
// NOTE: MD5 is cryptographically broken — suitable for cache keys only.
+(NSString*) md5Hash:(NSString*)string
{
    const char* cString = [string UTF8String];
    if (cString == NULL) {
        return nil;
    }

    unsigned char digest[CC_MD5_DIGEST_LENGTH];
    CC_MD5(cString, (CC_LONG)strlen(cString), digest);

    NSMutableString* result = [NSMutableString stringWithCapacity:CC_MD5_DIGEST_LENGTH * 2];
    for (int i = 0; i < CC_MD5_DIGEST_LENGTH; i++) {
        [result appendFormat:@"%02x", digest[i]];
    }

    // The old stringByReplacingOccurrencesOfString:@"-" pass was a no-op:
    // %02x never emits dashes.
    return result;
}

// Base URL prefix for all API requests.
// NOTE(review): hard-coded to the *staging* host over plain HTTP — confirm
// the production host and HTTPS/ATS requirements before shipping.
+(NSString*) APIRoot
{
    return @"http://staging.newsmaker.tv/";
}

// Filesystem path of the app's private data directory
// (see applicationDataDirectory). May be nil if that directory
// cannot be resolved.
+(NSString*) rootStorage
{
    return [Utility applicationDataDirectory].path;
}


// POSTs `data` as application/x-www-form-urlencoded to `url`, blocking the
// calling thread, and returns the raw response body (nil on error or when
// `data` is nil). Do not call on the main thread.
// NOTE(review): stringByAddingPercentEscapesUsingEncoding: does not escape
// '&' or '=' inside values, and both it and NSURLConnection's synchronous
// API are deprecated — consider NSURLComponents + NSURLSession.
+(NSData*) postRequest:(NSString*) url :(NSMutableDictionary*) data
{
    TLDebugS(@"Post to %@", url);

    if (data == nil) {
        return nil;
    }

    // Build the key=value&key=value body.
    NSString* parameters = @"";
    BOOL isFirst = YES;
    for (NSString* key in data)
    {
        if (!isFirst) parameters = [parameters stringByAppendingString:@"&"];
        isFirst = NO;

        parameters = [parameters stringByAppendingFormat:@"%@=%@",
                      [key stringByAddingPercentEscapesUsingEncoding:NSUTF8StringEncoding],
                      [(NSString*)[data objectForKey:key] stringByAddingPercentEscapesUsingEncoding:NSUTF8StringEncoding]];
    }

    TLDebugS(@"Parameters = %@", parameters);

    NSData* dataParam = [parameters dataUsingEncoding:NSUTF8StringEncoding];
    NSMutableURLRequest* request = [[NSMutableURLRequest alloc] initWithURL:[NSURL URLWithString:url]];
    request.HTTPMethod = @"POST";
    request.HTTPBody = dataParam;
    [request setValue:@"application/x-www-form-urlencoded" forHTTPHeaderField:@"Content-Type"];
    // FIX: %lu/(unsigned long) is the correct conversion for NSUInteger
    // (the original used %zd, which is for ssize_t).
    [request setValue:[NSString stringWithFormat:@"%lu", (unsigned long)dataParam.length] forHTTPHeaderField:@"Content-Length"];

    NSURLResponse* response = nil;
    NSError* error = nil;

    // FIX: renamed from `data`, which shadowed the method parameter.
    NSData* responseData = [NSURLConnection sendSynchronousRequest:request returningResponse:&response error:&error];

    return responseData;
}

// POSTs a pre-encoded x-www-form-urlencoded `parameters` string to `url`,
// blocking the calling thread; returns the raw response body or nil.
// FIX: removed the gratuitous bare-brace scope and the unreachable
// trailing `return nil;` the original carried.
// NOTE(review): NSURLConnection's synchronous API is deprecated — consider
// NSURLSession with a completion handler or semaphore.
+(NSData*) postRequestWithString:(NSString*) url :(NSString*)parameters
{
    TLDebugS(@"URL = %@, Parameters = %@", url, parameters);

    NSData* dataParam = [parameters dataUsingEncoding:NSUTF8StringEncoding];
    NSMutableURLRequest* request = [[NSMutableURLRequest alloc] initWithURL:[NSURL URLWithString:url]];
    request.HTTPMethod = @"POST";
    request.HTTPBody = dataParam;
    [request setValue:@"application/x-www-form-urlencoded" forHTTPHeaderField:@"Content-Type"];
    // %lu/(unsigned long) is the correct conversion for NSUInteger.
    [request setValue:[NSString stringWithFormat:@"%lu", (unsigned long)dataParam.length] forHTTPHeaderField:@"Content-Length"];

    NSURLResponse* response = nil;
    NSError* error = nil;

    NSData* responseData = [NSURLConnection sendSynchronousRequest:request returningResponse:&response error:&error];

    if (responseData == nil) TLDebugS(@"NIL!");

    return responseData;
}

// Converts a UIColor to a "#RRGGBB" hex string (alpha is ignored).
// Handles grayscale and RGB colorspace models.
// FIX: r/g/b were previously uninitialized, so any other colorspace model
// formatted three garbage stack values (undefined behavior); they now
// default to 0, yielding a deterministic "#000000".
+ (NSString *)hexStringFromColor:(UIColor *)color
{
    CGColorSpaceModel colorSpace = CGColorSpaceGetModel(CGColorGetColorSpace(color.CGColor));
    const CGFloat *components = CGColorGetComponents(color.CGColor);

    CGFloat r = 0, g = 0, b = 0;

    if (colorSpace == kCGColorSpaceModelMonochrome) {
        // Grayscale: a single white component replicated across channels.
        r = components[0];
        g = components[0];
        b = components[0];
    }
    else if (colorSpace == kCGColorSpaceModelRGB) {
        r = components[0];
        g = components[1];
        b = components[2];
    }

    return [NSString stringWithFormat:@"#%02lX%02lX%02lX",
            lroundf(r * 255),
            lroundf(g * 255),
            lroundf(b * 255)];
}

// Serializes a UIColor as "r,g,b,a" (inverse of UIColorFromNSString:).
// Grayscale colors (2 components: white, alpha) are expanded to RGB.
// FIX: component count now uses size_t, matching the return type of
// CGColorGetNumberOfComponents, instead of a narrowing int conversion.
+(NSString*)NSStringFromUIColor:(UIColor*) color
{
    size_t componentCount = CGColorGetNumberOfComponents(color.CGColor);
    const CGFloat *components = CGColorGetComponents(color.CGColor);

    if (componentCount == 2) {
        // Grayscale: replicate the white component into r/g/b.
        return [NSString stringWithFormat:@"%f,%f,%f,%f",
                components[0],
                components[0],
                components[0],
                components[1]];
    }

    return [NSString stringWithFormat:@"%f,%f,%f,%f",
            components[0],
            components[1],
            components[2],
            components[3]];
}

// Parses an "r,g,b,a" string (as produced by NSStringFromUIColor:) back
// into a UIColor.
// FIX: returns nil for malformed input with fewer than four components
// instead of indexing past the end of the array as the original did.
+(UIColor*)UIColorFromNSString:(NSString*)string
{
    NSArray *components = [string componentsSeparatedByString:@","];
    if ([components count] < 4) {
        return nil;
    }
    return [UIColor colorWithRed:[(NSString*)components[0] floatValue]
                           green:[(NSString*)components[1] floatValue]
                            blue:[(NSString*)components[2] floatValue]
                           alpha:[(NSString*)components[3] floatValue]];
}

// Computes the frame a view would occupy with `trans` undone, applying the
// inverse transform about the frame's own center.
+(CGRect)frameFromFrame:(CGRect)frame beforeTransform:(CGAffineTransform)trans
{
    // Center of the frame — the fixed point of the inverse transform.
    float centerX = frame.origin.x + frame.size.width / 2;
    float centerY = frame.origin.y + frame.size.height / 2;

    // Translate the center to the origin, undo the transform, translate back.
    CGRect result = CGRectApplyAffineTransform(frame, CGAffineTransformMakeTranslation(-centerX, -centerY));
    result = CGRectApplyAffineTransform(result, CGAffineTransformInvert(trans));
    result = CGRectApplyAffineTransform(result, CGAffineTransformMakeTranslation(centerX, centerY));

    return result;
}

// Escapes the XML special characters in `s`.
// BUG FIX: the original applied the '>' replacement to the untouched input
// (`s` instead of `ret`), silently discarding the '<' escaping. '&' is now
// escaped first as well, so pre-existing ampersands are encoded without
// double-escaping the entities this method emits.
+(NSString*)XMLEncodedString:(NSString*)s
{
    NSString* ret = [s stringByReplacingOccurrencesOfString:@"&" withString:@"&amp;"];
    ret = [ret stringByReplacingOccurrencesOfString:@"<" withString:@"&lt;"];
    ret = [ret stringByReplacingOccurrencesOfString:@">" withString:@"&gt;"];
    return ret;
}

// Applies the given AVAudioSession category to the shared session,
// logging success or the failure error.
+(void)setAudioCategory:(NSString*)category
{
    NSError *categoryError = nil;

    if ([[AVAudioSession sharedInstance] setCategory:category error:&categoryError]) {
        TLDebugS(@"Audio Session options set.");
    } else {
        TLDebugS(@"WARNING: Could not set audio session options. %@", categoryError);
    }
}
@end
