//
//  Common.m
//  YXVideoKit
//
//  Created by Jeakin on 13-10-15.
//  Copyright (c) 2013年 Jeakin. All rights reserved.
//

#import "YXVideoCommon.h"
#import <sys/utsname.h>
//#import <CoreLocation/CoreLocation.h>
#import <AVFoundation/AVFoundation.h>
//#import ".Output.h"

// Returns the size in bytes of the file at |filePath|, or 0 when the file
// does not exist (or its attributes cannot be read).
long long fileSizeAtPath(NSString* filePath) {
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if (![fileManager fileExistsAtPath:filePath]) {
        return 0;
    }
    NSDictionary *attributes = [fileManager attributesOfItemAtPath:filePath error:nil];
    return [attributes fileSize];
}

// Describes the media file at |filePath| as a dictionary of strings:
//   "duration" — playback length in seconds;
//   "width"/"height" — natural size of the first video track (only present
//   when the asset actually has a video track).
// Returns nil when |filePath| is nil.
NSDictionary * mediaInfo(NSString * filePath) {
    if (filePath == nil) {
        return nil;
    }
    NSMutableDictionary *info = [NSMutableDictionary dictionary];
    AVAsset *asset = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:filePath]];
    if (asset != nil) {
        [info setObject:[NSString stringWithFormat:@"%f", CMTimeGetSeconds([asset duration])]
                 forKey:@"duration"];
        NSArray *videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
        if ([videoTracks count] > 0) {
            AVAssetTrack *videoTrack = [videoTracks objectAtIndex:0];
            CGSize naturalSize = [videoTrack naturalSize];
            [info setObject:[NSString stringWithFormat:@"%f", naturalSize.width] forKey:@"width"];
            [info setObject:[NSString stringWithFormat:@"%f", naturalSize.height] forKey:@"height"];
        }
    }
    return info;
}

// Writes a JPEG thumbnail for the movie at |filePath| into the movie's own
// directory, named "<basename><suffix>.jpg". Any existing file at that
// location is removed first. Returns the thumbnail path (whether or not
// generation succeeded — assetThumb logs failures itself).
NSString *movieThumb(NSString *filePath, CGSize thumbSize, NSString *suffix, CMTime atTime) {
    AVAsset *asset = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:filePath]];
    NSString *baseName  = [[filePath lastPathComponent] stringByDeletingPathExtension];
    NSString *thumbPath = [[filePath stringByDeletingLastPathComponent]
                           stringByAppendingFormat:@"/%@%@%@", baseName, suffix, @".jpg"];
    NSError *error;
    [[NSFileManager defaultManager] removeItemAtPath:thumbPath error:&error];
    assetThumb(asset, thumbSize, thumbPath, atTime);
    return thumbPath;
}

// Captures a still frame from |asset| at time zero and returns it as a
// UIImage, or nil when the asset is nil or frame extraction fails.
// A non-zero |thumbSize| caps the dimensions of the generated image.
UIImage *thumb(AVAsset *asset, CGSize thumbSize) {
    if (asset == nil) {
        return nil;
    }
    AVAssetImageGenerator *generator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
    if (generator == nil) {
        return nil;
    }
    generator.appliesPreferredTrackTransform = YES;
    generator.apertureMode = AVAssetImageGeneratorApertureModeEncodedPixels;
    if (!CGSizeEqualToSize(thumbSize, CGSizeZero)) {
        generator.maximumSize = thumbSize;
    }

    NSError *generationError = nil;
    CGImageRef frameRef = [generator copyCGImageAtTime:kCMTimeZero
                                            actualTime:NULL
                                                 error:&generationError];
    if (frameRef == NULL) {
        NSLog(@"thumbnailImageGenerationError %@", generationError);
        return nil;
    }

    UIImage *frame = [[UIImage alloc] initWithCGImage:frameRef];
    CGImageRelease(frameRef);
    return frame;
}


// Extracts a still frame from |asset| at |atTime| and writes it to
// |thumbPath| as a JPEG (quality 0.6). A non-zero |thumbSize| caps the
// generated image's dimensions. Failures are logged; nothing is written.
void assetThumb(AVAsset *asset, CGSize thumbSize, NSString *thumbPath, CMTime atTime) {
    if (asset == nil) {
        return;
    }
    AVAssetImageGenerator *imageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
    if (imageGenerator == nil) {
        return;
    }
    imageGenerator.appliesPreferredTrackTransform = YES;
    imageGenerator.apertureMode = AVAssetImageGeneratorApertureModeEncodedPixels;
    if (!CGSizeEqualToSize(thumbSize, CGSizeZero)) {
        imageGenerator.maximumSize = thumbSize;
    }

    NSError *thumbnailImageGenerationError = nil;
    CGImageRef thumbnailImageRef = [imageGenerator copyCGImageAtTime:atTime
                                                          actualTime:NULL
                                                               error:&thumbnailImageGenerationError];
    if (!thumbnailImageRef) {
        NSLog(@"thumbnailImageGenerationError %@", thumbnailImageGenerationError);
        return;
    }

    UIImage *thumbnailImage = [[UIImage alloc] initWithCGImage:thumbnailImageRef];
    // Fix: release the CGImageRef unconditionally. The old code released it
    // only when the UIImage was created, leaking the frame otherwise.
    CGImageRelease(thumbnailImageRef);

    if (thumbnailImage != nil) {
        NSData *image = UIImageJPEGRepresentation(thumbnailImage, 0.6f);
        [image writeToURL:[NSURL fileURLWithPath:thumbPath] atomically:YES];
    }
}

// Builds an identifier of the form "ios_<systemVersion>_apple_<model>".
// A handful of known machine codes are mapped to friendlier names; anything
// unrecognised falls back to the raw machine code.
NSString *deviceName() {
    struct utsname systemInfo;
    uname(&systemInfo);

    NSString *machine = [NSString stringWithCString:systemInfo.machine
                                           encoding:NSUTF8StringEncoding];
    NSDictionary *friendlyNames = @{@"i386"      : @"Simulator",
                                    @"iPhone1,1" : @"iPhone",
                                    @"iPhone1,2" : @"iPhone3G",
                                    @"iPhone2,1" : @"iPhone3GS",
                                    @"iPhone3,1" : @"iPhone4",
                                    @"iPhone4,1" : @"iPhone4S",
                                    @"iPad1,1"   : @"iPad",
                                    @"iPad2,1"   : @"iPad2",
                                    @"iPod1,1"   : @"Touch1",
                                    @"iPod2,1"   : @"Touch2",
                                    @"iPod3,1"   : @"Touch3",
                                    @"iPod4,1"   : @"Touch4"};
    NSString *model = friendlyNames[machine] ?: machine;
    return [NSString stringWithFormat:@"ios_%@_apple_%@",
            [[UIDevice currentDevice] systemVersion], model];
}

// Returns the app's display name (CFBundleDisplayName) from the main
// bundle's Info.plist, or nil when the key is absent.
NSString * getAPPName() {
    NSBundle *mainBundle = [NSBundle mainBundle];
    return [mainBundle objectForInfoDictionaryKey:@"CFBundleDisplayName"];
}

// Returns the build number (CFBundleVersion) from the main bundle's
// Info.plist, or nil when the key is absent.
NSString *getAPPVersion() {
    NSDictionary *info = [[NSBundle mainBundle] infoDictionary];
    return [info objectForKey:(NSString *)kCFBundleVersionKey];
}

// Returns the marketing version (CFBundleShortVersionString) from the main
// bundle's Info.plist, or nil when the key is absent.
NSString *getAPPShortVersion() {
    NSDictionary *info = [[NSBundle mainBundle] infoDictionary];
    return [info objectForKey:@"CFBundleShortVersionString"];
}

// Builds a unique .mp4 path inside |directoryPath|, combining a
// "yyyy-MM-dd-HH-mm-ss-" timestamp with the system uptime for uniqueness,
// and creates the directory (with intermediates) if it does not exist yet.
NSString * generateMovieFilePath(NSString *directoryPath) {
    NSString *timestamp = dateFormat(@"yyyy'-'MM'-'dd'-'HH'-'mm'-'ss'-'");
    NSString *moviePath = [directoryPath stringByAppendingString:
                           [NSString stringWithFormat:@"/%@%f.mp4",
                            timestamp, [NSProcessInfo processInfo].systemUptime]];

    NSFileManager *fileManager = [NSFileManager defaultManager];
    BOOL isDirectory = YES;
    BOOL exists = [fileManager fileExistsAtPath:directoryPath isDirectory:&isDirectory];
    if (!exists) {
        BOOL created = [fileManager createDirectoryAtPath:directoryPath
                              withIntermediateDirectories:YES
                                               attributes:nil
                                                    error:NULL];
        if (!created) {
            NSLog(@"Error: Create folder failed %@", directoryPath);
        }
    }
    return moviePath;
}

// Formats the current date/time with the given NSDateFormatter pattern
// and returns the resulting string.
NSString * dateFormat(NSString *format) {
    NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
    [formatter setDateFormat:format];
    return [formatter stringFromDate:[NSDate date]];
}

// A video file is considered valid only when its asset can be created and
// contains at least one video track AND at least one audio track.
BOOL isVideoValid(NSURL *URL) {
    AVAsset *asset = [AVURLAsset assetWithURL:URL];
    if (asset == nil) {
        return NO;
    }
    if ([[asset tracksWithMediaType:AVMediaTypeVideo] count] == 0) {
        return NO;
    }
    return [[asset tracksWithMediaType:AVMediaTypeAudio] count] > 0;
}

// C comparator for -sortedArrayUsingFunction:context:. Orders |a| before |b|
// using numeric string comparison (so "2" sorts before "10"). The |reverse|
// context pointer is currently ignored.
// NOTE(review): imageSequenceToVideo passes an array of UIImage objects to
// this comparator; UIImage does not declare -compare:options: — verify the
// intended element type before relying on this sort.
NSInteger sort(id a, id b, void *reverse) {
    return [a compare:b options:NSNumericSearch];
}

// Scales |image| down to fit within |newSize| (preserving aspect ratio by
// scaling to the constraining dimension), then crops the result to
// |cropRect|, clamping the crop to the scaled image's bounds. Returns the
// scaled — and, when the crop origin lies inside the image, cropped — image.
// NOTE(review): |newSize| is received by value and mutated locally, so
// callers are unaffected. UIGraphicsBeginImageContext renders at scale 1.0;
// confirm Retina output is not required here.
UIImage *resizeToSize(CGImageRef image, CGSize newSize, CGRect cropRect) {
    CGContextRef                context;
    CGImageRef                  imageRef;
    CGSize                      inputSize;
    UIImage                     *outputImage = nil;
    CGFloat                     scaleFactor, width;
    
    // resize, maintaining aspect ratio:
    
    inputSize = CGSizeMake(CGImageGetWidth(image), CGImageGetHeight(image));
    scaleFactor = newSize.height / inputSize.height;
    width = roundf( inputSize.width * scaleFactor );
    
    // If scaling by height would overflow the target width, scale by width
    // instead and recompute the height; otherwise keep height and shrink width.
    if ( width > newSize.width ) {
        scaleFactor = newSize.width / inputSize.width;
        newSize.height = roundf( inputSize.height * scaleFactor );
    } else {
        newSize.width = width;
    }
    
    UIGraphicsBeginImageContext( newSize );
    
    context = UIGraphicsGetCurrentContext();
    CGContextDrawImage( context, CGRectMake( 0, 0, newSize.width, newSize.height ), image);
    outputImage = UIGraphicsGetImageFromCurrentImageContext();
    
    UIGraphicsEndImageContext();
    
    // From here on, inputSize means the SCALED image's size.
    inputSize = newSize;
    
    // constrain crop rect to legitimate bounds
    if ( cropRect.origin.x >= inputSize.width || cropRect.origin.y >= inputSize.height ) return outputImage;
    if ( cropRect.origin.x + cropRect.size.width >= inputSize.width ) cropRect.size.width = inputSize.width - cropRect.origin.x;
    if ( cropRect.origin.y + cropRect.size.height >= inputSize.height ) cropRect.size.height = inputSize.height - cropRect.origin.y;
    
    // crop
    if ( ( imageRef = CGImageCreateWithImageInRect( outputImage.CGImage, cropRect ) ) ) {
        outputImage = [[UIImage alloc] initWithCGImage: imageRef];
        CGImageRelease( imageRef );
    }
    
    return outputImage;
}

// Creates a kCVPixelFormatType_32ARGB pixel buffer the size of |image| and
// draws the image into it. Ownership of the returned buffer transfers to the
// caller, who must release it with CVBufferRelease.
// Returns NULL when buffer creation fails.
CVPixelBufferRef pixelBufferFromCGImage(CGImageRef image) {
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;

    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, CGImageGetWidth(image),
                                          CGImageGetHeight(image), kCVPixelFormatType_32ARGB,
                                          (__bridge CFDictionaryRef) options, &pxbuffer);
    // Fix: bail out on failure. The old code only logged and then went on to
    // lock, map and draw into a NULL buffer — a guaranteed crash.
    if (status != kCVReturnSuccess || pxbuffer == NULL) {
        NSLog(@"CVPixelBufferCreate error %d", status);
        return NULL;
    }
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pxbuffer);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, CGImageGetWidth(image),
                                                 CGImageGetHeight(image), 8, bytesPerRow, rgbColorSpace,
                                                 (CGBitmapInfo)kCGImageAlphaNoneSkipFirst);
    // (A former CGContextConcatCTM with a 0-radian rotation — a no-op — was removed.)
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}

// Encodes |imageSequence| (an array of UIImage) into an H.264 MP4 movie at
// |filePath| at 24 fps, appending one pixel buffer per image through an
// AVAssetWriterInputPixelBufferAdaptor.
// NOTE(review): the array is re-sorted with the C function `sort`, which
// sends -compare:options: to its elements — UIImage does not declare that
// selector, so verify the intended element type.
// NOTE(review): finishWritingWithCompletionHandler is asynchronous and the
// completion block is empty, so the file may not be fully written when this
// function returns.
void imageSequenceToVideo(NSArray *imageSequence , NSString *filePath) {
    
    // The first image determines the output frame size.
    UIImage *first = [imageSequence objectAtIndex:0];
    
    CGSize frameSize = first.size;
    
    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                                  [NSURL fileURLWithPath:filePath] fileType:AVFileTypeMPEG4
                                                              error:&error];
    
    if(error) {
        NSLog(@"error creating AssetWriter: %@",[error description]);
    }
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:frameSize.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:frameSize.height], AVVideoHeightKey,
                                   nil];
    
    AVAssetWriterInput* writerInput = [AVAssetWriterInput
                                        assetWriterInputWithMediaType:AVMediaTypeVideo
                                        outputSettings:videoSettings];
    
    // Source pixel buffers: 32ARGB at the first image's dimensions.
    NSMutableDictionary *attributes = [[NSMutableDictionary alloc] init];
    [attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
    [attributes setObject:[NSNumber numberWithUnsignedInt:frameSize.width] forKey:(NSString*)kCVPixelBufferWidthKey];
    [attributes setObject:[NSNumber numberWithUnsignedInt:frameSize.height] forKey:(NSString*)kCVPixelBufferHeightKey];
    
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                     sourcePixelBufferAttributes:attributes];
    
    [videoWriter addInput:writerInput];
    
    // fixes all errors
    writerInput.expectsMediaDataInRealTime = YES;
    
    //Start a session:
    BOOL start = [videoWriter startWriting];
    NSLog(@"Session started? %d", start);
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    
    // Seed the session with the first frame at time zero.
    CVPixelBufferRef buffer = pixelBufferFromCGImage([first CGImage]);
    BOOL result = [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
    
    if (result == NO)
        NSLog(@"failed to append buffer");
    
    if(buffer)
        CVBufferRelease(buffer);
    
    [NSThread sleepForTimeInterval:0.05];
    
    int reverseSort = NO;
    NSArray *newArray = [imageSequence sortedArrayUsingFunction:sort context:&reverseSort];
    
    int fps = 24;
    
    int i = 0;
    for (UIImage *imgFrame in newArray)
    {
        if (adaptor.assetWriterInput.readyForMoreMediaData)
        {
            
            i++;
            NSLog(@"inside for loop %d %@ ",i, imgFrame);
            // Frame i is presented at (i + 1) / fps seconds.
            CMTime frameTime = CMTimeMake(1, fps);
            CMTime lastTime=CMTimeMake(i, fps);
            CMTime presentTime=CMTimeAdd(lastTime, frameTime);
            
            buffer = pixelBufferFromCGImage([imgFrame CGImage]);
            BOOL result = [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
            
            if (result == NO) //failes on 3GS, but works on iphone 4
            {
                NSLog(@"failed to append buffer");
                NSLog(@"The error is %@", [videoWriter error]);
            }
            if(buffer)
                CVBufferRelease(buffer);
            [NSThread sleepForTimeInterval:0.05];
        }
        else
        {
            NSLog(@"error");
            i--;
        }
        [NSThread sleepForTimeInterval:0.02];
    }
    
    //Finish the session:
    [writerInput markAsFinished];
    [videoWriter finishWritingWithCompletionHandler:^{
        
    }];
    CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
}

// Maps |image|'s UIImageOrientation to the affine transform that rotates
// its pixels upright (translate into place, then rotate). Up / UpMirrored
// need no adjustment and yield the identity transform.
CGAffineTransform imageOrientationtoTransform(UIImage *image)
{
    CGAffineTransform result = CGAffineTransformIdentity;
    CGSize imageSize = image.size;

    switch (image.imageOrientation) {
        case UIImageOrientationDown:
        case UIImageOrientationDownMirrored: {
            // Upside down: half-turn about the image centre.
            result = CGAffineTransformTranslate(result, imageSize.width, imageSize.height);
            result = CGAffineTransformRotate(result, M_PI);
            break;
        }
        case UIImageOrientationLeft:
        case UIImageOrientationLeftMirrored: {
            // Rotated 90° clockwise relative to upright.
            result = CGAffineTransformTranslate(result, imageSize.width, 0);
            result = CGAffineTransformRotate(result, M_PI_2);
            break;
        }
        case UIImageOrientationRight:
        case UIImageOrientationRightMirrored: {
            // Rotated 90° counter-clockwise relative to upright.
            result = CGAffineTransformTranslate(result, 0, imageSize.height);
            result = CGAffineTransformRotate(result, -M_PI_2);
            break;
        }
        case UIImageOrientationUp:
        case UIImageOrientationUpMirrored:
            break;
    }
    return result;
}


// Renders a single |image| as an H.264 MP4 movie at |filePath| lasting
// |duration| seconds at |fps| frames per second (fps is clamped to a
// minimum of 2). The same pixel buffer is appended for every frame.
// Returns YES on success, NO when a frame could not be appended.
// NOTE(review): finishWritingWithCompletionHandler runs asynchronously with
// an empty completion block, so the file may not be fully written when this
// function returns YES.
BOOL imageToVideo(UIImage *image, NSString *filePath, CGFloat duration, int fps) {
    if (fps < 2) { fps = 2;}
    
    CGSize frameSize = CGSizeMake(CGImageGetWidth(image.CGImage),  CGImageGetHeight(image.CGImage));
    
    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                                  [NSURL fileURLWithPath:filePath] fileType:AVFileTypeMPEG4
                                                              error:&error];
    
    if(error) {
        NSLog(@"error creating AssetWriter: %@",[error description]);
    }
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:frameSize.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:frameSize.height], AVVideoHeightKey,
                                   nil];
    
    AVAssetWriterInput* writerInput = [AVAssetWriterInput
                                        assetWriterInputWithMediaType:AVMediaTypeVideo
                                        outputSettings:videoSettings];
    
    // Source pixel buffers: 32ARGB at the image's pixel dimensions.
    NSMutableDictionary *attributes = [[NSMutableDictionary alloc] init];
    [attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
    [attributes setObject:[NSNumber numberWithUnsignedInt:frameSize.width] forKey:(NSString*)kCVPixelBufferWidthKey];
    [attributes setObject:[NSNumber numberWithUnsignedInt:frameSize.height] forKey:(NSString*)kCVPixelBufferHeightKey];
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                     sourcePixelBufferAttributes:attributes];
    
    [videoWriter addInput:writerInput];
    
    // Real-time mode keeps the input permanently ready for data.
    writerInput.expectsMediaDataInRealTime = YES;
    writerInput.transform = imageOrientationtoTransform(image);

    // Start a session:
    BOOL start = [videoWriter startWriting];
    NSLog(@"Session started? %d", start);
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    
    // One pixel buffer, reused for every frame.
    CVPixelBufferRef buffer = pixelBufferFromCGImage([image CGImage]);
    
    [NSThread sleepForTimeInterval:0.05];
    
    for (int i = 0; i < fps * duration; i++) {
        if (adaptor.assetWriterInput.readyForMoreMediaData) {
            CMTime presentTime = CMTimeMake(i, fps);
            
            BOOL result = [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
            
            if (result == NO) {
                NSLog(@"failed to append buffer");
                NSLog(@"The error is %@", [videoWriter error]);
                // Fix: release the pixel buffer on this early-exit path too —
                // it was previously leaked here.
                if (buffer)
                    CVBufferRelease(buffer);
                CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
                return NO;
            }

            [NSThread sleepForTimeInterval:0.05];
        } else {
            NSLog(@"error");
            i--;  // retry the same frame index on the next iteration
        }
        [NSThread sleepForTimeInterval:0.02];
    }
    
    if(buffer)
        CVBufferRelease(buffer);
    
    // Finish the session:
    [writerInput markAsFinished];
    [videoWriter endSessionAtSourceTime:CMTimeMake(duration * fps, fps)];
    
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        [videoWriter finishWritingWithCompletionHandler:^{
            
        }];
    });

    CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
    return YES;
}

// Like imageToVideo, but the encoded output is scaled so that the image's
// SHORTER side becomes 480 pixels (the longer side scales proportionally;
// a square image becomes 480x480). Returns YES on success, NO when a frame
// could not be appended.
// NOTE(review): the adaptor's source pixel buffers intentionally stay at the
// original image size (the writer scales); confirm that is the intent.
// NOTE(review): finishWritingWithCompletionHandler runs asynchronously, so
// the file may not be fully written when this returns YES.
BOOL imageToVideoWithSize480(UIImage *image, NSString *filePath, CGFloat duration, int fps) {
    if (fps < 2) { fps = 2;}
    
    CGSize frameSize = CGSizeMake(CGImageGetWidth(image.CGImage),  CGImageGetHeight(image.CGImage));
    
    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                                  [NSURL fileURLWithPath:filePath] fileType:AVFileTypeMPEG4
                                                              error:&error];
    
    if(error) {
        NSLog(@"error creating AssetWriter: %@",[error description]);
    }

    // Scale the shorter side to 480, preserving aspect ratio.
    int width,height;
    if (frameSize.height > frameSize.width) {
        width = 480;
        height = (CGFloat)frameSize.height / (CGFloat)frameSize.width * 480;
    } else if (frameSize.height < frameSize.width) {
        height = 480;
        width = (CGFloat)frameSize.width / (CGFloat)frameSize.height * 480;
    } else {
        width = 480;
        height = 480;
    }
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:height], AVVideoHeightKey,
                                   nil];
    
    AVAssetWriterInput* writerInput = [AVAssetWriterInput
                                        assetWriterInputWithMediaType:AVMediaTypeVideo
                                        outputSettings:videoSettings];
    
    NSMutableDictionary *attributes = [[NSMutableDictionary alloc] init];
    [attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
    [attributes setObject:[NSNumber numberWithUnsignedInt:frameSize.width] forKey:(NSString*)kCVPixelBufferWidthKey];
    [attributes setObject:[NSNumber numberWithUnsignedInt:frameSize.height] forKey:(NSString*)kCVPixelBufferHeightKey];
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                     sourcePixelBufferAttributes:attributes];
    
    [videoWriter addInput:writerInput];
    
    // Real-time mode keeps the input permanently ready for data.
    writerInput.expectsMediaDataInRealTime = YES;
    writerInput.transform = imageOrientationtoTransform(image);

    // Start a session:
    BOOL start = [videoWriter startWriting];
    NSLog(@"Session started? %d", start);
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    
    // One pixel buffer, reused for every frame.
    CVPixelBufferRef buffer = pixelBufferFromCGImage([image CGImage]);
    
    [NSThread sleepForTimeInterval:0.05];
    
    for (int i = 0; i < fps * duration; i++) {
        if (adaptor.assetWriterInput.readyForMoreMediaData) {
            CMTime presentTime = CMTimeMake(i, fps);
            
            BOOL result = [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
            
            if (result == NO) {
                NSLog(@"failed to append buffer");
                NSLog(@"The error is %@", [videoWriter error]);
                // Fix: release the pixel buffer on this early-exit path too —
                // it was previously leaked here.
                if (buffer)
                    CVBufferRelease(buffer);
                CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
                return NO;
            }
            
            [NSThread sleepForTimeInterval:0.05];
        } else {
            NSLog(@"error");
            i--;  // retry the same frame index on the next iteration
        }
        [NSThread sleepForTimeInterval:0.02];
    }
    
    if(buffer)
        CVBufferRelease(buffer);
    
    // Finish the session:
    [writerInput markAsFinished];
    [videoWriter endSessionAtSourceTime:CMTimeMake(duration * fps, fps)];
    
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        [videoWriter finishWritingWithCompletionHandler:^{
            
        }];
    });

    CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
    return YES;
}

// Returns whether the app may capture video. On iOS 7 and later this
// consults the AVFoundation authorization status; on earlier systems it
// probes whether a capture input can be created for the default camera.
BOOL isCameraAuthorized()
{
    NSString *systemVersion = [[UIDevice currentDevice] systemVersion];
    BOOL isIOS7OrLater = ([systemVersion compare:@"7.0"] != NSOrderedAscending);
    if (isIOS7OrLater) {
        return [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo] == AVAuthorizationStatusAuthorized;
    }
    AVCaptureDevice *camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:camera error:nil];
    return input != nil;
}

//BOOL isLocationAuthorized()
//{
//    return [CLLocationManager locationServicesEnabled] && [CLLocationManager authorizationStatus] == kCLAuthorizationStatusAuthorized;
//
//}

// Returns the display size of the first video track at |videoURL|: the
// natural size run through the track's preferred transform, with negative
// components flipped positive. CGSizeZero when there is no video track.
CGSize getVideoSize (NSURL *videoURL) {
    AVAsset *asset = [AVURLAsset URLAssetWithURL:videoURL options:nil];
    NSArray *videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    if ([videoTracks count] == 0) {
        return CGSizeZero;
    }
    AVAssetTrack *videoTrack = [videoTracks objectAtIndex:0];
    CGSize transformedSize = CGSizeApplyAffineTransform([videoTrack naturalSize],
                                                        videoTrack.preferredTransform);
    return CGSizeMake(fabs(transformedSize.width), fabs(transformedSize.height));
}

// Computes the transform that scales the video at |videoURL| to fill
// |targetVideoSize| (aspect-fill: scale by whichever axis needs the larger
// ratio) and shifts it by |offset| to realise the requested crop.
// When |cropType| is YXVideoCropTop/Middle/Bottom, |offset| is overridden
// with the offset that anchors the crop to the top, centre or bottom of the
// scaled video; any other crop type uses the caller-supplied |offset|.
// NOTE(review): |orientation| here does not describe the device — it merely
// records which axis drove the scale ratio (portrait = width-driven,
// landscape = height-driven) so the crop offset is applied on the right axis.
CGAffineTransform transformationVideo(NSURL *videoURL,CGSize targetVideoSize, CGPoint offset,YXVideoCropType cropType)
{
    CGSize videoSize = getVideoSize(videoURL);
    UIDeviceOrientation orientation;
    CGFloat ratio;
    // Aspect-fill: pick the larger of the two scale ratios.
    if (targetVideoSize.width/videoSize.width > targetVideoSize.height/videoSize.height) {
        ratio = targetVideoSize.width/videoSize.width;
        orientation = UIDeviceOrientationPortrait;
    } else {
        ratio = targetVideoSize.height/videoSize.height;
        orientation = UIDeviceOrientationLandscapeLeft;
    }
    
    // Size of the video after scaling.
    CGSize ratioVideo;
    ratioVideo.height = videoSize.height * ratio;
    ratioVideo.width  = videoSize.width * ratio;
    
     AVAsset *asset          = [AVURLAsset URLAssetWithURL:videoURL options:nil];
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    
    CGAffineTransform transform = videoTrack.preferredTransform;
    
    if (cropType > 0) {
        switch (cropType) {
            case YXVideoCropTop:
                    offset = CGPointMake(0.0f, 0.0f);
                break;
            case YXVideoCropMiddle:
                // Centre the overflow on the axis that exceeds the target.
                if (UIDeviceOrientationIsPortrait(orientation)) {
                    offset = CGPointMake(0.0f, (ratioVideo.height - targetVideoSize.height) / 2.0f);
                } else {
                    offset = CGPointMake((ratioVideo.width - targetVideoSize.width) / 2.0f, 0.0f);
                }
                break;
            case YXVideoCropBottom:
                // Push the overflow entirely off the leading edge.
                if (UIDeviceOrientationIsPortrait(orientation)) {
                    offset = CGPointMake(0.0f, ratioVideo.height - targetVideoSize.height);
                } else {
                    offset = CGPointMake(ratioVideo.width - targetVideoSize.width, 0.0f);
                }
                break;
                
            default:
                break;
        }
        
    }
        // Scale the track's own translation, then subtract the crop offset.
        transform.tx = transform.tx * ratio - offset.x;
        transform.ty = transform.ty * ratio - offset.y;

    return CGAffineTransformScale(transform,ratio,ratio);
}



// Joins |dict| into a "key=value&key=value" query string.
// NOTE: keys and values are interpolated verbatim — nothing is
// percent-encoded here, so callers must pre-encode where needed.
// Pair order follows dictionary enumeration order and is unspecified.
NSString* buildHttpQuery(NSDictionary *dict) {
    NSMutableArray *pairs = [NSMutableArray arrayWithCapacity:[dict count]];
    for (id key in dict) {
        [pairs addObject:[NSString stringWithFormat:@"%@=%@", key, [dict objectForKey:key]]];
    }
    return [pairs componentsJoinedByString:@"&"];
}

// Remaps any video path to the app sandbox's Documents/media directory,
// keeping only the original file name.
NSString *convertVideoPath(NSString *videoPath) {
    NSString *fileName = [videoPath lastPathComponent];
    return [NSHomeDirectory() stringByAppendingFormat:@"/Documents/media/%@", fileName];
}

//void commRunOnMainQueueWithoutDeadlocking(void (^block)(void))
//{
//    runOnMainQueueWithoutDeadlocking(block);
//}
//
//void commRunAsynchronouslyOnVideoProcessingQueue(void (^block)(void))
//{
//    runAsynchronouslyOnVideoProcessingQueue(block);
//}

