//
//  SceneRecorder.m
//  Moo-O Touch
//
//  Created by Reisen on 11/14/11.
//  Copyright 2011 __MyCompanyName__. All rights reserved.
//

#import "VideoRecorder.h"
#import "Utility.h"
#import <QuartzCore/QuartzCore.h>
#import <MobileCoreServices/UTCoreTypes.h>
#import <AssetsLibrary/AssetsLibrary.h>
#import "TalkDefine.h"

// Private helper declarations (implemented at the bottom of this file).
@interface VideoRecorder(Private)
// Draws the current screenshot (self.currentScreen) into a CVPixelBuffer and
// appends it to the asset writer at the given presentation time.
- (void) writeVideoFrameAtTime:(CMTime)time;
@end

@implementation VideoRecorder

@synthesize currentScreen, bitRate, recordPath;
// Default output dimensions in pixels.
// NOTE(review): these are module-level globals shared by every VideoRecorder
// instance — initWithWidth:Height: overwrites them for all recorders at once.
// Presumably only one recorder exists at a time; confirm before reusing.
int FWidth = 360; int FHeight = 480;
//int FWidth = 480; int FHeight = 640;

/// Designated-style initializer using the current module-wide default frame
/// size (FWidth x FHeight) and a ~1 Mbit/s video bit rate.
/// @return A recorder that is not yet recording; call startRecording to begin.
- (instancetype) init {
    self = [super init];
    if (self)
    {
        // +alloc zero-fills every ivar, so videoWriter, videoWriterInput,
        // avAdaptor, audioWriterInput, startedAt, bitmapData, currentScreen
        // and _recording already start out nil/NULL/false — the original
        // re-assigned them all redundantly.
        self.bitRate = 960000;
        frame = CGRectMake(0, 0, FWidth, FHeight);
    }
    return self;
}

/// Initializes a recorder with an explicit output size and a 5x (~4.8 Mbit/s)
/// video bit rate.
/// @param w Output width in pixels.
/// @param h Output height in pixels.
/// @return A recorder that is not yet recording.
- (instancetype) initWithWidth:(int)w Height:(int)h {
    self = [super init];
    if (self)
    {
        // +alloc zero-fills every ivar; the original's nil/NULL/false
        // assignments were redundant and have been dropped.
        self.bitRate = 960000 * 5;

        // NOTE(review): FWidth/FHeight are module globals, so creating a
        // recorder with an explicit size changes the default size used by
        // every other VideoRecorder instance — confirm this is intentional.
        FWidth = w;
        FHeight = h;

        frame = CGRectMake(0, 0, FWidth, FHeight);
    }
    return self;
}


/// Creates an RGB bitmap context backed by the instance's reusable
/// `bitmapData` buffer (malloc'd here, 4 bytes/pixel, alpha ignored).
/// @param size Context dimensions in pixels.
/// @return A +1 retained CGContextRef the caller must CGContextRelease,
///         or NULL on allocation/creation failure.
- (CGContextRef) createBitmapContextOfSize:(CGSize) size {
    CGContextRef    context = NULL;
    CGColorSpaceRef colorSpace;
    size_t          bitmapByteCount;
    size_t          bitmapBytesPerRow;

    // 8 bits per component, 4 components per pixel (XRGB).
    bitmapBytesPerRow   = (size_t)size.width * 4;
    bitmapByteCount     = bitmapBytesPerRow * (size_t)size.height;
    colorSpace = CGColorSpaceCreateDeviceRGB();

    // Reuse the single backing buffer: free any previous allocation first.
    if (bitmapData != NULL) {
        free(bitmapData);
        bitmapData = NULL;
    }

    bitmapData = malloc( bitmapByteCount );
    if (bitmapData == NULL) {
        fprintf (stderr, "Memory not allocated!");
        CGColorSpaceRelease( colorSpace );
        return NULL;
    }

    context = CGBitmapContextCreate (bitmapData,
                                     size.width,
                                     size.height,
                                     8,      // bits per component
                                     bitmapBytesPerRow,
                                     colorSpace,
                                     kCGImageAlphaNoneSkipFirst);
    if (context == NULL) {
        // BUGFIX: reset bitmapData to NULL after freeing it, otherwise
        // cleanupWriter (and the next call here) would double-free it.
        free (bitmapData);
        bitmapData = NULL;
        fprintf (stderr, "Context not created!");
        CGColorSpaceRelease( colorSpace );
        return NULL;
    }

    // BUGFIX: only touch the context once creation is known to have
    // succeeded (the original called this before the NULL check).
    CGContextSetAllowsAntialiasing(context, NO);

    CGColorSpaceRelease( colorSpace );

    return context;
}

/// Snapshots `view`'s layer and stores the result in self.currentScreen,
/// scaled/drawn into `videoFrameLOL`. Called off the encode path to refresh
/// the frame that writeVideoFrameAtTime: will encode.
/// @param view          The view to snapshot.
/// @param videoFrameLOL Destination rect (and bitmap size) for the drawing.
- (void) setFrameFromUIView:(UIView*)view withFrame:(CGRect) videoFrameLOL
{
    CGRect videoFrame = videoFrameLOL;

    CGContextRef context = [self createBitmapContextOfSize:videoFrame.size];
    if (context == NULL) {
        // BUGFIX: the helper can fail (malloc or context creation); the
        // original dereferenced the NULL context. Keep the previous frame.
        return;
    }

    // Render the view's layer into a throwaway UIKit image context.
    UIGraphicsBeginImageContext(view.frame.size);
    [[view layer] renderInContext:UIGraphicsGetCurrentContext()];
    UIImage* screenshot = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();

    // NOTE(review): no vertical flip is applied (the flip transform is
    // disabled upstream in history) — presumably the consumer expects the
    // CG-oriented image; confirm output orientation is correct.
    CGContextDrawImage(context, videoFrame, [screenshot CGImage]);

    CGImageRef ref = CGBitmapContextCreateImage(context);
    self.currentScreen = [UIImage imageWithCGImage:ref];
    CGImageRelease(ref);

    CGContextRelease(context);
}

/// Encodes the current screenshot frame at `timestamp`, lazily starting the
/// writer session on the first frame. No-op unless recording with a live
/// writer and an available screenshot.
- (void) encodeFrameAtTime:(CMTime)timestamp
{
    // Guard: need an active recording, a captured frame and a writer.
    if (!_recording || self.currentScreen == nil || videoWriter == nil) {
        return;
    }

    // A failed writer gets rebuilt once; if it is still failed, drop the frame.
    if (videoWriter.status == AVAssetWriterStatusFailed) {
        TLDebugS(@"Ready to fail ready to fail");
        [self refreshVideoWriter];
        if (videoWriter.status == AVAssetWriterStatusFailed) {
            return;
        }
    }

    if (videoWriter.status != AVAssetWriterStatusWriting) {
        // Only a writer in the Unknown state may be started; any other
        // non-writing state is terminal for this frame.
        if (videoWriter.status != AVAssetWriterStatusUnknown) {
            return;
        }

        [videoWriter startWriting];
        [videoWriter startSessionAtSourceTime:timestamp];
        startTime = CMTimeGetSeconds(timestamp);
    }

    [self writeVideoFrameAtTime:timestamp];
}

/// Appends a captured video sample buffer to the writer, lazily starting the
/// session at the buffer's presentation time.
/// @param buffer The video CMSampleBuffer to append.
/// @return YES when the buffer was handed to the writer input, NO otherwise.
- (BOOL) encodeVideoFrame:(CMSampleBufferRef)buffer
{
    if (!_recording || videoWriter == nil) {
        return NO;
    }

    CMTime pts = CMSampleBufferGetPresentationTimeStamp(buffer);

    // A failed writer gets rebuilt once; still-failed means drop this buffer.
    if (videoWriter.status == AVAssetWriterStatusFailed) {
        TLDebugS(@"Ready to fail ready to fail");
        [self refreshVideoWriter];
        if (videoWriter.status == AVAssetWriterStatusFailed) {
            return NO;
        }
    }

    if (videoWriter.status != AVAssetWriterStatusWriting) {
        TLDebugS(@"My status = %zd", videoWriter.status);

        // Only an Unknown writer may be started.
        if (videoWriter.status != AVAssetWriterStatusUnknown) {
            return NO;
        }

        [videoWriter startWriting];
        [videoWriter startSessionAtSourceTime:pts];
        startTime = CMTimeGetSeconds(pts);
    }

    if (![videoWriterInput isReadyForMoreMediaData]) {
        return NO;
    }

    [videoWriterInput appendSampleBuffer:buffer];
    return YES;
}

/// Appends a captured audio sample buffer to the writer, lazily starting the
/// session at the buffer's presentation time.
/// @param buffer The audio CMSampleBuffer to append.
/// @return YES when the buffer was handed to the writer input, NO otherwise.
-(BOOL) encodeAudioFrame:(CMSampleBufferRef)buffer
{
    if(!_recording) return NO;

    // BUGFIX(consistency): encodeVideoFrame: guards against a nil writer but
    // this method did not, so it could "start" a session on nil and report
    // success for buffers that were never written.
    if(videoWriter == nil) return NO;

    CMTime timestamp = CMSampleBufferGetPresentationTimeStamp(buffer);

    // A failed writer gets rebuilt once; still-failed means drop this buffer.
    if(videoWriter.status == AVAssetWriterStatusFailed)
    {
        TLDebugS(@"Ready to fail ready to fail");
        [self refreshVideoWriter];
    }
    if(videoWriter.status == AVAssetWriterStatusFailed)
        return NO;

    if(videoWriter.status != AVAssetWriterStatusWriting)
    {
        // NOTE(review): a prior crash log showed the status flipping to
        // Writing between the check above and startWriting below — this
        // path races with the video encoder; confirm callers serialize.
        [videoWriter startWriting];
        [videoWriter startSessionAtSourceTime:timestamp];

        startTime = CMTimeGetSeconds(timestamp);

        TLDebugS(@"Start session at %f", startTime);
    }

    if(![audioWriterInput isReadyForMoreMediaData])
    {
        TLDebugS(@"A Not ready!");
        return NO;
    }

    [audioWriterInput appendSampleBuffer:buffer];

    return YES;
}

/// Releases every writer-related object and the malloc'd bitmap buffer,
/// returning the recorder to its pristine (not-set-up) state.
- (void) cleanupWriter {
    avAdaptor        = nil;
    videoWriterInput = nil;
    audioWriterInput = nil;
    videoWriter      = nil;
    startedAt        = nil;

    // The bitmap buffer is plain malloc'd memory, not under ARC.
    if (bitmapData != NULL) {
        free(bitmapData);
        bitmapData = NULL;
    }

    self.currentScreen = nil;
}

// Non-memory cleanup only: cleanupWriter frees the malloc'd bitmap buffer
// and drops the writer objects; ARC handles everything else.
- (void)dealloc {
    TLDebugS(@"Scene Recorder dealloc!");
    
    [self cleanupWriter];
}

/// File URL for the configured recording destination (recordPath).
- (NSURL*) outputURLPath
{
    return [NSURL fileURLWithPath:recordPath];
}

/// Convenience wrapper: builds the writer, deleting any file already at
/// recordPath first.
-(BOOL) setUpWriter
{
    return [self setUpWriter:YES];
}
/// Creates the AVAssetWriter plus its H.264 video and mono AAC audio inputs
/// for a new MPEG-4 recording at recordPath.
/// @param deleteExisting When YES, any file already at recordPath is removed
///        first; pass NO when re-attaching a writer after a failure.
/// @return YES when the writer was created, NO on writer-creation failure
///         (the original ignored the error and always returned YES).
-(BOOL) setUpWriter:(BOOL)deleteExisting
{
    // Remove any stale recording so the writer can create a fresh file.
    if(deleteExisting)
    {
        NSFileManager* fileManager = [NSFileManager defaultManager];
        if ([fileManager fileExistsAtPath:recordPath]) {
            NSError* error;
            if ([fileManager removeItemAtPath:recordPath error:&error] == NO) {
                TLDebugS(@"Could not delete old recording file at path:  %@", recordPath);
            }
        }
    }

    NSError* error = nil;
    videoWriter = [[AVAssetWriter alloc] initWithURL:[self outputURLPath] fileType:AVFileTypeMPEG4 error:&error];
    NSParameterAssert(videoWriter);
    if (videoWriter == nil) {
        // BUGFIX: propagate the failure instead of silently returning YES.
        TLDebugS(@"Could not create asset writer:  %@", error);
        return NO;
    }

    //Configure video
    NSDictionary *videoCleanApertureSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                                [NSNumber numberWithInt:FWidth], AVVideoCleanApertureWidthKey,
                                                [NSNumber numberWithInt:FHeight], AVVideoCleanApertureHeightKey,
                                                [NSNumber numberWithInt:10], AVVideoCleanApertureHorizontalOffsetKey,
                                                [NSNumber numberWithInt:10], AVVideoCleanApertureVerticalOffsetKey,
                                                nil];

    NSDictionary *videoAspectRatioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                              [NSNumber numberWithInt:1], AVVideoPixelAspectRatioHorizontalSpacingKey,
                                              [NSNumber numberWithInt:1],AVVideoPixelAspectRatioVerticalSpacingKey,
                                              nil];

    // Key-frame interval of 1 => every frame is a key frame (screen capture
    // produces arbitrary deltas, so this keeps seeking exact).
    NSDictionary *codecSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:self.bitRate], AVVideoAverageBitRateKey,
                                   [NSNumber numberWithInt:1],AVVideoMaxKeyFrameIntervalKey,
                                   videoCleanApertureSettings, AVVideoCleanApertureKey,
                                   videoAspectRatioSettings, AVVideoPixelAspectRatioKey,
                                   nil];

    NSDictionary* videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   codecSettings,AVVideoCompressionPropertiesKey,
                                   [NSNumber numberWithInt:FWidth], AVVideoWidthKey,
                                   [NSNumber numberWithInt:FHeight], AVVideoHeightKey,
                                   nil];

    videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    NSParameterAssert(videoWriterInput);
    videoWriterInput.expectsMediaDataInRealTime = YES;

    // The adaptor supplies ARGB pixel buffers for the screenshot path.
    NSDictionary* bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
                                      [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, 
                                      nil];

    avAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput sourcePixelBufferAttributes:bufferAttributes];

    //Configure Audio: mono AAC, 44.1 kHz, 64 kbit/s.
    AudioChannelLayout acl;
    bzero(&acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;

    NSDictionary* audioSettings = [ NSDictionary dictionaryWithObjectsAndKeys:
                                   [ NSNumber numberWithInt: kAudioFormatMPEG4AAC], AVFormatIDKey,
                                   [ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
                                   [ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,                                      
                                   [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
                                   [NSNumber numberWithInt:64000], AVEncoderBitRateKey,
                                   nil ];

    audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioSettings];
    audioWriterInput.expectsMediaDataInRealTime = YES;

    //add input
    [videoWriter addInput:videoWriterInput];
    [videoWriter addInput:audioWriterInput];

    return YES;
}

/// Recovery path for a writer in AVAssetWriterStatusFailed: finishes off the
/// dead writer and immediately builds a replacement without deleting the
/// output file already on disk.
-(void) refreshVideoWriter
{
    TLDebugS(@"Refresh writer!");
    
    [videoWriterInput markAsFinished];    
    [audioWriterInput markAsFinished];
    // NOTE(review): finishWritingWithCompletionHandler: is asynchronous, yet
    // setUpWriter: below replaces videoWriter right away — presumably safe
    // because the old writer is already Failed, but confirm there is no race
    // with the empty completion block.
    [videoWriter finishWritingWithCompletionHandler:^(void)
     {
         
     }];
    // NO => keep the existing file; the fresh writer targets the same path.
    [self setUpWriter:NO];
}

/// Finalizes the current writer session: waits for the writer to leave the
/// Unknown state, marks both inputs finished, asks the writer to finish the
/// file, and then notifies the delegate on the main thread.
- (void) completeRecordingSession {
    @autoreleasepool {
    
        if(videoWriter.status != AVAssetWriterStatusWriting)
        {
            // Busy-wait until the writer has committed to some state before
            // the inputs may be marked finished.
            while (videoWriter.status == AVAssetWriterStatusUnknown)
            {
                TLDebugS(@"Waiting 1...");
                [NSThread sleepForTimeInterval:0.5f];
            }
            
            [videoWriterInput markAsFinished];
        
            [audioWriterInput markAsFinished];
        
            // Wait for the video
            int status = videoWriter.status;
        
            //*
            while (status == AVAssetWriterStatusUnknown)
            {
                TLDebugS(@"Waiting...");
                [NSThread sleepForTimeInterval:0.5f];
                status = videoWriter.status;
            }
        }
        //*/
        @synchronized(self)
        {
            // The completion block runs on the writer's own background queue
            // once the movie file is fully written.
            [videoWriter finishWritingWithCompletionHandler:^(void)
             {
                 [self cleanupWriter];
                 
                 id delegateObj = self.delegate;
                 
                 NSError* err = nil;
                 
                 // Touch the modification date so the finished file sorts as
                 // newly created.
                 [[NSFileManager defaultManager] setAttributes:[NSDictionary dictionaryWithObjectsAndKeys:[NSDate date], NSFileModificationDate, nil] ofItemAtPath:[[self outputURLPath] path] error:&err];
                 
                 NSURL *outputURL = [self outputURLPath];
                 
                 TLDebugS(@"Completed recording, file is stored at: %@, %@", [outputURL path], err);
                 // recordingFinished: is presumably optional on the delegate
                 // protocol, hence the respondsToSelector: check.
                 if ([delegateObj respondsToSelector:@selector(recordingFinished:)]) {
                     [delegateObj performSelectorOnMainThread:@selector(recordingFinished:) withObject:outputURL waitUntilDone:YES];
                 }
             }
             ];
        }
    
    }
}

/// Begins a new recording session if one is not already in progress.
/// @return true only when a writer was successfully set up by this call;
///         false when already recording or setup failed.
- (bool) startRecording 
{
    bool didStart = NO;
    @synchronized(self) 
    {
        if (!_recording) 
        {
            didStart = [self setUpWriter];
            startedAt = [NSDate date];
            _recording = true;
        }
    }

    return didStart;
}

/// Resets the recording reference date to "now".
- (void) updateStartTime
{
    startedAt = [NSDate date];
}
/// Ends the active recording session and finalizes the output file.
/// No-op when not currently recording.
- (void) stopRecording {
    @synchronized(self)
    {
        if (!_recording) {
            return;
        }
        _recording = false;
        [self completeRecordingSession];
    }
}

/// Copies self.currentScreen into a freshly created ARGB CVPixelBuffer and
/// appends it to the writer via the pixel-buffer adaptor.
/// @param time Presentation timestamp for the appended frame.
-(void) writeVideoFrameAtTime:(CMTime)time {
    if (![videoWriterInput isReadyForMoreMediaData]) 
    {
        TLDebugS(@"Not ready for video data");
        return;
    }

    @synchronized (self) 
    {
        // Copy the image first so a concurrent setFrameFromUIView: cannot
        // swap currentScreen out from under the draw below.
        CGImageRef cgImage = CGImageCreateCopy([self.currentScreen CGImage]);
        if (cgImage == NULL) {
            // No frame captured yet — nothing to encode.
            return;
        }

        NSDictionary* option = [NSDictionary dictionaryWithObjectsAndKeys:
                                [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                                [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, 
                                nil];

        CVPixelBufferRef pixelBuffer = NULL;
        CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, FWidth, FHeight, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)option, &pixelBuffer);
        if (status != kCVReturnSuccess || pixelBuffer == NULL)
        {
            // BUGFIX: the original logged this failure but then went on to
            // lock and dereference the NULL pixel buffer — crash. Bail out.
            TLDebugS(@"Error creating pixel buffer:  status=%d", status);
            CGImageRelease(cgImage);
            return;
        }

        // Draw the screenshot directly into the pixel buffer's backing store.
        CVPixelBufferLockBaseAddress( pixelBuffer, 0 );
        uint8_t* destPixels = (uint8_t*)CVPixelBufferGetBaseAddress(pixelBuffer);

        CGColorSpaceRef cSpace = CGColorSpaceCreateDeviceRGB();

        // The pixel buffer may be padded, so use ITS bytes-per-row rather
        // than assuming FWidth * 4.
        CGContextRef context = CGBitmapContextCreate(destPixels, FWidth, FHeight, 8, CVPixelBufferGetBytesPerRow(pixelBuffer), cSpace, kCGImageAlphaPremultipliedFirst);
        if (context != NULL) {
            CGContextDrawImage(context, CGRectMake(0, 0, FWidth, FHeight), cgImage);
            CGContextRelease(context);
        }
        CGColorSpaceRelease(cSpace);

        // (Removed: an expensive CGDataProviderCopyData of the whole image
        // that the original created and released without ever reading.)

        BOOL success = [avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time];
        if (!success) {
            TLDebugS(@"Warning: Unable to write buffer to video");
        }

        //clean up
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
        CVPixelBufferRelease(pixelBuffer);
        CGImageRelease(cgImage);
    }
}

@end