//
//  VideoReader.m
//  Moo-O Touch
//
//  Created by Reisen on 8/3/12.
//  Copyright 2012 __MyCompanyName__. All rights reserved.
//

#import "VideoReader.h"
#import "Utility.h"
#import "TalkDefine.h"

@implementation VideoReader
@synthesize filePath;

/// Creates a reader with no video loaded; call -openVideo: before reading frames.
- (id)init
{
    if ((self = [super init])) {
        // Nothing opened yet.
        isInitialized = NO;
    }
    return self;
}

/// Releases any CoreMedia sample buffers still held at teardown.
/// ARC disposes of the object-typed ivars; CF buffers must be freed by hand.
/// `buffer` and `nextBuffer` can briefly alias the same sample buffer
/// (the read methods do `buffer = nextBuffer` before refilling), so the
/// shared buffer must only be released once.
-(void) dealloc
{
    if(nextBuffer != NULL && nextBuffer != buffer)
    {
        CMSampleBufferInvalidate(nextBuffer);
        CFRelease(nextBuffer);
    }

    if(buffer != NULL)
    {
        CMSampleBufferInvalidate(buffer);
        CFRelease(buffer);
    }
}

/// Convenience entry point: opens the video at `path` from its very beginning.
-(void) openVideo:(NSString*)path
{
    [self openVideo:path fromTime:kCMTimeZero];
}

/// Opens the video at `path` and prepares an AVAssetReader that decodes
/// 32-bit BGRA frames starting at `startTime`. Also builds an audio-only
/// composition used by -playAudio. Leaves isInitialized == NO on any failure.
/// @param path       Filesystem path of the movie file.
/// @param startTime  First presentation time the reader should decode from.
-(void) openVideo:(NSString*)path fromTime:(CMTime)startTime
{
    if(isInitialized)
    {
        TLDebugS(@"Let me close this for you...");
        [self closeVideo];
    }

    isInitialized = YES;

    buffer = NULL;
    nextBuffer = NULL;
    msTime = 0;
    nextMsTime = 0;

    NSError* error = nil;

    self.filePath = path;

    TLDebugS(@"Open video from %@", self.filePath);

    if(![[NSFileManager defaultManager] fileExistsAtPath:self.filePath])
    {
        TLDebugS(@"File not exists!");
        isInitialized = NO;
        return;
    }

    mAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:path] options:nil];

    if(mAsset == nil)
    {
        TLDebugS(@"Asset nil!");
        isInitialized = NO;
        return;
    }

    // Duration in whole milliseconds.
    self.duration = (long)(CMTimeGetSeconds([mAsset duration]) * 1000);

    mAssetReader = [[AVAssetReader alloc] initWithAsset:mAsset error:&error];
    if(mAssetReader == nil)
    {
        // Reader could not be created (corrupt or unsupported file);
        // dereferencing it for timeRange would be pointless.
        TLDebugS(@"Error reading: %@", error);
        isInitialized = NO;
        return;
    }
    mAssetReader.timeRange = CMTimeRangeMake(startTime, kCMTimePositiveInfinity);

    NSArray* videoTracks = [mAsset tracksWithMediaType:AVMediaTypeVideo];

    if([videoTracks count] == 0)
    {
        TLDebugS(@"Is shitty video!");
        isInitialized = NO;
        return;
    }

    AVAssetTrack* videoTrack = [videoTracks objectAtIndex:0];

    // Keep only the rotation component of the track transform; the
    // translation offsets are dropped before frames are re-oriented.
    transform = [videoTrack preferredTransform];
    transform.tx = 0;
    transform.ty = 0;

    TLDebugS(@"Transform = %@", NSStringFromCGAffineTransform(transform));

    // Decode straight to 32-bit BGRA so frames convert directly to UIImage.
    NSDictionary* outputSettings = @{ (NSString*)kCVPixelBufferPixelFormatTypeKey :
                                          @(kCVPixelFormatType_32BGRA) };

    mReaderOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:outputSettings];
    [mAssetReader addOutput:mReaderOutput];

    [mAssetReader startReading];

    if(mAssetReader.error != nil)
    {
        TLDebugS(@"Error reading: %@", mAssetReader.error);
    }

    // Copy the audio track (if any) into a separate composition so the sound
    // can be driven by AVPlayer while video frames are pulled manually.
    composition = [AVMutableComposition composition];
    AVMutableCompositionTrack* audioCompTrack =
        [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                 preferredTrackID:kCMPersistentTrackID_Invalid];

    NSArray* audioTracks = [mAsset tracksWithMediaType:AVMediaTypeAudio];

    if(audioTracks.count > 0)
    {
        AVAssetTrack* audioTrack = [audioTracks objectAtIndex:0];
        CMTimeRange range = CMTimeRangeMake(CMTimeMake(0, 600),
                                            CMTimeMake(mAsset.duration.value, mAsset.duration.timescale));

        if(![audioCompTrack insertTimeRange:range ofTrack:audioTrack atTime:[composition duration] error:&error])
        {
            TLDebugS(@"Composition sucks!");
        }
    }
}

/// Decodes and returns the frame displayed at `frame` milliseconds, rotated
/// upright according to the track's preferred transform.
/// Seeking backwards reopens the file (AVAssetReader is forward-only).
/// @param frame  Requested time in milliseconds from the start of the video.
/// @return The decoded frame, or nil if no frame could be decoded.
-(UIImage*) readVideoAtFrame:(long)frame
{
    if(frame < msTime)
    {
        // AVAssetReader cannot seek backwards; restart from the beginning.
        [self closeVideo];
        [self openVideo:self.filePath];
    }

    if(frame < nextMsTime)
    {
        // The buffered frame still covers the requested time.
    }
    else if([self isEOF])
    {
        // Past the end of the file; keep serving the last decoded frame.
    }
    else
    {
        // Advance: retire the current buffer and pull samples until one with
        // a presentation time past `frame` is queued in nextBuffer.
        if(buffer != NULL)
        {
            CMSampleBufferInvalidate(buffer);
            CFRelease(buffer);
        }
        buffer = nextBuffer;
        nextBuffer = NULL;   // avoid aliasing buffer (double-release hazard)
        msTime = nextMsTime;

        while([mAssetReader status] == AVAssetReaderStatusReading)
        {
            nextBuffer = [mReaderOutput copyNextSampleBuffer];

            if(nextBuffer == NULL)
            {
                // copyNextSampleBuffer returns NULL at end-of-stream or on
                // failure; querying its timestamp would be undefined.
                TLDebugS(@"EOF!");
                break;
            }

            CMTime pts = CMSampleBufferGetPresentationTimeStamp(nextBuffer);
            nextMsTime = (int)(CMTimeGetSeconds(pts) * 1000);

            if(nextMsTime > frame)
                break;

            if([mAssetReader status] == AVAssetReaderStatusCompleted)
            {
                TLDebugS(@"EOF!");
                break;
            }

            if(buffer != NULL)
            {
                CMSampleBufferInvalidate(buffer);
                CFRelease(buffer);
            }
            buffer = nextBuffer;
            msTime = nextMsTime;
        }
    }

    if(buffer == NULL)
    {
        // Nothing was ever decoded (open failed or empty track).
        return nil;
    }

    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(buffer);

    UIImage* uiImage = [Utility UIimageFromSampleBuffer:imageBuffer];

    // Re-orient the raw pixel data to match the track's rotation.
    if(CGAffineTransformIsIdentity(transform))
    {
        // Already upright.
    }
    else if(CGAffineTransformEqualToTransform(transform, CGAffineTransformMakeRotation(-M_PI_2)))
    {
        uiImage = [[UIImage alloc] initWithCGImage: uiImage.CGImage
                                             scale: 1.0
                                       orientation: UIImageOrientationLeft];
    }
    else if(CGAffineTransformEqualToTransform(transform, CGAffineTransformMakeRotation(M_PI_2)))
    {
        uiImage = [[UIImage alloc] initWithCGImage: uiImage.CGImage
                                             scale: 1.0
                                       orientation: UIImageOrientationRight];
    }
    else if(CGAffineTransformEqualToTransform(transform, CGAffineTransformMakeRotation(M_PI)))
    {
        uiImage = [[UIImage alloc] initWithCGImage: uiImage.CGImage
                                             scale: 1.0
                                       orientation: UIImageOrientationDown];
    }

    return uiImage;
}

/// YES once the asset reader has consumed every sample in the file.
-(BOOL) isEOF
{
    return AVAssetReaderStatusCompleted == [mAssetReader status];
}

/// Returns the raw sample buffer covering `frame` milliseconds. Ownership of
/// the buffer stays with the reader — callers must not release it.
/// Seeking backwards (or a dead reader) reopens the file, preserving the
/// audio player's rate and position if audio was active.
/// @param frame  Requested time in milliseconds from the start of the video.
/// @return The current sample buffer, or NULL if nothing could be decoded.
-(CMSampleBufferRef) readVideoSampleAtFrame:(long)frame
{
    if(frame < msTime)
    {
        TLDebugS(@"Forced close, %ld / %d", frame, msTime);

        // Remember audio playback state so it survives the reopen.
        float rate = 0;
        CMTime resumeTime = kCMTimeZero;
        BOOL restoreAudio = NO;
        if(audioPlayer != nil)
        {
            restoreAudio = YES;
            rate = [audioPlayer rate];
            resumeTime = [audioPlayer currentTime];
        }

        // AVAssetReader cannot seek backwards; restart from the beginning.
        [self closeVideo];
        [self openVideo:self.filePath];

        if(restoreAudio)
        {
            [self playAudio];
            [audioPlayer setRate:rate];
            [audioPlayer seekToTime:resumeTime];
        }
    }

    if(frame < nextMsTime)
    {
        // The buffered frame still covers the requested time.
    }
    else if([self isEOF])
    {
        // Past the end of the file; keep serving the last decoded frame.
    }
    else
    {
        if(buffer != NULL)
        {
            CMSampleBufferInvalidate(buffer);
            CFRelease(buffer);
        }

        buffer = nextBuffer;
        nextBuffer = NULL;   // avoid aliasing buffer (double-release hazard)
        msTime = nextMsTime;

        // A reader that stopped for any reason other than completion is
        // unusable; recreate it from scratch.
        if([mAssetReader status] != AVAssetReaderStatusReading)
        {
            if([mAssetReader status] == AVAssetReaderStatusFailed)
            {
                TLDebugS(@"Failed!");
            }

            TLDebugS(@"MAssetReader status suck! = %d", [mAssetReader status]);
            [self closeVideo];
            [self openVideo:self.filePath];
        }

        while([mAssetReader status] == AVAssetReaderStatusReading)
        {
            nextBuffer = [mReaderOutput copyNextSampleBuffer];

            if(nextBuffer == NULL)
            {
                // copyNextSampleBuffer returns NULL at end-of-stream or on
                // failure; querying its timestamp would be undefined.
                TLDebugS(@"EOF!");
                break;
            }

            CMTime pts = CMSampleBufferGetPresentationTimeStamp(nextBuffer);
            nextMsTime = (int)(CMTimeGetSeconds(pts) * 1000);

            // NOTE(review): the `msTime > 0` guard appears intended to force
            // at least one buffer swap right after a reopen — confirm.
            if(nextMsTime > frame && msTime > 0)
            {
                break;
            }
            if([mAssetReader status] == AVAssetReaderStatusCompleted)
            {
                TLDebugS(@"EOF!");
                break;
            }

            if(buffer != NULL)
            {
                CMSampleBufferInvalidate(buffer);
                CFRelease(buffer);
            }
            buffer = nextBuffer;
            msTime = nextMsTime;
        }
    }

    return buffer;
}


/// Tears down the audio player, asset reader, asset, composition, and any
/// outstanding sample buffers, returning the object to its unopened state.
/// `buffer` and `nextBuffer` can briefly alias the same sample buffer (the
/// read methods do `buffer = nextBuffer` before refilling), so the shared
/// buffer must only be released once.
-(void) closeVideo
{
    if(audioPlayer != nil)
    {
        [audioPlayer pause];
        audioPlayer = nil;
    }

    [mAssetReader cancelReading];

    mReaderOutput = nil;
    mAssetReader = nil;
    mAsset = nil;
    composition = nil;

    if(nextBuffer != NULL && nextBuffer != buffer)
    {
        CMSampleBufferInvalidate(nextBuffer);
        CFRelease(nextBuffer);
    }
    nextBuffer = NULL;

    if(buffer != NULL)
    {
        CMSampleBufferInvalidate(buffer);
        CFRelease(buffer);
        buffer = NULL;
    }

    isInitialized = NO;

    TLDebugS(@"Close!");
}

/// Replaces any existing audio player with a fresh one over the audio
/// composition and starts playback from the beginning.
/// Does nothing when no video has been opened.
-(void) playAudio
{
    if(composition == nil) return;

    // Stop and discard the previous player, if any (messaging nil is a no-op).
    [audioPlayer pause];
    audioPlayer = nil;

    AVPlayerItem* item = [AVPlayerItem playerItemWithAsset:composition];
    audioPlayer = [[AVPlayer alloc] initWithPlayerItem:item];
    [audioPlayer play];
}

/// Replaces any existing audio player with a fresh one over the audio
/// composition, seeks to `time` seconds, and starts playback.
/// Does nothing when no video has been opened.
-(void) playAudioFromTime:(NSTimeInterval)time
{
    if(composition == nil) return;

    // Stop and discard the previous player, if any (messaging nil is a no-op).
    [audioPlayer pause];
    audioPlayer = nil;

    AVPlayerItem* item = [AVPlayerItem playerItemWithAsset:composition];
    audioPlayer = [[AVPlayer alloc] initWithPlayerItem:item];

    // Millisecond-precision seek target.
    [audioPlayer seekToTime:CMTimeMakeWithSeconds(time, 1000)];
    [audioPlayer play];
}

/// Pauses audio playback; harmless when no player exists
/// (messaging nil is a no-op).
-(void) pauseAudio
{
    [audioPlayer pause];
}
/// Resumes audio playback; harmless when no player exists
/// (messaging nil is a no-op).
-(void) resumeAudio
{
    [audioPlayer play];
}
@end
