//
//  InterviewSegment.m
//  NewsMaker Touch
//
//  Created by Reisen on 10/8/14.
//  Copyright (c) 2014 Reisen. All rights reserved.
//

#import "InterviewSegment.h"
#import "Utility.h"
#import "VideoRecorder.h"
#import "TalkDefine.h"

@implementation InterviewSegment

// Interview segments always support in-app preview playback.
-(BOOL)canPreview
{
    return YES;
}

// Prepares the on-screen preview: reveals the preview layer's layout view,
// clears any leftover subviews from a previous run, then rebuilds the layout
// in live-preview mode (encoding:NO -> AVPlayer-backed video regions).
-(void) initializePreview
{
    PreviewView* preview = self.currentPreviewLayer;
    preview.layoutView.hidden = NO;

    // Drop stale content before rebuilding.
    for(UIView* stale in [preview.layoutView.subviews copy])
        [stale removeFromSuperview];

    [self initializePreviewInView:preview.layoutView encoding:NO];
}

// Convenience overload used by the encode path; encoding:YES makes video
// regions static thumbnails instead of live AVPlayer layers.
-(void) initializePreviewInView:(UIView*)view
{
    [self initializePreviewInView:view encoding:YES];
}

// Builds the interview layout inside `view` from the JSON template at
// self.root/self.src_template. Template coordinates are authored against a
// 768x1024 canvas and scaled to the target view.
//
//   encoding == YES : offline encode path — video regions receive a static
//                     UIImageView (later fed frame-by-frame by seekToTime:).
//   encoding == NO  : live preview path — video regions receive an
//                     AVPlayerLayer, and qPlayer's "status" is KVO-observed.
//
// Side effects: repopulates answerViews/questionViews and assigns the
// aLayer/qLayer/aPlayer/qPlayer/previewImage/label1/label2 ivars.
-(void) initializePreviewInView:(UIView*)view encoding:(BOOL)encoding
{
    [Utility startTiming];
    
    // Views shown only during the answer / question phase, respectively.
    answerViews = [NSMutableArray array];
    questionViews = [NSMutableArray array];
    
    // NOTE(review): no nil-check on the file data — a missing template file
    // would crash JSONObjectWithData; confirm the template always exists.
    NSString* path = [self.root stringByAppendingPathComponent:self.src_template];
//    [[NSBundle mainBundle] pathForResource:self.src_template ofType:@"plist"];
//    NSDictionary* dict = [NSDictionary dictionaryWithContentsOfFile:path];
    NSDictionary* dict = [NSJSONSerialization JSONObjectWithData:[NSData dataWithContentsOfFile:path] options:NSJSONReadingMutableContainers error:nil];
    
    NSArray* items = [dict objectForKey:@"items"];
    
    // Scale factors from the 768x1024 design canvas to the actual view size.
    CGFloat wFactor = view.bounds.size.width / 768;
    CGFloat hFactor = view.bounds.size.height / 1024;
    for(NSDictionary* itemDict in items)
    {
        NSString* type = itemDict[@"type"];
        NSString* itemID = itemDict[@"id"];
        CGRect frame = CGRectFromString(itemDict[@"frame"]);
        
        // Rescale the authored frame to the target view.
        frame = CGRectMake(frame.origin.x * wFactor, frame.origin.y * hFactor, frame.size.width * wFactor, frame.size.height * hFactor);
        
        // --- Static image items ---------------------------------------------
        if([type isEqualToString:@"image"])
        {
            UIImageView* backgroundView = [[UIImageView alloc] initWithFrame:frame];
            [backgroundView setContentMode:UIViewContentModeScaleAspectFill];
            
            backgroundView.image = [UIImage imageWithContentsOfFile:[self.fileRoot stringByAppendingPathComponent:itemDict[@"source"]]];
            [view addSubview:backgroundView];
            
            // "visible" decides which playback phase the image belongs to.
            if([itemDict[@"visible"] isEqualToString:@"answer"])
                [answerViews addObject:backgroundView];
            
            else if([itemDict[@"visible"] isEqualToString:@"question"])
                [questionViews addObject:backgroundView];
        }
        
        // --- Video region items ---------------------------------------------
        if([type isEqualToString:@"video"])
        {
            UIView* videoView = [[UIView alloc] initWithFrame:frame];
            //videoView.backgroundColor = [UIColor colorWithWhite:0.5f alpha:0.5f];
            [view addSubview:videoView];
            
            if([itemID isEqualToString:@"answer"])
            {
                [answerViews addObject:videoView];
                aLayer = videoView;
            }
            
            else if([itemID isEqualToString:@"question"])
            {
                [questionViews addObject:videoView];
                qLayer = videoView;
            }
            
            if(encoding)
            {
                // Encode path: the region is just an image view; seekToTime:
                // pushes VideoReader frames into it. The question region is
                // seeded with the first frame of a clip so it isn't blank.
                /*
                if(videoView == qLayer)
                {
                    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:(videoView == aLayer?self.vid_mp4:self.q1_mp4)] options:nil];
                    AVAssetImageGenerator *generateImg = [[AVAssetImageGenerator alloc] initWithAsset:asset];
                    NSError *error = NULL;
                    CMTime time = CMTimeMake(0, 65);
                    CGImageRef refImg = [generateImg copyCGImageAtTime:time actualTime:NULL error:&error];
                    
                    videoView.layer.contents = (__bridge id)(refImg);
                  //  CGImageRelease(refImg);
                }
                 */
                //*
                UIImageView* img = [[UIImageView alloc] initWithFrame:videoView.bounds];
                img.contentMode = UIViewContentModeScaleAspectFill;
                img.clipsToBounds = YES;
                videoView.clipsToBounds = YES;
                [videoView addSubview:img];
                
                if(videoView == qLayer)
                {
                // NOTE(review): this branch requires videoView == qLayer, so the
                // ternary below always resolves to self.q1_mp4 — confirm intended.
                AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:(videoView == aLayer?self.vid_mp4:self.q1_mp4)] options:nil];
                AVAssetImageGenerator *generateImg = [[AVAssetImageGenerator alloc] initWithAsset:asset];
                    generateImg.appliesPreferredTrackTransform = YES;
                NSError *error = NULL;
                // Grab the frame at t = 0 as a poster image.
                CMTime time = CMTimeMake(0, 65);
                CGImageRef refImg = [generateImg copyCGImageAtTime:time actualTime:NULL error:&error];
                    
                img.image = [UIImage imageWithCGImage:refImg];
                CGImageRelease(refImg);
                }
                 //*/
            }
            else
            {
                // Live preview path: one AVPlayer + AVPlayerLayer per region.
                AVPlayer* player = [AVPlayer playerWithPlayerItem:nil];
                player.actionAtItemEnd = AVPlayerActionAtItemEndNone;
                
                AVPlayerLayer* layer = [AVPlayerLayer playerLayerWithPlayer:player];
                layer.videoGravity = AVLayerVideoGravityResizeAspectFill;
                layer.frame = videoView.bounds;
                [videoView.layer addSublayer:layer];
                
                if([itemID isEqualToString:@"answer"])
                {
                    aPlayer = player;
                    
                    // Poster image shown behind the answer region, seeded with
                    // the first frame of question 1.
                    previewImage = [[UIImageView alloc] initWithFrame:videoView.bounds];
                    previewImage.contentMode = UIViewContentModeScaleAspectFill;
                    previewImage.clipsToBounds = YES;
                    [videoView.superview insertSubview:previewImage belowSubview:videoView];
                    
                    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:self.q1_mp4] options:nil];
                    AVAssetImageGenerator *generateImg = [[AVAssetImageGenerator alloc] initWithAsset:asset];
                    generateImg.appliesPreferredTrackTransform = YES;
                    NSError *error = NULL;
                    CMTime time = CMTimeMake(0, 65);
                    CGImageRef refImg = [generateImg copyCGImageAtTime:time actualTime:NULL error:&error];
                    
                    previewImage.image = [UIImage imageWithCGImage:refImg];
                    CGImageRelease(refImg);
                }
                
                else if([itemID isEqualToString:@"question"])
                {
                    // Detach any observer left on a previous qPlayer before
                    // replacing it; @try guards the "never attached" case.
                    if(qPlayer != nil)
                    {
                        @try{
                            [qPlayer removeObserver:self forKeyPath:@"status"];
                        }@catch(id anException){
                            //do nothing, obviously it wasn't attached because an exception was thrown
                        }
                    }
                    qPlayer = player;
                }
            }
        }
        
        // --- Subtitle label items -------------------------------------------
        if([type isEqualToString:@"label"])
        {
            UILabel* label = [[UILabel alloc] initWithFrame:frame];
            label.font = [UIFont boldSystemFontOfSize:[itemDict[@"fontSize"] integerValue] * hFactor];
            label.textAlignment = NSTextAlignmentCenter;
            label.textColor = [UIColor blackColor];
            // NOTE(review): 0 is the documented "unlimited lines" value; -1
            // appears to behave the same here — confirm before changing.
            label.numberOfLines = -1;
            
            // Optional "r,g,b,a" background color string.
            if(itemDict[@"background"] != nil)
            {
                NSArray *components = [itemDict[@"background"] componentsSeparatedByString:@","];
                CGFloat r = [[components objectAtIndex:0] floatValue];
                CGFloat g = [[components objectAtIndex:1] floatValue];
                CGFloat b = [[components objectAtIndex:2] floatValue];
                CGFloat a = [[components objectAtIndex:3] floatValue];
                label.backgroundColor = [UIColor colorWithRed:r green:g blue:b alpha:a];
            }
            
            [view addSubview:label];
            
            // label1/label2 carry the question text and its translation.
            if([itemID isEqualToString:@"chinese"]) label1 = label;
            if([itemID isEqualToString:@"english"]) label2 = label;
        }
    }
    
    // Load the recorded answer video into the answer player, if present.
    if(self.vid_mp4 != nil)
    {
        NSURL *fileURL = [NSURL fileURLWithPath:self.vid_mp4];
    
        [aPlayer replaceCurrentItemWithPlayerItem:[AVPlayerItem playerItemWithURL:fileURL]];
    }
    
    // Initial state: question views visible, answer views hidden.
    for(UIView* v in questionViews)
        v.hidden = NO;
    
    for(UIView* v in answerViews) v.hidden = YES;
    
    // Preview path only: watch qPlayer readiness (handled in
    // observeValueForKeyPath:..., which also removes the observer).
    if(!encoding)
        [qPlayer addObserver:self forKeyPath:@"status" options:0 context:nil];

    [Utility endTimingWithTag:@"ASS"];
}

// KVO callback for qPlayer's "status" (attached in the live-preview path of
// initializePreviewInView:encoding:). Once the player is ready, starts the
// bgm track and reveals the layout. The observation is one-shot: it detaches
// itself on the first status change.
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object
                        change:(NSDictionary *)change context:(void *)context {
    if (object == qPlayer && [keyPath isEqualToString:@"status"]) {
        if (qPlayer.status == AVPlayerStatusReadyToPlay) {
            [bgmPlayer play];
            
            self.currentPreviewLayer.layoutView.hidden = NO;
            TLDebugS(@"Video loaded!");
        }
        
        // One-shot: detach as soon as the first status change arrives.
        [qPlayer removeObserver:self forKeyPath:@"status"];
    } else {
        // Fix: unhandled observations must be forwarded to super per the KVO
        // contract, otherwise superclass registrations are silently dropped.
        [super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
    }
}

// Fired via AVPlayerItemDidPlayToEndTimeNotification when the current question
// clip finishes: flips the layout from the "question" state to the "answer"
// state and un-mutes the answer video.
-(void)questionCompleted:(AVPlayerItem*)sender
{
    // Detach from the notification center (removes every observation this
    // object registered there, matching the original behavior).
    [[NSNotificationCenter defaultCenter] removeObserver:self];

    TLDebugS(@"QPlayer completed...");

    previewImage.image = nil;

    [questionViews enumerateObjectsUsingBlock:^(UIView* questionView, NSUInteger idx, BOOL* stop) {
        questionView.hidden = YES;
    }];
    [answerViews enumerateObjectsUsingBlock:^(UIView* answerView, NSUInteger idx, BOOL* stop) {
        answerView.hidden = NO;
    }];

    // The answer player was running muted under the question; restore audio.
    aPlayer.volume = 1;
}


// Starts playback from the top, or simply resumes when we are merely paused.
-(void)play
{
    if(!isPaused)
    {
        isPaused = NO;
        [self startPlaybackLoop];
        return;
    }

    TLDebugS(@"Unpause!");

    // Resume all three players in place.
    isPaused = NO;
    [aPlayer play];
    [qPlayer play];
    [bgmPlayer play];
}


// Suspends playback; play (or goToLastQuestion) resumes from the same spot.
// playbackLoop: keeps ticking but freezes the elapsed-time bookkeeping while
// isPaused is set.
-(void)pause
{
    isPaused = YES;
    [aPlayer pause];
    [qPlayer pause];
    [bgmPlayer pause];
}

// Kicks off playback from the beginning of the segment: resets the timing
// bookkeeping, shows the question views, pre-seeks the muted answer player,
// and starts the 40 Hz playback timer.
-(void) startPlaybackLoop
{
    // Restart cleanly if a previous timer is still alive (invalidating a nil
    // timer is a harmless no-op).
    [playbackTimer invalidate];
    playbackTimer = nil;

    lastPlaybackLoop = playbackStarted = [NSDate timeIntervalSinceReferenceDate];
    duration = self.recordTimeInfo.time_to;
    isPlaying = YES;

    for(UIView* questionView in questionViews)
        questionView.hidden = NO;
    for(UIView* answerView in answerViews)
        answerView.hidden = YES;

    // The answer video runs muted underneath until the first question ends.
    aPlayer.volume = 0;
    [aPlayer seekToTime:CMTimeMakeWithSeconds(self.recordTimeInfo.time_from, 1000)];
    [aPlayer play];

    // Run one tick immediately so the first question triggers without delay.
    [self playbackLoop:nil];

    playbackTimer = [NSTimer scheduledTimerWithTimeInterval:0.025 target:self selector:@selector(playbackLoop:) userInfo:nil repeats:YES];
}

// Starts playback positioned at the final recorded question rather than from
// the beginning. Walks time_to backwards against the precomputed question
// start offsets (pTime_1 / pTime_2) to decide which question clip is "last",
// then back-dates playbackStarted so the shared playbackLoop: logic fires the
// correct question trigger on its next tick.
-(void) startPlaybackLoopFromLast
{
    if(playbackTimer!=nil)
    {
        [playbackTimer invalidate];
        playbackTimer = nil;
    }
    {
        // Default assumption: question 3 (starting at pTime_2) is the last one.
        NSTimeInterval lastQuestionStart = self.recordTimeInfo.pTime_2;
        NSString* lastQuestionPath = self.q3_mp4;
        
        // Recording stopped exactly at pTime_2 -> question 3 never played;
        // fall back to question 2.
        if(self.recordTimeInfo.time_to == self.recordTimeInfo.pTime_2)
        {
     //       NSLog(@"GO BAAAAAACK");
            lastQuestionStart = self.recordTimeInfo.pTime_1;
            lastQuestionPath = self.q2_mp4;
        }
        
        // Likewise, a stop at pTime_1 means question 1 was the last one.
        if(self.recordTimeInfo.time_to == self.recordTimeInfo.pTime_1)
        {
            lastQuestionStart = 0;
            lastQuestionPath = self.q1_mp4;
        }
        
        // Where the answer video should resume, in clip-local time.
        CGFloat aPlayerStart = lastQuestionStart + self.recordTimeInfo.time_from;

        
        
        
        lastPlaybackLoop = playbackStarted = [NSDate timeIntervalSinceReferenceDate];
        
        // Back-date the start time so "elapsed" already equals the chosen
        // question's start offset.
        playbackStarted = lastPlaybackLoop - lastQuestionStart;
        
        
        duration = self.recordTimeInfo.time_to;
        isPlaying = YES;
        
        for(UIView* v in questionViews)
            v.hidden = NO;
        
        for(UIView* v in answerViews) v.hidden = YES;
        // Answer runs muted under the question until it finishes.
        aPlayer.volume = 0;
        [aPlayer seekToTime:CMTimeMakeWithSeconds(aPlayerStart, 1000)];
        [aPlayer play];
        
        //Replace preview image
        // Seed the poster image with frame 0 of the chosen question clip.
        AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:lastQuestionPath] options:nil];
        AVAssetImageGenerator *generateImg = [[AVAssetImageGenerator alloc] initWithAsset:asset];
        generateImg.appliesPreferredTrackTransform = YES;
        NSError *error = NULL;
        CMTime time = CMTimeMake(0, 65);
        CGImageRef refImg = [generateImg copyCGImageAtTime:time actualTime:NULL error:&error];
        
        previewImage.image = [UIImage imageWithCGImage:refImg];
        CGImageRelease(refImg);
        
        AVPlayerItem* qItem = [AVPlayerItem playerItemWithAsset:asset];
        [qPlayer replaceCurrentItemWithPlayerItem:qItem];
        
        // Immediate first tick, then the 40 Hz timer takes over.
        [self playbackLoop:nil];
        
        playbackTimer = [NSTimer scheduledTimerWithTimeInterval:0.025 target:self selector:@selector(playbackLoop:) userInfo:nil repeats:YES];
    }
}

// Timer callback (40 Hz) driving live playback. Compares the previous and
// current elapsed offsets (lastgap / gap) against the question trigger times
// so each question fires exactly once as the play head crosses its boundary:
//   0        -> question 1
//   pTime_1  -> question 2
//   pTime_2  -> question 3
// Crossing a trigger shows the question views, mutes the answer player, swaps
// qPlayer's item + subtitle labels, and (re)starts the optional bgm track.
// questionCompleted: flips the layout back when a question clip finishes.
-(void)playbackLoop:(NSTimer*)timer
{
    NSTimeInterval now = [NSDate timeIntervalSinceReferenceDate];
    
    @autoreleasepool {
        // While paused, push playbackStarted forward by the wall-clock delta
        // so the elapsed offset effectively stands still.
        if(isPaused)
        {
            playbackStarted += now - lastPlaybackLoop;
            lastPlaybackLoop = now;
            return;
        }
        
        // Elapsed offset as of the previous tick...
        double lastgap = lastPlaybackLoop - playbackStarted;
        
        lastPlaybackLoop = now;
        
        // ...and as of this tick. A trigger T fires when lastgap <= T < gap.
        double gap = now - playbackStarted;
        
        // Lazily build the shared subtitle text attributes.
        if(stringAttributes == nil)
        {
            NSMutableParagraphStyle *paragraphStyle = [[NSMutableParagraphStyle alloc] init];
            [paragraphStyle setAlignment:NSTextAlignmentCenter];
            [paragraphStyle setLineBreakMode:NSLineBreakByWordWrapping];
            
            stringAttributes = @{NSFontAttributeName: label1.font,
                                 //NSStrokeColorAttributeName: [UIColor whiteColor],
                                 //NSStrokeWidthAttributeName: [NSNumber numberWithInt:-3],
                                 NSForegroundColorAttributeName: [UIColor blackColor],
                                 NSParagraphStyleAttributeName:paragraphStyle};
        }
        
        // --- Trigger: question 1 (offset 0) ---------------------------------
        if(lastgap <= 0 && gap > 0)
        {
            for(UIView* v in questionViews) v.hidden = NO;
            
            for(UIView* v in answerViews) v.hidden = YES;
            aPlayer.volume = 0;
            
            label1.attributedText = [[NSAttributedString alloc] initWithString:self.q1 attributes:stringAttributes];
            label2.attributedText = [[NSAttributedString alloc] initWithString:self.q1_sub attributes:stringAttributes];
            label1.hidden = NO;
            label2.hidden = NO;

            AVPlayerItem* qItem = [AVPlayerItem playerItemWithURL:[NSURL fileURLWithPath:self.q1_mp4]];
            [qPlayer replaceCurrentItemWithPlayerItem:qItem];

            // questionCompleted: restores the answer state when the clip ends.
            [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(questionCompleted:) name:AVPlayerItemDidPlayToEndTimeNotification object:qItem];
            
            [qPlayer play];
            
            if(bgmPlayer != nil)
                [bgmPlayer stop];
            
            // Optional bgm for this question; started immediately only if the
            // video is already ready (otherwise the KVO callback starts it).
            if(self.q1_mp3 != nil)
            {
                bgmPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:[NSURL fileURLWithPath:self.q1_mp3] error:nil];

                if (qPlayer.status == AVPlayerStatusReadyToPlay)
                    [bgmPlayer play];
            }
        }
        
        // --- Trigger: question 2 (offset pTime_1) ---------------------------
        else if(lastgap <= self.recordTimeInfo.pTime_1 && gap > self.recordTimeInfo.pTime_1)
        {
            for(UIView* v in questionViews) v.hidden = NO;
            
            for(UIView* v in answerViews) v.hidden = YES;
            aPlayer.volume = 0;
            
            label1.attributedText = [[NSAttributedString alloc] initWithString:self.q2 attributes:stringAttributes];
            label2.attributedText = [[NSAttributedString alloc] initWithString:self.q2_sub attributes:stringAttributes];
            label1.hidden = NO;
            label2.hidden = NO;
            
            if(bgmPlayer != nil)
                [bgmPlayer stop];
            
            
            //If recording doesn't stop here...
            if(self.recordTimeInfo.time_to > self.recordTimeInfo.pTime_1)
            {
                NSLog(@"PLAY ME");
                AVPlayerItem* qItem = [AVPlayerItem playerItemWithURL:[NSURL fileURLWithPath:self.q2_mp4]];
                [qPlayer replaceCurrentItemWithPlayerItem:qItem];
                
                [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(questionCompleted:) name:AVPlayerItemDidPlayToEndTimeNotification object:qItem];
                [qPlayer play];
                
                if(self.q2_mp3 != nil)
                {
                    bgmPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:[NSURL fileURLWithPath:self.q2_mp3] error:nil];
                    
                    if (qPlayer.status == AVPlayerStatusReadyToPlay)
                        [bgmPlayer play];
                }
            }
        }
        
        // --- Trigger: question 3 (offset pTime_2) ---------------------------
        else if(lastgap <= self.recordTimeInfo.pTime_2 && gap > self.recordTimeInfo.pTime_2)
        {
            for(UIView* v in questionViews) v.hidden = NO;
            for(UIView* v in answerViews) v.hidden = YES;
            aPlayer.volume = 0;
            
            label1.attributedText = [[NSAttributedString alloc] initWithString:self.q3 attributes:stringAttributes];
            label2.attributedText = [[NSAttributedString alloc] initWithString:self.q3_sub attributes:stringAttributes];
            label1.hidden = NO;
            label2.hidden = NO;
            
            if(bgmPlayer != nil)
                [bgmPlayer stop];
            
            //If recording doesn't stop here...
            if(self.recordTimeInfo.time_to > self.recordTimeInfo.pTime_2)
            {
                AVPlayerItem* qItem = [AVPlayerItem playerItemWithURL:[NSURL fileURLWithPath:self.q3_mp4]];
                [qPlayer replaceCurrentItemWithPlayerItem:qItem];
                
                
                [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(questionCompleted:) name:AVPlayerItemDidPlayToEndTimeNotification object:qItem];
                
                [qPlayer play];
                
                if(self.q3_mp3 != nil)
                {
                    bgmPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:[NSURL fileURLWithPath:self.q3_mp3] error:nil];
                    
                    if (qPlayer.status == AVPlayerStatusReadyToPlay)
                        [bgmPlayer play];
                }
            }
        }
        
        // End of the recorded timeline: tear everything down.
        if(gap >= duration)
            [self stop];
    }
}

// Drives the offline encode: renders the visual state for playback offset
// `gap` (seconds from the start of the segment) by toggling question/answer
// views, setting the subtitle labels, and pulling the matching frame from the
// appropriate VideoReader into the visible image view.
//
// Timeline layout (seconds): question 1 clip, answer slice 1, question 2
// clip, answer slice 2, question 3 clip, answer slice 3, up to time_to.
// Answer frames are read at (time_from + gap) in the source recording;
// question frames are read relative to their own clip start.
//
// Returns the (possibly clamped) offset actually rendered.
-(CGFloat)seekToTime:(float)gap
{
    // (Removed dead locals: maxTime/interpolation were computed but unused.)
    // Never render past the end of the segment.
    if(gap > duration) gap = duration;
    
    // Lazily build the shared subtitle attributes (centered, black text).
    if(stringAttributes == nil)
    {
        NSMutableParagraphStyle *paragraphStyle = [[NSMutableParagraphStyle alloc] init];
        [paragraphStyle setAlignment:NSTextAlignmentCenter];
        stringAttributes = @{NSFontAttributeName: label1.font,
//                             NSStrokeColorAttributeName: [UIColor whiteColor],
//                             NSStrokeWidthAttributeName: [NSNumber numberWithInt:-5],
                             NSForegroundColorAttributeName: [UIColor blackColor],
                             NSParagraphStyleAttributeName:paragraphStyle};
    }
    
    label1.hidden = NO;
    label2.hidden = NO;
    
    // NOTE: readVideoAtFrame: takes milliseconds; gap is in seconds.
    if(gap < q1Dur)
    {
        // Question 1 clip.
        for(UIView* v in questionViews) v.hidden = NO;
        for(UIView* v in answerViews) v.hidden = YES;
        
        label1.attributedText = [[NSAttributedString alloc] initWithString:self.q1 attributes:stringAttributes];
        label2.attributedText = [[NSAttributedString alloc] initWithString:self.q1_sub attributes:stringAttributes];
        
        UIImageView* imgV = [qLayer.subviews firstObject];
        
        imgV.image = [q1Reader readVideoAtFrame:(long)(gap * 1000)];
    }
    
    else if(gap < self.recordTimeInfo.pTime_1)
    {
        // Answer slice 1 (question 1 labels stay on screen).
        for(UIView* v in questionViews) v.hidden = YES;
        for(UIView* v in answerViews) v.hidden = NO;
        
        label1.attributedText = [[NSAttributedString alloc] initWithString:self.q1 attributes:stringAttributes];
        label2.attributedText = [[NSAttributedString alloc] initWithString:self.q1_sub attributes:stringAttributes];
        
        UIImageView* imgV = [aLayer.subviews firstObject];
        imgV.image = [aReader readVideoAtFrame:(long)((self.recordTimeInfo.time_from + gap) * 1000)];
    }
    
    else if(gap < self.recordTimeInfo.pTime_1 + q2Dur)
    {
        // Question 2 clip (frame index relative to the clip start).
        for(UIView* v in questionViews) v.hidden = NO;
        for(UIView* v in answerViews) v.hidden = YES;
        
        label1.attributedText = [[NSAttributedString alloc] initWithString:self.q2 attributes:stringAttributes];
        label2.attributedText = [[NSAttributedString alloc] initWithString:self.q2_sub attributes:stringAttributes];
        
        UIImageView* imgV = [qLayer.subviews firstObject];
        imgV.image = [q2Reader readVideoAtFrame:(long)((gap - self.recordTimeInfo.pTime_1) * 1000)];
    }
    else if(gap < self.recordTimeInfo.pTime_2)
    {
        // Answer slice 2.
        for(UIView* v in questionViews) v.hidden = YES;
        for(UIView* v in answerViews) v.hidden = NO;
        
        label1.attributedText = [[NSAttributedString alloc] initWithString:self.q2 attributes:stringAttributes];
        label2.attributedText = [[NSAttributedString alloc] initWithString:self.q2_sub attributes:stringAttributes];
        
        UIImageView* imgV = [aLayer.subviews firstObject];
        imgV.image = [aReader readVideoAtFrame:(long)((self.recordTimeInfo.time_from + gap) * 1000)];
    }
    else if(gap < self.recordTimeInfo.pTime_2 + q3Dur)
    {
        // Question 3 clip.
        for(UIView* v in questionViews) v.hidden = NO;
        for(UIView* v in answerViews) v.hidden = YES;
        
        label1.attributedText = [[NSAttributedString alloc] initWithString:self.q3 attributes:stringAttributes];
        label2.attributedText = [[NSAttributedString alloc] initWithString:self.q3_sub attributes:stringAttributes];
        
        
        UIImageView* imgV = [qLayer.subviews firstObject];
        imgV.image = [q3Reader readVideoAtFrame:(long)((gap - self.recordTimeInfo.pTime_2) * 1000)];
    }
    else if(gap < self.recordTimeInfo.time_to)
    {
        // Answer slice 3 (final section).
        for(UIView* v in questionViews) v.hidden = YES;
        for(UIView* v in answerViews) v.hidden = NO;
        
        label1.attributedText = [[NSAttributedString alloc] initWithString:self.q3 attributes:stringAttributes];
        label2.attributedText = [[NSAttributedString alloc] initWithString:self.q3_sub attributes:stringAttributes];
        
        UIImageView* imgV = [aLayer.subviews firstObject];
        imgV.image = [aReader readVideoAtFrame:(long)((self.recordTimeInfo.time_from + gap) * 1000)];
    }
    
    return gap;
}


// Tears down playback: kills the timer, pauses and releases both players and
// their host views, detaches the KVO observer (guarded, since it may already
// have been removed by observeValueForKeyPath:), stops the bgm, and hands
// control to the next segment in the preview.
-(void)stop
{
    [playbackTimer invalidate];
    playbackTimer=nil;
    
    isPlaying = NO;
    isPaused = NO;
    
//    [qLayer removeFromSuperlayer];
    [qPlayer pause];
    
    // The "status" observer is one-shot and may or may not still be attached.
    @try{
        [qPlayer removeObserver:self forKeyPath:@"status"];
    }@catch(id anException){
        //do nothing, obviously it wasn't attached because an exception was thrown
    }
    qLayer = nil;
    qPlayer = nil;
    
//    [aLayer removeFromSuperlayer];
    [aPlayer pause];
    aLayer = nil;
    aPlayer = nil;
    
    [bgmPlayer stop];
    // Advance the preview to the following segment.
    [self.currentPreviewLayer playNextSegment];
}


//Running on main thread;
//*
// Sets up the offline encode: opens a frame reader for each question clip and
// for the answer recording, builds an off-screen layout, and starts the
// VideoRecorder + the self-rescheduling encodeLoop:. Runs on the main thread.
-(void) encodeVideo
{
    // Reader durations come back in milliseconds; cache them in seconds.
    q1Reader = [VideoReader new];
    [q1Reader openVideo:self.q1_mp4];
    q1Dur = q1Reader.duration / 1000.0f;
    
    q2Reader = [VideoReader new];
    [q2Reader openVideo:self.q2_mp4];
    // Bug fix: previously read q1Reader.duration, so q2Dur (and q3Dur below)
    // were always question 1's duration, skewing every later timeline offset.
    q2Dur = q2Reader.duration / 1000.0f;
    
    q3Reader = [VideoReader new];
    [q3Reader openVideo:self.q3_mp4];
    q3Dur = q3Reader.duration / 1000.0f;   // Bug fix: was q1Reader.duration.
    
    aReader = [VideoReader new];
    [aReader openVideo:self.vid_mp4];
    
    // Encode at half the 768x1024 design resolution.
    int w = 768 * 2 / 4;
    int h = 1024 * 2 / 4;
    
    TLDebugS(@"Encode video");
    // Off-screen view the frames are composited into.
    layoutView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, w, h)];
    
    [self initializePreviewInView:layoutView];
    // Cast: subviews.count is NSUInteger; %d expects int.
    TLDebugS(@"Has subviews = %d", (int)layoutView.subviews.count);
    
    duration = self.recordTimeInfo.time_to;
    
    TLDebugS(@"Duration = %f", duration);
    
    NSString* outputPath = [self videoInformation];
    //VideoRecorder*
    recorder = [[VideoRecorder alloc] initWithWidth:w Height:h];
    recorder.recordPath = outputPath;
    recorder.delegate = self;
    [recorder startRecording];
    
    TLDebugS(@"Start recording!");
    
    // Frame-by-frame encode; re-schedules itself until t >= duration.
    [self encodeLoop:[NSNumber numberWithFloat:0]];
}


// One encode step at timestamp `stamp` (seconds). Renders the layout for that
// moment via seekToTime:, snapshots layoutView into a UIImage, hands it to
// the recorder at the matching presentation time, then re-schedules itself
// 1/20 s further along the timeline. Stops and cleans up once `duration` is
// reached. performSelector:afterDelay: keeps the main run loop responsive
// between frames.
-(void)encodeLoop:(NSNumber*)stamp
{
//    TLDebugS(@"Encode loop:%f", [stamp floatValue]);
    
    CGFloat t = [stamp floatValue];
    
    // Past the end: finalize the recording and release all readers.
    if(t >= duration)
    {
        [recorder stopRecording];
        
        recorder = nil;
        layoutView = nil;
        
        [q1Reader closeVideo];
        [q2Reader closeVideo];
        [q3Reader closeVideo];
        [aReader closeVideo];
        
        self.progress = 1;
        TLDebugS(@"Done!!!");
        return;
    }
    
//    [Utility startTiming];
    // Put the layout into the visual state for offset t.
    [self seekToTime:t];

    // Rasterize the off-screen layout at 1x, opaque, no interpolation.
    UIGraphicsBeginImageContextWithOptions(layoutView.frame.size, YES, 1);
    CGContextRef ctx = UIGraphicsGetCurrentContext();
    CGContextSetInterpolationQuality(ctx, kCGInterpolationNone);

    [layoutView.layer renderInContext:ctx];
//    [layoutView drawViewHierarchyInRect:layoutView.bounds afterScreenUpdates:YES];

    UIImage* screenshot = UIGraphicsGetImageFromCurrentImageContext();

    UIGraphicsEndImageContext();
    
    recorder.currentScreen = screenshot;
    
    [recorder encodeFrameAtTime:CMTimeMakeWithSeconds(t, 1000)];
    
    self.progress = t / duration;
    
//    [Utility endTimingWithTag:@"Interview Frame"];
    
    // 20 fps step; the tiny delay yields the run loop between frames.
    [self performSelector:@selector(encodeLoop:) withObject:[NSNumber numberWithFloat:t + 1.0f / 20] afterDelay:0.001f];
}

// Describes the audio timeline of the composed interview starting at `start`:
// question clips alternate with slices of the recorded answer (self.vid_mp4),
// all on track 0. A duration of -1 means "play the whole clip"; "localStart"
// is the offset inside the source recording for answer slices.
-(NSMutableArray*)audioInformationWithStart:(float)start
{
    // Boxing helper; the float parameter conversion matches numberWithFloat:.
    NSNumber* (^num)(float) = ^NSNumber*(float value) {
        return [NSNumber numberWithFloat:value];
    };
    NSNumber* trackZero = [NSNumber numberWithInt:0];

    NSMutableArray* timeline = [NSMutableArray array];

    // Question 1 plays in full at the very start.
    [timeline addObject:@{@"path": self.q1_mp4,
                          @"start": num(start),
                          @"duration": num(-1),
                          @"track": trackZero}];

    // Answer slice between question 1 and question 2.
    [timeline addObject:@{@"path": self.vid_mp4,
                          @"start": num(start + q1Dur),
                          @"localStart": num(self.recordTimeInfo.time_from + q1Dur),
                          @"duration": num(self.recordTimeInfo.pTime_1 - q1Dur),
                          @"track": trackZero}];

    // Question 2 in full.
    [timeline addObject:@{@"path": self.q2_mp4,
                          @"start": num(start + self.recordTimeInfo.pTime_1),
                          @"duration": num(-1),
                          @"track": trackZero}];

    // Answer slice between question 2 and question 3.
    [timeline addObject:@{@"path": self.vid_mp4,
                          @"start": num(start + self.recordTimeInfo.pTime_1 + q2Dur),
                          @"localStart": num(self.recordTimeInfo.time_from + self.recordTimeInfo.pTime_1 + q2Dur),
                          @"duration": num(self.recordTimeInfo.pTime_2 - (self.recordTimeInfo.pTime_1 + q2Dur)),
                          @"track": trackZero}];

    // Question 3 in full.
    [timeline addObject:@{@"path": self.q3_mp4,
                          @"start": num(start + self.recordTimeInfo.pTime_2),
                          @"duration": num(-1),
                          @"track": trackZero}];

    // Final answer slice through the recorded end time.
    [timeline addObject:@{@"path": self.vid_mp4,
                          @"start": num(start + self.recordTimeInfo.pTime_2 + q3Dur),
                          @"localStart": num(self.recordTimeInfo.time_from + self.recordTimeInfo.pTime_2 + q3Dur),
                          @"duration": num(self.recordTimeInfo.time_to - (self.recordTimeInfo.pTime_2 + q3Dur)),
                          @"track": trackZero}];

    return timeline;
}

// Destination path for the encoded interview movie.
-(NSString*)videoInformation
{
    NSString* storageRoot = [Utility rootStorage];
    return [storageRoot stringByAppendingPathComponent:@"interview.mp4"];
}
// Total segment duration in seconds, lazily derived from the record info the
// first time it is requested.
-(float)getDuration
{
    if(duration == 0)
    {
        duration = self.recordTimeInfo.time_to;
    }
    return duration;
}



// Skips forward to the next question boundary; stops playback outright when
// we are already inside the final answer section.
-(void)swipeLeft
{
    double elapsed = lastPlaybackLoop - playbackStarted;

    NSTimeInterval target;
    if(elapsed < self.recordTimeInfo.pTime_1)
    {
        target = self.recordTimeInfo.pTime_1;
    }
    else if(elapsed < self.recordTimeInfo.pTime_2)
    {
        target = self.recordTimeInfo.pTime_2;
    }
    else if(elapsed < self.recordTimeInfo.time_to)
    {
        [self stop];
        return;
    }
    else
    {
        // Already past the end; nothing to skip to.
        return;
    }

    // Back-date the start time so the playback loop sees the new offset, and
    // move the answer player to the matching spot in the source recording.
    playbackStarted = lastPlaybackLoop - target;
    [aPlayer seekToTime:CMTimeMakeWithSeconds(target + self.recordTimeInfo.time_from, 1000)];
}

// Skips backward to the previous question boundary; stops playback when a
// swipe would land before the first question.
-(void)swipeRight
{
    double elapsed = lastPlaybackLoop - playbackStarted;

    NSTimeInterval target;
    if(elapsed > self.recordTimeInfo.pTime_2)
    {
        target = self.recordTimeInfo.pTime_1;
    }
    else if(elapsed > self.recordTimeInfo.pTime_1)
    {
        target = 0;
    }
    else
    {
        [self stop];
        return;
    }

    // Back-date the start time and re-seek the answer player to match.
    playbackStarted = lastPlaybackLoop - target;
    [aPlayer seekToTime:CMTimeMakeWithSeconds(target + self.recordTimeInfo.time_from, 1000)];
}

// Jumps playback to the final recorded question, or simply resumes when we
// are merely paused (mirrors -play).
-(void) goToLastQuestion
{
    if(!isPaused)
    {
        isPaused = NO;
        [self startPlaybackLoopFromLast];
        return;
    }

    TLDebugS(@"Unpause!");

    // Resume all three players in place.
    isPaused = NO;
    [aPlayer play];
    [qPlayer play];
    [bgmPlayer play];
}

@end
