//
//  TalkViewController.m
//  talkDemo
//
//  Created by telen on 15/4/17.
//  Copyright (c) 2015年 刘赞黄Telen. All rights reserved.
//

#import "TalkViewController.h"
#import "TalkDefine.h"
#import "InterviewSegment.h"
#import "TrailerSegment.h"
#import "TextSegment.h"
#import "Utility.h"
#import "PublishView.h"
#import "UploadView.h"
#import  <AssetsLibrary/ALAssetsLibrary.h>

// Private class extension: protocol conformances and lazily-created overlay views.
// NOTE(review): "UploadViewDelete" is presumably a typo for "UploadViewDelegate"
// in the protocol's own declaration; renaming it would require touching UploadView.h.
@interface TalkViewController ()<TalkRecordingViewDelegate, TalkVideoEditDelegate, UploadViewDelete,VideoRecorderDelegate,UIAlertViewDelegate>

// Post-recording edit overlay ("Vedio" spelling follows the existing class name).
@property(nonatomic,strong)TalkVedioEditView* editVedioView;
// Overlay shown while a take is being recorded.
@property(nonatomic,strong)TalkRecordingView* recordingView;
// Start screen listing the interview questions plus record/play/share/delete buttons.
@property(nonatomic,strong)TalkAllQuestionsView* questionsAllView;
// Book-cover artwork shown on the start screen.
@property(nonatomic,strong)TalkBookCoverView* coverView;
// Dimming background behind the start-screen overlays.
@property(nonatomic,strong)TalkBlackBg* blackBg;

@end

@implementation TalkViewController

// Weak reference to the most recently loaded instance (set in -viewDidLoad,
// cleared in -dealloc). __weak avoids extending the controller's lifetime.
static __weak TalkViewController* singleTalkViewController = nil;

// Returns the last TalkViewController whose view was loaded, or nil if none
// is alive. Not a true singleton: it merely tracks the latest instance.
+ (instancetype)shareLastInstance{
    return singleTalkViewController;
}

// Builds the whole screen: camera feed at the back, then the start-screen
// overlays (black background, book cover, question list) which fade in after
// a 0.6s delay, and wires every button to its handler.
- (void)viewDidLoad {
    [super viewDidLoad];
    // Track this instance for +shareLastInstance.
    singleTalkViewController = self;
    
    // Do any additional setup after loading the view.
    needToShare = NO;
    
    [self loadCameraView];
    
    [self loadPlayerView];
    
    // Delay the overlays so the camera feed appears first.
    [self.blackBg performSelector:@selector(show) withObject:nil afterDelay:0.6f];
    [self addTopBackGround];
    [self addBackView];
    
    [self.coverView performSelector:@selector(show) withObject:nil afterDelay:0.6f];
    // Offer the play button only when a previous take is still on disk.
    if([self isFileExsit:_saveVideoFullPath_temp])self.questionsAllView.needPlayBtn = YES;
    [self.questionsAllView performSelector:@selector(show) withObject:nil afterDelay:0.6f];
    [self.questionsAllView.recordBtn addTarget:self action:@selector(didClickedRecordBtn) forControlEvents:UIControlEventTouchUpInside];
    [self.questionsAllView.playBtn addTarget:self action:@selector(didClickedPlayBtn) forControlEvents:UIControlEventTouchUpInside];
    [self.questionsAllView.shareBtn addTarget:self action:@selector(didClickedShareBtn:) forControlEvents:UIControlEventTouchUpInside];
    [self.questionsAllView.deleteBtn addTarget:self action:@selector(didClickedDeleteBtn) forControlEvents:UIControlEventTouchUpInside];
    
    // Recording-screen controls.
    [self.recordingView.reRecordBtn addTarget:self action:@selector(didClickedRerecordBtn) forControlEvents:UIControlEventTouchUpInside];
    [self.recordingView.finishBtn addTarget:self action:@selector(didClickedFinishRecordingBtn) forControlEvents:UIControlEventTouchUpInside];
    [self.recordingView.stopRecordBtn addTarget:self action:@selector(didClickedStopRecordingBtn) forControlEvents:UIControlEventTouchUpInside];
    self.recordingView.delegate = self;
    
    //[self.editVedioView.shareBtn addTarget:self action:@selector(didClickedShareBtn:) forControlEvents:UIControlEventTouchUpInside];
    //[self.editVedioView.saveBtn addTarget:self action:@selector(didClickedSaveBtn) forControlEvents:UIControlEventTouchUpInside];
    
//    self.questionsAllView.needPlayBtn = YES;
    // Restore the upload record (share URL/file info) from a previous session.
    if ([self isFileExsit:self.saveVideoUploadedInfo]) {
        dict_UploadedInfo = [NSDictionary dictionaryWithContentsOfFile:self.saveVideoUploadedInfo];
    }
}

// Builds the back "button": a transparent tap target in the top-left corner
// containing a left-arrow icon, wired to -tapBack.
- (void)addBackView
{
    CGFloat width = 100/_ratio;
    CGFloat height = 64/_ratio;

    UIView* backTouchArea = [[UIView alloc] initWithFrame:CGRectMake(0, 0, width, height)];
    backTouchArea.backgroundColor = [UIColor clearColor];
    [self.view addSubview:backTouchArea];

    // Arrow icon, centered inside the touch area.
    UIImageView* arrowView = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, 44/_ratio, 44/_ratio)];
    arrowView.image = [UIImage imageNamed:@"public_icon_leftarrow.png"];
    arrowView.center = CGPointMake(width/2, height/2);
    [backTouchArea addSubview:arrowView];

    // Single-finger single tap triggers the back action.
    UITapGestureRecognizer* backTap = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(tapBack)];
    backTap.numberOfTapsRequired = 1;
    backTap.numberOfTouchesRequired = 1;
    [backTouchArea addGestureRecognizer:backTap];
}

// Stretches a dark gradient strip across the top of the screen (behind the
// back button), pinned with the Lyt Auto Layout helpers.
- (void)addTopBackGround
{
    UIImage* image = [UIImage imageNamed:@"talk_bg_top_black.png"];
    // NOTE(review): -stretchableImageWithLeftCapWidth:topCapHeight: has been
    // deprecated in favor of -resizableImageWithCapInsets:, and passing the
    // full image height as topCapHeight leaves no stretchable row — confirm
    // the intended stretching behavior before modernizing.
    image = [image stretchableImageWithLeftCapWidth:image.size.width*0.5 topCapHeight:image.size.height];
    UIImageView* bgtview = [[UIImageView alloc] init];
    bgtview.image = image;
    [self.view addSubview:bgtview];
    bgtview.translatesAutoresizingMaskIntoConstraints = NO;
    [bgtview lyt_alignTopToParent];
    [bgtview lyt_alignLeftToParent];
    [bgtview lyt_alignRightToParent];
    [bgtview lyt_setHeight:image.size.height];
}

// Lazily creates the dimming background, attached to the controller's view.
- (TalkBlackBg *)blackBg
{
    if (!_blackBg) {
        _blackBg = [TalkBlackBg talkBlackBgAlinToParent:self.view];
    }
    return _blackBg;
}

// Lazily creates the book-cover view.
// NOTE(review): getter returns UIView* while the property is declared
// TalkBookCoverView* — compiles, but the mismatch is worth tidying.
- (UIView *)coverView
{
    if (!_coverView) {
        _coverView = [TalkBookCoverView talkCover:_coverImage ratio:_ratio];
        [self.view addSubview:_coverView];
    }
    return _coverView;
}

// Lazily creates the start screen seeded with the three question/subtitle pairs.
- (TalkAllQuestionsView *)questionsAllView
{
    if (!_questionsAllView) {
        NSArray* arr = [NSArray arrayWithObjects:_q1,_q1_sub,_q2,_q2_sub,_q3,_q3_sub, nil];
        _questionsAllView = [TalkAllQuestionsView talkAllQuestions:arr ratio:_ratio];
        [self.view addSubview:_questionsAllView];
    }
    return _questionsAllView;
}

// Lazily creates the recording overlay, seeded with the questions and any
// previously saved recording times.
- (TalkRecordingView *)recordingView
{
    if (!_recordingView) {
        NSArray* arr = [NSArray arrayWithObjects:_q1,_q1_sub,_q2,_q2_sub,_q3,_q3_sub, nil];
        _recordingView = [TalkRecordingView TalkRecordingQuestion:arr ratio:_ratio recordingTime:_recordTimeInfo];
        [self.view addSubview:_recordingView];
    }
    return _recordingView;
}

// Lazily creates the post-recording edit overlay (theme picker, share/save).
- (TalkVedioEditView *)editVedioView
{
    if (!_editVedioView) {
        _editVedioView = [TalkVedioEditView talkVedioEditForThemes:_themeArr ratio:_ratio];
        _editVedioView.delegate = self;
        [self.view addSubview:_editVedioView];
    }
    return _editVedioView;
}

// Returns YES only when `path` names an existing regular file (not a
// directory). A nil path yields NO, since -fileExistsAtPath: returns NO for
// nil. (Method-name spelling kept for existing callers.)
- (BOOL)isFileExsit:(NSString*)path
{
    BOOL isDirectory = NO;
    BOOL exists = [[NSFileManager defaultManager] fileExistsAtPath:path isDirectory:&isDirectory];
    if (!exists) return NO;
    return !isDirectory;
}

// Default memory-warning handling; nothing cached here is safely droppable.
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

// TalkRecordingViewDelegate: returns the intro-video (.mp4) path for the
// given question index (0-2), or nil for any other index.
- (NSString *)talkRecordingView:(TalkRecordingView *)recordingView mp4ForQuestionIndex:(NSUInteger)index
{
    if (index == 0) return _q1_mp4;
    if (index == 1) return _q2_mp4;
    if (index == 2) return _q3_mp4;
    return nil;
}

// TalkRecordingViewDelegate: returns the audio (.mp3) path for the given
// question index (0-2), or nil for any other index.
- (NSString *)talkRecordingView:(TalkRecordingView *)recordingView mp3ForQuestionIndex:(NSUInteger)index
{
    if (index == 0) return _q1_mp3;
    if (index == 1) return _q2_mp3;
    if (index == 2) return _q3_mp3;
    return nil;
}

//The following section contains APIs that Benson's team may need.
#pragma mark - Function For Share API

// Detach from views that reference us and clear the shared-instance tracker.
// (ARC handles memory; this is bookkeeping only.)
// NOTE(review): `self.recordingView` goes through the lazy getter, so if the
// view was never created this would build it — and add a subview — during
// dealloc; confirm whether ivar access (_recordingView) was intended.
- (void)dealloc
{
    self.recordingView.delegate = nil;
    self.recordingView = nil;
    singleTalkViewController = nil;
}

// TalkRecordingViewDelegate: the on-screen countdown finished and recording
// is about to begin. Starts capture, resets the per-take time bookkeeping,
// and discards any previously published/uploaded artifacts.
- (void)talkRecordingView_Countdown_Start:(TalkRecordingView *)recordingView
{

    TLDebugS(@"%f, %@", [NSDate timeIntervalSinceReferenceDate], NSStringFromSelector(_cmd));

    [self startRecord];
    
    // BUGFIX: the struct must be zero-initialized. The previous code copied
    // an *uninitialized* stack struct into self.recordTimeInfo, so any field
    // not later overwritten (e.g. pTime_1/pTime_2 when recording stops before
    // question 2/3) contained stack garbage and was persisted to the plist.
    RecordingTimeInfo r = {0};
    self.recordTimeInfo = r;
    // A new take invalidates the published video and its upload record.
    [[NSFileManager defaultManager] removeItemAtPath:self.saveVideoFullPath error:nil];
    [[NSFileManager defaultManager] removeItemAtPath:self.saveVideoUploadedInfo error:nil];
    dict_UploadedInfo = nil;
}

// TalkRecordingViewDelegate: recording ended. Stamps the end-of-take offset,
// stops the encoder, and persists all question offsets to the time plist
// (read back by -loadRecordTimeInfo).
- (void)talkRecordingView_Countdown_Stop:(TalkRecordingView *)recordingView
{

    TLDebugS(@"%f, %@", [NSDate timeIntervalSinceReferenceDate], NSStringFromSelector(_cmd));

    // End offset in seconds relative to the moment recording began.
    RecordingTimeInfo r = self.recordTimeInfo;
    r.time_to = [NSDate timeIntervalSinceReferenceDate] - recordingStarted;
    self.recordTimeInfo = r;
    
    [self stopRecord];
    
    // NOTE(review): NSTimeInterval is a double; numberWithFloat: narrows it.
    // This matches the floatValue reads in -loadRecordTimeInfo but loses
    // precision for long takes — confirm acceptable.
    NSDictionary* dict = @{
                           @"time_from" : [NSNumber numberWithFloat:self.recordTimeInfo.time_from],
                           @"pTime_1" : [NSNumber numberWithFloat:self.recordTimeInfo.pTime_1],
                           @"pTime_2" : [NSNumber numberWithFloat:self.recordTimeInfo.pTime_2],
                           @"time_to" : [NSNumber numberWithFloat:self.recordTimeInfo.time_to],
                           };
    
    // Replace any previous take's time file.
    if([[NSFileManager defaultManager] fileExistsAtPath:[self savedRecordTimePath]])
        [[NSFileManager defaultManager] removeItemAtPath:[self savedRecordTimePath] error:nil];
    
    [dict writeToFile:[self savedRecordTimePath] atomically:YES];
}

// TalkRecordingViewDelegate: question `index` just started. Records the
// question's start offset (seconds since recording began) into the matching
// RecordingTimeInfo slot: index 0 -> time_from, 1 -> pTime_1, 2 -> pTime_2.
- (void)talkRecordingView:(TalkRecordingView *)recordingView questionBegin_Index:(NSUInteger)index
{
    TLDebugS(@"%f, %@", [NSDate timeIntervalSinceReferenceDate], NSStringFromSelector(_cmd));
    
    RecordingTimeInfo info = self.recordTimeInfo;
    
    switch (index) {
        case 0:
            info.time_from = [NSDate timeIntervalSinceReferenceDate] - recordingStarted;
            break;
        case 1:
            info.pTime_1 = [NSDate timeIntervalSinceReferenceDate] - recordingStarted;
            break;
        case 2:
            info.pTime_2 = [NSDate timeIntervalSinceReferenceDate] - recordingStarted;
            break;
        default:
            break;
    }
    
    self.recordTimeInfo = info;
}

// Delegate notification that a question clip finished playing. No
// bookkeeping needed; logged for tracing only.
- (void)talkRecordingView:(TalkRecordingView *)recordingView questionEnd_Index:(NSUInteger)index
{

    TLDebugS(@"%f, %@", [NSDate timeIntervalSinceReferenceDate], NSStringFromSelector(_cmd));

}

// Delegate notification that the answer phase began. Logged only.
- (void)talkRecordingView:(TalkRecordingView *)recordingView answerBegin_Index:(NSUInteger)index
{

    TLDebugS(@"%f, %@", [NSDate timeIntervalSinceReferenceDate], NSStringFromSelector(_cmd));

}

// Delegate notification that the answer phase ended. Logged only.
- (void)talkRecordingView:(TalkRecordingView *)recordingView answerEnd_Index:(NSUInteger)index
{

    TLDebugS(@"%f, %@", [NSDate timeIntervalSinceReferenceDate], NSStringFromSelector(_cmd));

}

// Placeholder: no up-front player setup is needed — the preview player is
// built on demand in -loadRecordingPreview.
-(void)loadPlayerView
{
}

// Sets up the camera preview: a full-screen image view that the capture
// callback repaints frame-by-frame, then starts the capture session.
- (void)loadCameraView
{
    // Placeholder background until the first camera frame arrives.
    self.view.backgroundColor = [UIColor colorWithPatternImage:[UIImage imageNamed:@"camera.jpg"]];//telen test ui placeholder
    
    UIImageView* preview = [[UIImageView alloc] initWithFrame:self.view.bounds];
    preview.contentMode = UIViewContentModeScaleAspectFill;
    self.webcamView = preview;
    [self.view insertSubview:preview atIndex:0];
    
    // Reset capture state flags before opening the camera.
    isCameraActive = NO;
    isRecording = NO;
    [self startCapture];
}

// Back gesture: behavior depends on which screen is currently showing.
- (void)tapBack //leaving this VC
{
    // 1) Preview playing -> close the preview and return to the start screen.
    if(previewView != nil)
    {
        [self.editVedioView unshow];
        [previewView close:nil];
        previewView = nil;
        
        [self.recordingView unshow];
        
        // Restore the start-screen overlays after the 0.6s hide animation.
        [self.blackBg performSelector:@selector(show) withObject:nil afterDelay:0.6f];
        [self.coverView performSelector:@selector(show) withObject:nil afterDelay:0.6f];
        if([self isFileExsit:_saveVideoFullPath_temp])self.questionsAllView.needPlayBtn = YES;
        [self.questionsAllView performSelector:@selector(show) withObject:nil afterDelay:0.6f];
    }
    
    // 2) Mid-recording -> treat back as "stop recording".
    else if(self.recordingView.isShown)
    {
        [self didClickedStopRecordingBtn];
    }
    
    // 3) Start screen -> actually leave this view controller.
    else
    {
        //[captureSession stopRunning];
        [self endCapture];
        
        // Pop if pushed, dismiss if presented modally.
        if (self.navigationController) {
            [self.navigationController popViewControllerAnimated:YES];
        }else{
            [self dismissViewControllerAnimated:YES completion:nil];
        }
    }
}

// "Record" tapped on the start screen: hide the start-screen overlays and
// bring in the recording UI once the hide animation (0.6s) completes.
- (void)didClickedRecordBtn
{
    [self.blackBg unshow];
    [self.coverView unshow];
    [self.questionsAllView unshow];
    [self.recordingView performSelector:@selector(show) withObject:nil afterDelay:0.6f];
}

// "Play" tapped: dismiss every overlay, rebuild the preview player, start
// playback, and inform the delegate.
- (void)didClickedPlayBtn
{
    [self.blackBg unshow];
    [self.coverView unshow];
    [self.questionsAllView unshow];
    [self.editVedioView unshow];
    
    // (Re)build the preview player and start it.
    [self loadRecordingPreview];
    [previewView play];
    
    // respondsToSelector: on nil returns NO, so no separate nil check needed.
    if ([_delegate respondsToSelector:@selector(talkViewDidClicked_Play)]) {
        [_delegate talkViewDidClicked_Play];
    }
}


// "Re-record" tapped while recording; currently only logged.
- (void)didClickedRerecordBtn
{
    TLDebugS(@"%@",NSStringFromSelector(_cmd));
    TLDebugS(@"Record clicked!");
}

// "Finish" tapped while recording: dismiss the recording UI and go straight
// into previewing the take.
- (void)didClickedFinishRecordingBtn
{
    TLDebugS(@"Finish record clicked!");

    [self.recordingView unshow];
    [self.editVedioView unshow];

    [self loadRecordingPreview];
    [previewView play];
}

// "Stop" tapped while recording: abort the take and restore the start-screen
// overlays after the 0.6s hide animation.
- (void)didClickedStopRecordingBtn
{
    TLDebugS(@"Stop record clicked!");

    [self.recordingView stopRecord];
    [self.recordingView unshow];

    [self.blackBg performSelector:@selector(show) withObject:nil afterDelay:0.6f];
    [self.coverView performSelector:@selector(show) withObject:nil afterDelay:0.6f];

    // Enable "play" only if a recorded take exists on disk.
    if ([self isFileExsit:_saveVideoFullPath_temp]) {
        self.questionsAllView.needPlayBtn = YES;
    }
    [self.questionsAllView performSelector:@selector(show) withObject:nil afterDelay:0.6f];
}

// "Save" tapped; currently only logged.
- (void)didClickedSaveBtn
{
    TLDebugS(@"%@",NSStringFromSelector(_cmd));
}

// "Share" tapped on the start screen; forwards to the shared share entry
// point also used by the edit view.
- (void)didClickedShareBtn:(UIView*)sender
{
    TLDebugS(@"%@",NSStringFromSelector(_cmd));
    [self TalkVideoEdit_Share:sender];
}

// "Delete" tapped: confirm with the user before wiping the recording.
// Confirmation is handled in -alertView:clickedButtonAtIndex: via tag 127.
- (void)didClickedDeleteBtn
{
    TLDebugS(@"%@",NSStringFromSelector(_cmd));

    UIAlertView* confirm = [[UIAlertView alloc] initWithTitle:nil message:TalkLocalizedString(@"deleteView", "") delegate:self cancelButtonTitle:nil otherButtonTitles:TalkLocalizedString(@"No",""),TalkLocalizedString(@"Yes",""), nil];
    confirm.tag = 127;
    [confirm show];
}

#pragma mark - Function For Benson's Team


// Opens the front camera + microphone and starts a capture session whose
// frames are delivered to -captureOutput:didOutputSampleBuffer:fromConnection:.
// No-op if capture is already running; silently bails if no front camera.
-(void) startCapture
{
    if(isCameraActive)
    {
        TLDebugS(@"Camera already opened!");
        return;
    }
    

    TLDebugS(@"Start capture!");

    
    // Locate the front-facing camera.
    AVCaptureDevice* camera = nil;
    NSArray* devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for(AVCaptureDevice* dev in devices)
    {
        if([dev position] == AVCaptureDevicePositionFront)
        {
            camera = dev;
            break;
        }
    }
    
    if(camera == nil) return;
    
    if(videoCaptureSession != nil)
        TLDebugS(@"CAUTION: Video capture session not null!!!");
    
    videoCaptureSession = [[AVCaptureSession alloc] init];
    videoCaptureSession.sessionPreset = AVCaptureSessionPresetHigh;
    
    
    NSError* err = nil;
    
    AVCaptureDeviceInput* input = [AVCaptureDeviceInput deviceInputWithDevice:camera error:&err];
    
    if(input == nil)
    {
        // Typically a denied camera permission.
        [[[UIAlertView alloc] initWithTitle:@"Error" message:err.localizedDescription delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil] show];
        
        return;
    }
    
    [videoCaptureSession addInput:input];
    
    audioOutput = [[AVCaptureAudioDataOutput alloc] init];
    videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    
    // Each data output gets its own serial delivery queue.
    dispatch_queue_t queue = dispatch_queue_create("videoQueue", NULL);
    
    [videoOutput setSampleBufferDelegate:self queue:queue];
    
    dispatch_queue_t queue2 = dispatch_queue_create("audioQueue", NULL);
    [audioOutput setSampleBufferDelegate:self queue:queue2];
    
    // BGRA frames are what the UIImage conversion in the capture callback expects.
    videoOutput.videoSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    //videoOutput.minFrameDuration = CMTimeMake(1, 60);
    
    AVCaptureDevice* microphone = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    
    err = nil;
    
    AVCaptureDeviceInput* input2 = [AVCaptureDeviceInput deviceInputWithDevice:microphone error:&err];
    
    // BUGFIX: input2 was previously added unconditionally; -addInput: throws
    // NSInvalidArgumentException when given nil (microphone unavailable or
    // its permission denied). Proceed video-only in that case.
    if(input2 != nil)
    {
        [videoCaptureSession addInput:input2];
        [videoCaptureSession addOutput:audioOutput];
    }
    else
    {
        TLDebugS(@"CAUTION: No microphone input (%@); capturing video only.", err.localizedDescription);
    }
    
    [videoCaptureSession addOutput:videoOutput];
    
    // Mirror + portrait so the preview matches what users expect from a
    // front camera.
    [[videoOutput connectionWithMediaType:AVMediaTypeVideo] setVideoMirrored:YES];
    [[videoOutput connectionWithMediaType:AVMediaTypeVideo] setVideoOrientation:AVCaptureVideoOrientationPortrait];
    
    [videoCaptureSession startRunning];
    
    isCameraActive = YES;
}

// Stops the capture session started by -startCapture and drops the
// session/outputs (ARC releases them). No-op when capture is not running.
-(void) endCapture
{
    if(!isCameraActive) return;

    TLDebugS(@"End capture!");

    // Flip the flag first so the capture callback stops processing frames.
    isCameraActive = NO;
    
    [videoCaptureSession stopRunning];
    
    audioOutput = nil;
    videoOutput = nil;
    videoCaptureSession = nil;
}


// CMBufferQueue getDuration callback: reports a fixed 1-second duration for
// every queued sample buffer. The queues in -startEncodingThread only need
// a nonzero duration; actual frame timestamps come from the buffers.
// NOTE(review): assumption about why 1s suffices — confirm against the
// CMBufferQueueCreate usage below.
CMTime timeCallback(CMBufferRef buff, void* refcon)
{
    return CMTimeMake(1, 1);
}

// Creates fresh CMBufferQueues for video/audio and, if not already
// recording, spawns a background loop that drains both queues into a new
// VideoRecorder until -stopRecord clears isRecording.
-(void) startEncodingThread
{
    // Discard leftover queues from a previous take.
    if(videoBuffer != NULL)
    {
        CMBufferQueueReset(videoBuffer);
        CFRelease(videoBuffer);
        videoBuffer = NULL;
    }
    if(audioBuffer != NULL)
    {
        CMBufferQueueReset(audioBuffer);
        CFRelease(audioBuffer);
        audioBuffer = NULL;
    }
    
    // BUGFIX: the callbacks struct was previously malloc'd twice and never
    // freed (two leaks per recording). CMBufferQueueCreate copies the
    // struct, so a single stack-allocated struct can back both queues.
    CMBufferCallbacks callbacks;
    callbacks.version = 0;
    callbacks.getDuration = timeCallback;   // the only required callback
    callbacks.refcon = NULL;
    callbacks.getDecodeTimeStamp = NULL;
    callbacks.getPresentationTimeStamp = NULL;
    callbacks.isDataReady = NULL;
    callbacks.compare = NULL;
    callbacks.dataBecameReadyNotification = NULL;

    CMBufferQueueRef lolQueue;
    int res = CMBufferQueueCreate(kCFAllocatorDefault, 0, &callbacks, &lolQueue);
    videoBuffer = lolQueue;

    TLDebugS(@"Buffer create = %d", res);

    CMBufferQueueRef lolQueue2;
    res = CMBufferQueueCreate(kCFAllocatorDefault, 0, &callbacks, &lolQueue2);
    audioBuffer = lolQueue2;

    TLDebugS(@"Buffer create = %d", res);

    if (!isRecording)
    {
        dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0), ^{
            // Recorder dimensions come from the most recent captured frame
            // (recorded by the capture callback).
            videoRecorder = [[VideoRecorder alloc] initWithWidth:webcamWidth Height:webcamHeight];
            
            videoRecorder.recordPath = self.saveVideoFullPath_temp;
            videoRecorder.delegate = self;
            [videoRecorder startRecording];
            
            isRecording = YES;
            recordingStarted = [NSDate timeIntervalSinceReferenceDate];
            
            while (isRecording)
            {
                @autoreleasepool
                {
                    // Encode whatever video frames queued since the last pass;
                    // stop at the first frame the encoder rejects (it stays
                    // queued and is retried next pass).
                    long outstandingBuff = CMBufferQueueGetBufferCount(videoBuffer);
                    
                    for(long i = 0; i < outstandingBuff; i++)
                    {
                        CMSampleBufferRef sampleBuffer = (CMSampleBufferRef)CMBufferQueueGetHead(videoBuffer);
                        
                        BOOL ret = [videoRecorder encodeVideoFrame:sampleBuffer];
                        
                        if(ret)
                        {
                            // Encoded: pop and balance the queue's retain.
                            CMSampleBufferRef sampleBufferz = (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(videoBuffer);
                            CFRelease(sampleBufferz);
                        }
                        
                        else break;
                    }
                    
                    // Same drain strategy for audio.
                    long outstandingBuff2 = CMBufferQueueGetBufferCount(audioBuffer);
                    
                    for(long i = 0; i < outstandingBuff2; i++)
                    {
                        CMSampleBufferRef sampleBuffer = (CMSampleBufferRef)CMBufferQueueGetHead(audioBuffer);
                        
                        BOOL ret = [videoRecorder encodeAudioFrame:sampleBuffer];
                        
                        if(ret)
                        {
                            CMSampleBufferRef sampleBufferz = (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(audioBuffer);
                            CFRelease(sampleBufferz);
                        }
                        
                        else break;
                    }
                }
                
                // Brief pause so the polling loop doesn't spin a core.
                usleep(500);
            }

            TLDebugS(@"Close streams now!");

            [videoRecorder stopRecording];
            
            // Recording ended: release both queues.
            if(videoBuffer != NULL)
            {
                CMBufferQueueReset(videoBuffer);
                CFRelease(videoBuffer);
                videoBuffer = NULL;
            }
            if(audioBuffer != NULL)
            {
                CMBufferQueueReset(audioBuffer);
                CFRelease(audioBuffer);
                audioBuffer = NULL;
            }

            TLDebugS(@"Done recording!");

        });
    }
}


// AVCapture(Audio|Video)DataOutput delegate: invoked on the private
// video/audio queues for every captured sample buffer. Updates the on-screen
// preview and, while recording, feeds the buffers to the encoding queues.
-(void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    if(!CMSampleBufferDataIsReady(sampleBuffer))
        return;
    
    @autoreleasepool {
        if(captureOutput == videoOutput)
        {
            UIImage* uiImage = nil;
            
                // Convert the frame for the preview and remember its size for
                // the recorder setup in -startEncodingThread.
                imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
                uiImage = [Utility UIimageFromSampleBuffer:imageBuffer];
                
                webcamWidth = uiImage.size.width;
                webcamHeight = uiImage.size.height;
            
            if(isCameraActive)
            {
                // Synchronous hop keeps the preview frame-accurate; it blocks
                // this capture queue until the main-thread update lands.
                dispatch_sync(dispatch_get_main_queue(), ^{
                    if(uiImage != nil)
                        self.webcamView.image = uiImage;
                });
                
                
                if(isRecording)
                {
                    videoRecorder.currentScreen = uiImage;
                    
                    // Enqueue retains the buffer; the encoding loop dequeues it.
                    CMBufferQueueEnqueue(videoBuffer, sampleBuffer);
                    
                    // Remember the timestamp of the first recorded frame.
                    NSTimeInterval relativeFrameStamp = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer));
                    if(startStamp == -1)
                        startStamp = relativeFrameStamp;
                    
                    //==========================================
                    
                    videoRecorder.currentScreen = nil;
                }
            }
        }
        
        else if(captureOutput == audioOutput)
        {
            if(isCameraActive && isRecording)
            {
            //    TLDebugS(@"Has audio at %f!!!", CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)));
                CMBufferQueueEnqueue(audioBuffer, sampleBuffer);
            }
        }
    }
}

// Kicks off a new recording: verifies the camera is live, resets the
// first-frame timestamp, clears any stale published video, and spins up
// the encoding thread.
-(void)startRecord
{
    if(!isCameraActive)
    {
        [[[UIAlertView alloc] initWithTitle:@"Error" message:@"Camera is not active. Please check the camera/microphone permission on Settings -> Privacy -> Camera/Microphone" delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil] show];
        return;
    }

    TLDebugS(@"Gonna start recording...");

    startStamp = -1;
    
    // BUGFIX: the check was inverted (`![… fileExistsAtPath:]`), so the
    // stale published video was "removed" only when it did NOT exist —
    // i.e. never. Remove it when it IS present.
    if([[NSFileManager defaultManager] fileExistsAtPath:self.saveVideoFullPath])
    {
        [[NSFileManager defaultManager] removeItemAtPath:self.saveVideoFullPath error:nil];
    }
    [self startEncodingThread];
}

// VideoRecorderDelegate: encoder finished writing. Intentionally empty —
// teardown is presumably completed by the encoding loop itself; confirm.
-(void) recordingFinished:(NSString*)outputURL
{
}

// Signals the polling loop in -startEncodingThread to drain both queues
// and close the recorder.
-(void)stopRecord
{

    TLDebugS(@"Stop pressed!");

    isRecording = NO;
}

// TalkVideoEditDelegate: the user picked a theme. Rebuilds the preview with
// the new theme unless it is the one already showing.
-(void)TalkVideoEdit_ThemeSelected:(NSDictionary *)theme
{
    // BUGFIX: the original compared the dictionaries by pointer only, so an
    // equal-but-distinct theme dictionary triggered a needless preview
    // rebuild. Compare by value too, keeping the cheap pointer check
    // (which also covers nil == nil).
    if(theme == self.currentTheme || [theme isEqual:self.currentTheme]) return;
    
    self.currentTheme = theme;
    [self loadRecordingPreview];
    [self previewTapped:nil];
}

// Share entry point (also used by the start screen's share button). If the
// video has already been uploaded (upload record on disk), hand off to the
// delegate or the built-in share selector; otherwise save/upload first and
// resume sharing from -uploadView_uploadedSuccess: via the needToShare flag.
-(void)TalkVideoEdit_Share:(UIView*)btnView
{
    needToShare = NO;
    if (dict_UploadedInfo && [self isFileExsit:self.saveVideoUploadedInfo]) {
        if (_delegate) {
            if ([_delegate respondsToSelector:@selector(talkViewShareDefineMySelf:withInfo:)]) {
                // The host app provides its own share UI.
                [_delegate talkViewShareDefineMySelf:btnView withInfo:dict_UploadedInfo];
            }else{
                // Fall back to the built-in share selector.
                TalkShareSelectView* shareView = [TalkShareSelectView talkShareView_ratio:self.ratio inSuperView:self.view];
                shareView.delegate = _delegate;
                shareView.shareDict = dict_UploadedInfo;
            }
        }
    }else{
        // Not uploaded yet: publish/upload, then share once that succeeds.
        [self TalkVideoEdit_Save];
        needToShare = YES;
    }
}

//==================================================

// Publishes the recording to its final path (after confirming via an
// untagged alert when nothing has been published yet), then uploads. When
// upload/share is compiled out (Talk_Upload_Share_Close != 0), the video is
// additionally exported to the photo library.
-(void)TalkVideoEdit_Save
{
    if(![[NSFileManager defaultManager] fileExistsAtPath:self.saveVideoFullPath])
    {
        // Nothing published yet: ask before the (slow) publish step.
        // Button handling lives in -alertView:clickedButtonAtIndex:.
        UIAlertView* alert = [[UIAlertView alloc] initWithTitle:TalkLocalizedString(@"Save title", nil) message:TalkLocalizedString(@"Save message", nil) delegate:self cancelButtonTitle:TalkLocalizedString(@"Cancel", nil) otherButtonTitles:TalkLocalizedString(@"OK", nil), nil];
        
        [alert show];
    }else{
#if Talk_Upload_Share_Close != 0
        // Upload disabled at build time: export to the photo album instead.
        ALAssetsLibrary* library = [ALAssetsLibrary new];
        [library writeVideoAtPathToSavedPhotosAlbum:[NSURL fileURLWithPath:self.saveVideoFullPath] completionBlock:nil];
#endif
        [self TalkVideoEdit_Upload];
    }
}

// UIAlertViewDelegate: routes button taps from the alerts this controller shows.
//  - tag 127: delete-confirmation (button 0 = No, 1 = Yes).
//  - untagged: the save-confirmation from -TalkVideoEdit_Save (OK -> publish)
//    plus the cancel-only informational alerts from -TalkVideoEdit_Upload,
//    which never reach the publish branch (their only button is cancel).
-(void)alertView:(UIAlertView *)alertView clickedButtonAtIndex:(NSInteger)buttonIndex
{
    if (alertView.tag == 127) {
        // "Yes" on delete: wipe the take, the published video and the
        // upload record, then refresh the start screen.
        if(buttonIndex == 1 && [[NSFileManager defaultManager] fileExistsAtPath:self.saveVideoFullPath_temp])
        {
            [[NSFileManager defaultManager] removeItemAtPath:self.saveVideoFullPath_temp error:nil];
            [[NSFileManager defaultManager] removeItemAtPath:self.saveVideoFullPath error:nil];
            [[NSFileManager defaultManager] removeItemAtPath:self.saveVideoUploadedInfo error:nil];
            dict_UploadedInfo = nil;
            if([self isFileExsit:_saveVideoFullPath_temp])self.questionsAllView.needPlayBtn = YES;
            else self.questionsAllView.needPlayBtn = NO;
            self.editVedioView.showShareBtn = NO;
            [self.questionsAllView show];
        }
    }
    // Save-confirmation "OK": publish the take if needed, else upload directly.
    else if(buttonIndex != alertView.cancelButtonIndex)
    {
        if(![[NSFileManager defaultManager] fileExistsAtPath:self.saveVideoFullPath])
        {
            // Compose the segments into the final video; PublishView calls
            // back into this controller when done.
            PublishView* publishView = [[PublishView alloc] initWithFrame:self.view.bounds segments:[self loadSegments] recordPath:self.saveVideoFullPath_temp];
            publishView.superViewController = self;
            publishView.outputPath = self.saveVideoFullPath;
            [self.view addSubview:publishView];
            
            [publishView publish];
        }else{
            [self TalkVideoEdit_Upload];
        }
    }
}

//==================================================

// Uploads the published video via UploadView (reports back through
// -uploadView_uploadedSuccess:). When upload/share is compiled out, only
// confirms the photo-album export instead.
-(void)TalkVideoEdit_Upload
{
#if Talk_Upload_Share_Close == 0
    if([[NSFileManager defaultManager] fileExistsAtPath:self.saveVideoFullPath])
    {
        UploadView* uploadView = [[UploadView alloc] initWithFrame:self.view.bounds path:self.saveVideoFullPath username:self.userNick uId:self.uid];
        uploadView.superViewController = self;
        uploadView.delegate = self;
        [self.view addSubview:uploadView];
    }
    
    else
    {
        // Should not normally happen: callers publish before uploading.
        UIAlertView* alert = [[UIAlertView alloc] initWithTitle:TalkLocalizedString(@"Alert", nil) message:TalkLocalizedString(@"Video not published", nil) delegate:self cancelButtonTitle:TalkLocalizedString(@"OK", nil) otherButtonTitles:nil];
        
        [alert show];
    }
#else
    // Upload compiled out: just confirm the photo-album export.
    UIAlertView* alert = [[UIAlertView alloc] initWithTitle:nil message:TalkLocalizedString(@"saveToPhotoA", nil) delegate:self cancelButtonTitle:TalkLocalizedString(@"OK", nil) otherButtonTitles:nil];
    [alert show];
#endif
}

//MARK: uploaded
// Upload finished successfully: persist the share info, refresh the share
// button, notify the delegate, and — if the user originally asked to share —
// resume the deferred share flow.
- (void)uploadView_uploadedSuccess:(FileUploader *)uploader
{
    NSString* url = uploader.url;
    NSString* files = uploader.clientFileInfo.link;
    if (files && url) {
        dict_UploadedInfo = @{@"url":url,@"files":files};
        [dict_UploadedInfo writeToFile:self.saveVideoUploadedInfo atomically:NO];
    }
    
    // Sharing is available only once the upload record is on disk.
    if ([self isFileExsit:self.saveVideoUploadedInfo])
        self.editVedioView.showShareBtn = YES;
    else
        self.editVedioView.showShareBtn = NO;
    // Re-show so the edit view picks up the changed button set.
    if(self.editVedioView.isShown)[self.editVedioView show];
    
    if (_delegate && [_delegate respondsToSelector:@selector(talkViewUploadedSuccess_withInfo:)]) {
        [_delegate talkViewUploadedSuccess_withInfo:dict_UploadedInfo];
    }
    // Continue the share flow deferred by -TalkVideoEdit_Share:.
    if (needToShare) {
        [self TalkVideoEdit_Share:self.questionsAllView.shareBtn];
    }
}

//==================================================

// PreviewView delegate: playback reached the end; surface the edit overlay.
-(void)playbackCompleted:(PreviewView *)preview
{
    [self.editVedioView show];
}

// Builds (or rebuilds) the full-screen preview player: layout and video
// layers, swipe/tap gestures, and the current segment list.
-(void)loadRecordingPreview
{
    [Utility setAudioCategory:AVAudioSessionCategoryPlayback];
    
    // Tear down any previous preview first.
    if(previewView != nil)
    {
        [previewView close:nil];
        [previewView removeFromSuperview];
        previewView = nil;
    }
    
    PreviewView* preview = [PreviewView new];
    preview.delegate = self;
    preview.backgroundColor = [UIColor blackColor];
    preview.frame = self.view.bounds;
    
    // Two stacked layers: layout overlays below, video content above.
    UIView* layoutLayer = [[UIView alloc] initWithFrame:preview.frame];
    [preview addSubview:layoutLayer];
    preview.layoutView = layoutLayer;
    
    UIView* videoLayer = [[UIView alloc] initWithFrame:preview.frame];
    [preview addSubview:videoLayer];
    preview.videoView = videoLayer;
    
    previewView = preview;
    [self.view insertSubview:preview atIndex:1];
    
    preview.userInteractionEnabled = YES;
    
    // Left/right swipes are handled by the preview itself...
    UISwipeGestureRecognizer* swipeLeft = [[UISwipeGestureRecognizer alloc] initWithTarget:preview action:@selector(onSwipedLeft:)];
    swipeLeft.direction = UISwipeGestureRecognizerDirectionLeft;
    [preview addGestureRecognizer:swipeLeft];
    
    UISwipeGestureRecognizer* swipeRight = [[UISwipeGestureRecognizer alloc] initWithTarget:preview action:@selector(onSwipedRight:)];
    swipeRight.direction = UISwipeGestureRecognizerDirectionRight;
    [preview addGestureRecognizer:swipeRight];
    
    // ...while a tap toggles the edit overlay (see -previewTapped:).
    UITapGestureRecognizer* tap = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(previewTapped:)];
    [preview addGestureRecognizer:tap];
    
    [preview setSegments:[self loadSegments]];
}

// Loads book.plist from `bookPath` and populates the cover image, the three
// interview questions (text, subtitle, intro video) and the credits XML
// path, then primes the recording-time info.
-(void)loadBook:(NSString*)bookPath
{
    self.bookSegmentFile = [bookPath stringByAppendingPathComponent:@"book.plist"];
    
    NSDictionary* bookInfo = [NSDictionary dictionaryWithContentsOfFile:self.bookSegmentFile];
    
    self.coverImage = [UIImage imageWithContentsOfFile:[bookPath stringByAppendingPathComponent:bookInfo[@"cover"]]];

    NSArray* questions = bookInfo[@"questions"];
    
    // Question 1.
    NSDictionary* q = questions[0];
    self.q1 = q[@"chinese"];
    self.q1_sub = q[@"english"];
    self.q1_mp4 = [bookPath stringByAppendingPathComponent:q[@"video"]];
    
    // Question 2.
    q = questions[1];
    self.q2 = q[@"chinese"];
    self.q2_sub = q[@"english"];
    self.q2_mp4 = [bookPath stringByAppendingPathComponent:q[@"video"]];
    
    // Question 3.
    q = questions[2];
    self.q3 = q[@"chinese"];
    self.q3_sub = q[@"english"];
    self.q3_mp4 = [bookPath stringByAppendingPathComponent:q[@"video"]];
    
    self.run_out_XML_path = [bookPath stringByAppendingPathComponent:bookInfo[@"credits"]];
    
    [self loadRecordTimeInfo];
}

// Reads the persisted RecordingTimeInfo plist (written when recording stops)
// and returns it; zero pause times fall back to the end time. Returns an
// all-zero struct when no time file exists.
-(RecordingTimeInfo)loadRecordTimeInfo
{
    // BUGFIX: `r` was previously left uninitialized, so when the time file
    // did not exist this method returned garbage stack memory.
    RecordingTimeInfo r = {0};
    
    //Load recording time.
    if([[NSFileManager defaultManager] fileExistsAtPath:[self savedRecordTimePath]])
    {
        NSDictionary* dict = [NSDictionary dictionaryWithContentsOfFile:[self savedRecordTimePath]];
        
        r.time_from = [dict[@"time_from"] floatValue];
        r.pTime_1 = [dict[@"pTime_1"] floatValue];
        r.pTime_2 = [dict[@"pTime_2"] floatValue];
        r.time_to = [dict[@"time_to"] floatValue];
        
        // A zero pause time means the take ended before that question.
        if(r.pTime_2 == 0) r.pTime_2 = r.time_to;
        if(r.pTime_1 == 0) r.pTime_1 = r.time_to;
    }
    
    return r;
}

// Builds the segment list (trailer/text/interview) that drives both the
// preview player and the final publish step.
// NOTE(review): the file is parsed with NSJSONSerialization even though it
// is named book.plist (the plist-based parse is commented out below) — the
// file is presumably JSON-formatted; confirm before changing.
-(NSMutableArray*)loadSegments
{
    NSDictionary* segmentsDict = [NSJSONSerialization JSONObjectWithData:[NSData dataWithContentsOfFile:self.bookSegmentFile] options:NSJSONReadingMutableContainers error:nil];
//    NSDictionary* segmentsDict = [NSDictionary dictionaryWithContentsOfFile:self.bookSegmentFile];
    
    NSArray* segmentDataArray = segmentsDict[@"segments"];
    
    NSMutableArray* segments = [NSMutableArray array];
    
    for(NSDictionary* dict in segmentDataArray)
    {
        NSString* type = dict[@"type"];
        NSString* name = dict[@"name"];
        
      //  if([type isEqualToString:@"interview"])continue;
        
        // Pre-rendered video segment (intro/outro).
        if([type isEqualToString:@"trailer"])
        {
            TrailerSegment* t = [TrailerSegment new];
            t.root = [self.bookSegmentFile stringByDeletingLastPathComponent];
            t.fileRoot = self.talkFileRoot;
            t.src = dict[@"source"];
            t.name = name;
            
            [t load];
           
            [segments addObject:t];
        }
        
        // Static text card segment.
        else if([type isEqualToString:@"text"])
        {
            TextSegment* t = [TextSegment new];
            t.name = name;
            t.root = [self.bookSegmentFile stringByDeletingLastPathComponent];
            t.fileRoot = self.talkFileRoot;
            t.src = dict[@"source"];
            [segments addObject:t];
        }
        
        // The recorded interview itself: questions + the user's take.
        else if([type isEqualToString:@"interview"])
        {
            InterviewSegment* seg = [InterviewSegment new];
            seg.name = name;
            seg.q1 = self.q1;
            seg.q1_sub = self.q1_sub;
            seg.q2 = self.q2;
            seg.q2_sub = self.q2_sub;
            seg.q3 = self.q3;
            seg.q3_sub = self.q3_sub;
            seg.q1_mp4 = self.q1_mp4;
            seg.q2_mp4 = self.q2_mp4;
            seg.q3_mp4 = self.q3_mp4;
            seg.q1_mp3 = self.q1_mp3;
            seg.q2_mp3 = self.q2_mp3;
            seg.q3_mp3 = self.q3_mp3;
            seg.vid_mp4 = self.saveVideoFullPath_temp;
            
            seg.root = [self.bookSegmentFile stringByDeletingLastPathComponent];
            seg.fileRoot = self.talkFileRoot;
            

            TLDebugS(@"RInfo = %f, %f, %f, %f", self.recordTimeInfo.time_from, self.recordTimeInfo.pTime_1, self.recordTimeInfo.pTime_2, self.recordTimeInfo.time_to);

            
            
            // Re-base all offsets so that time_from becomes 0.
            RecordingTimeInfo rinfo = [self loadRecordTimeInfo];
            //rinfo.time_from = self.recordTimeInfo.time_from;
            rinfo.time_to = rinfo.time_to - rinfo.time_from;
            rinfo.pTime_1 = rinfo.pTime_1 - rinfo.time_from;
            rinfo.pTime_2 = rinfo.pTime_2 - rinfo.time_from;
            
            seg.recordTimeInfo = rinfo;
            
            seg.src_template = dict[@"source"];
            
            
            [segments addObject:seg];
        }
    }
    
    return segments;
}

// Empty AVCaptureFileOutput recording callbacks — file-based output is not
// used here (frames are encoded manually via VideoRecorder); presumably kept
// only to satisfy a delegate protocol — confirm before removing.
-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections
{
}

-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
}

// Tap on the preview toggles the edit overlay: showing it pauses playback,
// hiding it resumes.
-(void)previewTapped:(UITapGestureRecognizer*)sender
{
    if(self.editVedioView.isShown)
    {
        [self.editVedioView unshow];
        [previewView play];
        return;
    }
    
    // Sharing is only offered once an upload record exists on disk.
    self.editVedioView.showShareBtn = [self isFileExsit:self.saveVideoUploadedInfo];
    [self.editVedioView show];
    [previewView pause];
}

// Path of the plist holding RecordingTimeInfo: the published video's path
// with its extension swapped for "plist".
-(NSString*)savedRecordTimePath
{
    NSString* basePath = [self.saveVideoFullPath stringByDeletingPathExtension];
    return [basePath stringByAppendingPathExtension:@"plist"];
}
@end
