//
//  PublishView.m
//  talkDemo
//
//  Created by Reisen on 5/5/15.
//  Copyright (c) 2015 刘赞黄Telen. All rights reserved.
//

#import "PublishView.h"
#import "Utility.h"
#import  <AssetsLibrary/ALAssetsLibrary.h>
#import "TrailerSegment.h"
#import "TextSegment.h"
#import "InterviewSegment.h"
#import "TalkDefine.h"

@implementation PublishView

//================================================

// Initializes the publishing overlay: a looping "saving" animation, a
// progress bar, a status line and a "current/total" step label, then
// precomputes encoding durations via -load.
//
// @param frame       The overlay's frame.
// @param _segments   The segments to encode and combine (stored by ivar).
// @param recordPath  Path of the interview recording (stored for later use).
// @return The initialized view, or nil if the superclass init fails.
-(id)initWithFrame:(CGRect)frame segments:(NSMutableArray*)_segments recordPath:(NSString*)recordPath
{
    self = [super initWithFrame:frame];
    if (self == nil)
        return nil;  // fix: original ignored a nil result from super
    
    self.userInteractionEnabled = YES;
    
    // Swallow taps so touches don't fall through to the view underneath.
    // NOTE(review): the action fires the -outputPath getter — a harmless
    // no-op, but an intentionally empty action method would be clearer.
    UITapGestureRecognizer* gesReg = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(outputPath)];
    [self addGestureRecognizer:gesReg];
    
    interviewRecordPath = recordPath;
    
    self.backgroundColor = [UIColor colorWithWhite:1 alpha:0.96f];
    
    // Looping 12-frame "saving" animation (assets 0001.png … 0012.png).
    UIImageView* animu = [[UIImageView alloc] init];
    animu.frame = CGRectMake(0, 0, 450 * 0.55f, 560 * 0.55f);
    animu.center = CGPointMake(self.center.x, 360);
    [self addSubview:animu];
    
    NSMutableArray* animuImages = [NSMutableArray array];
    for (int i = 1; i <= 12; i++)
    {
        UIImage* frameImage = [UIImage imageNamed:[NSString stringWithFormat:@"%04d.png", i]];
        // fix: +imageNamed: returns nil for a missing asset, and adding nil
        // to an NSMutableArray throws — skip missing frames instead.
        if (frameImage != nil)
            [animuImages addObject:frameImage];
    }
    
    animu.animationImages = animuImages;
    animu.animationRepeatCount = 0;   // 0 = loop forever
    animu.animationDuration = 2;
    [animu startAnimating];
    
    // Thin progress bar, scaled 3x vertically for visibility.
    self.progressView = [[UIProgressView alloc] initWithFrame:CGRectMake(50, 98, 300, 2)];
    self.progressView.center = CGPointMake(self.center.x, 620);
    [self addSubview:self.progressView];
    self.progressView.transform = CGAffineTransformMakeScale(1, 3);
    self.progressView.tintColor = [UIColor colorWithRed:138/255.0f green:142/255.0f blue:201/255.0f alpha:1];
    
    // Main status line above the bar.
    self.statusLabel = [[UILabel alloc] initWithFrame:CGRectMake(0, 0, 768, 50)];
    self.statusLabel.font = [UIFont systemFontOfSize:28];
    self.statusLabel.textAlignment = NSTextAlignmentCenter;
    self.statusLabel.text = @"Saving. Please wait...";
    self.statusLabel.center = CGPointMake(self.center.x, 560);
    [self addSubview:self.statusLabel];
    
    // "current/total" step indicator below the bar; updated by -updateProgress:.
    self.stepLabel = [[UILabel alloc] initWithFrame:CGRectMake(0, 0, 300, 20)];
    self.stepLabel.font = [UIFont italicSystemFontOfSize:20];
    self.stepLabel.textAlignment = NSTextAlignmentRight;
    self.stepLabel.text = @"1/3";
    self.stepLabel.center = CGPointMake(self.center.x, 600);
    self.stepLabel.textColor = self.progressView.tintColor;
    [self addSubview:self.stepLabel];
    
    segments = _segments;
    
    [self load];
    
    return self;
}

// Precomputes the encoding plan: per-segment encoding durations, the grand
// total used for progress estimation, and the number of visible steps
// (trailer segments are excluded; a final "Combining" step is appended).
- (void)load {
    encodingDurations = [NSMutableArray array];
    totalEncodingDurations = 0;
    
    curStep = 0;
    totalStep = 0;
    
    NSMutableArray* segmentNames = [NSMutableArray array];
    for (Segment* segment in segments)
    {
        [segmentNames addObject:segment.name];
        
        float duration = [segment getDuration];
        BOOL isTrailer = [segment isMemberOfClass:[TrailerSegment class]];
        
        // Trailers contribute no encoding time and don't count as a step.
        if (isTrailer)
            duration = 0;
        else
            totalStep++;
        
        TLDebugS(@"Add dur:%f", duration);
        totalEncodingDurations += duration;
        [encodingDurations addObject:@(duration)];
    }
    
    // The final combine/export pass is budgeted a flat 3 units.
    totalStep++;
    [segmentNames addObject:@"Combining"];
    
    totalEncodingDurations += 3;
    [encodingDurations addObject:@(3.0f)];
}

// NOTE(review): -preferredStatusBarStyle is a UIViewController API; UIKit
// never consults it on a plain UIView, so this override appears to be dead
// code — confirm before removing.
-(UIStatusBarStyle)preferredStatusBarStyle
{
    return UIStatusBarStyleDefault;
}

// Kicks off the publishing pipeline: resets per-segment progress, wires each
// segment's encode-completion callback back to -segmentEncoded:, starts the
// UI progress timer, prepares the output collections, and encodes the first
// segment. Remaining segments are chained serially from -segmentEncoded:.
-(void)publish
{
   // return;
    
    totalDuration = 0;
    estimateStabilized = NO;
    
    // Each segment reports completion via the selector/delegate pair below.
    for(Segment* seg in segments)
    {
        seg.progress = 0;
        seg.encodingCompleted = @selector(segmentEncoded:);
        seg.encodingDelegate = self;
    }
    
    // NOTE(review): this repeating timer retains self and is only invalidated
    // inside -updateProgress: once the export reports full progress; if
    // publishing aborts early, the timer (and this view) leak — confirm the
    // teardown path.
    [NSTimer scheduledTimerWithTimeInterval:0.1f target:self selector:@selector(updateProgress:) userInfo:nil repeats:YES];
    
    videoPaths = [NSMutableArray array];
    
    audioTracks = [NSMutableArray array];
    
    //Prepare 3 tracks: Music, webcam's audio, and video's audio
    [audioTracks addObject:[NSMutableArray array]];
    [audioTracks addObject:[NSMutableArray array]];
    [audioTracks addObject:[NSMutableArray array]];
    
    encodeStarted = [NSDate date];
    encodeCompleted = nil;
    assTimer = nil;
    
    // Encoding is serial: start with the first segment; -segmentEncoded:
    // advances to the next one.
    curStep = 1;
    [[segments firstObject] encodeVideo];
}

// Callback fired by a Segment when its video finishes encoding. Records the
// segment's video path, buckets its audio clips into the destination tracks,
// advances the step counter, then encodes the next segment — or starts the
// final combine when this was the last one.
-(void)segmentEncoded:(Segment*)encodedSegment
{
    TLDebugS(@"Segment encoded 1");
    encodedSegment.encodingDelegate = nil;
    
    [videoPaths addObject:[encodedSegment videoInformation]];
    
    // Audio clips are offset by the running timeline position so they line up
    // with this segment's place in the combined video.
    NSMutableArray* audioClips = [encodedSegment audioInformationWithStart:totalDuration];
    
    totalDuration += [encodedSegment getDuration];
    
    // Route each clip to the track index it declares.
    for (NSDictionary* clip in audioClips)
        [audioTracks[[clip[@"track"] intValue]] addObject:clip];
    
    // Trailer segments don't count as a visible progress step.
    if (![encodedSegment isKindOfClass:[TrailerSegment class]])
        curStep++;
    
    TLDebugS(@"Segment encoded 2");
    
    NSInteger nextIndex = [segments indexOfObject:encodedSegment] + 1;
    
    if (nextIndex < segments.count)
    {
        TLDebugS(@"Will encode %zd", nextIndex);
        [segments[nextIndex] encodeVideo];
    }
    else
    {
        [self gattai:videoPaths audio:audioTracks];
    }
}

// Timer callback (fires at 10 Hz, scheduled in -publish): mirrors the current
// encode/export progress into the progress bar and the "step x/y" labels.
//
// @param timer The repeating timer driving this update; invalidated here
//              once the final export reaches full progress.
-(void)updateProgress:(NSTimer*)timer
{
    float progress = 0;
    
    // Phase 1: per-segment encoding — show the first segment still in flight.
    for(Segment* seg in segments)
    {
        if(seg.progress < 1)
        {
            self.statusLabel.text = TalkLocalizedString(@"Step",nil);
            self.stepLabel.text = [NSString stringWithFormat:@"%d/%d", curStep, totalStep];
            progress = seg.progress;
            
            break;
        }
    }
    
    // Phase 2: final AVAssetExportSession combine — overrides the display.
    if(exportSession != nil)
    {
        self.statusLabel.text = TalkLocalizedString(@"Step",nil);
        self.stepLabel.text = [NSString stringWithFormat:@"%d/%d", totalStep, totalStep];
        progress = exportSession.progress;
        
        // fix: was `progress == 1` — exact float equality can be missed,
        // which would leave this repeating timer (retaining self) alive
        // forever. `>=` is the safe completion check.
        if(progress >= 1)
            [timer invalidate];
    }
    
    self.progressView.progress = progress;
}

// Combines ("gattai") every encoded segment video plus the three audio track
// buckets into one AVMutableComposition, exports it to self.outputPath,
// saves the result to the Photos library, and finally triggers the upload
// step on self.superViewController.
//
// @param videoPathz  Ordered file paths of the encoded per-segment videos.
// @param audioPaths  One array of clip dictionaries per audio track (music,
//                    webcam audio, video audio); each dictionary carries
//                    @"path", @"start", @"localStart", @"duration"
//                    (-1 means "use the whole clip") and @"track".
-(void) gattai:(NSMutableArray*)videoPathz
         audio:(NSMutableArray*)audioPaths
{
    TLDebugS(@"Gattai from %@", videoPathz);
    
    [Utility startTiming];
    
    NSString* recordPath = self.outputPath;
    
    // AVAssetExportSession fails if the destination exists — clear it first.
    if([[NSFileManager defaultManager] fileExistsAtPath:recordPath])
        [[NSFileManager defaultManager] removeItemAtPath:recordPath error:nil];
    
    AVMutableComposition *mixComposition = [AVMutableComposition composition];
    
    //-----------------------------------------------------------------------------
    // Video: append each clip's video track back-to-back on one mutable track.
    
    AVMutableCompositionTrack* videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    NSError * error = nil;
 //   NSMutableArray* timeRanges = [NSMutableArray array];
  //  NSMutableArray* tracks = [NSMutableArray array];
    
    // Running insertion point (seconds) for the next clip.
    float startz = 0;
    for (NSString* videoPath in videoPathz) {
        AVURLAsset *assetClip = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:videoPath]];

        // Skip files with no video track (e.g. empty or audio-only outputs).
        if([assetClip tracksWithMediaType:AVMediaTypeVideo].count == 0) continue;
        
        TLDebugS(@"Insert video %@ at %f", [videoPath lastPathComponent], startz);
        
        AVAssetTrack *clipVideoTrackB = [[assetClip tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
        
   //     [timeRanges addObject:[NSValue valueWithCMTimeRange:CMTimeRangeMake(kCMTimeZero, assetClip.duration)]];
    //    [tracks addObject:clipVideoTrackB];
        
        // NOTE(review): `error` is never inspected after the insertions below
        // — a failed insert would go unnoticed. Consider checking the BOOL
        // return of -insertTimeRange:ofTrack:atTime:error:.
        [videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, assetClip.duration)
                            ofTrack:clipVideoTrackB
                             atTime:CMTimeMakeWithSeconds(startz, assetClip.duration.timescale)
                              error:&error];
        
        startz += CMTimeGetSeconds(assetClip.duration);
    }
    // [videoTrack insertTimeRanges:timeRanges ofTracks:tracks atTime:kCMTimeZero error:&error];
    
    //-----------------------------------------------------------------------------
    // Audio: one composition track per bucket; clips are placed at their
    // absolute @"start" offsets rather than appended sequentially.
    
    for(NSMutableArray* trackData in audioPaths)
    {
        //break;
        AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        
        int index = 0;
        for (NSDictionary* audioData in trackData) {
            TLDebugS(@"Insert audio:%@", audioData);
            
            
            //            [arr addObject:@{@"path": [self recordingPath], @"start":[NSNumber numberWithFloat:0], @"duration":[NSNumber numberWithFloat:-1], @"track":[NSNumber numberWithInt:0]}];
            
            float dur = [audioData[@"duration"] floatValue];
            float start = [audioData[@"start"] floatValue];
            float localStart = [audioData[@"localStart"] floatValue];
            
            AVURLAsset *assetClip = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:audioData[@"path"]]];
            
            NSArray* tracks = [assetClip tracksWithMediaType:AVMediaTypeAudio];
            if(tracks.count == 0) continue;
            
            AVAssetTrack *clipAudioTrack = [tracks objectAtIndex:0];
            
            // duration == -1 means "whole clip"; otherwise clamp to the
            // asset's real length so we never read past its end.
            if(dur != -1)
                dur = MIN(dur, CMTimeGetSeconds(assetClip.duration));
            
            else
                dur = CMTimeGetSeconds(assetClip.duration);
            
            [audioTrack insertTimeRange:CMTimeRangeMake(CMTimeMakeWithSeconds(localStart, assetClip.duration.timescale), CMTimeMakeWithSeconds(dur, assetClip.duration.timescale))
                                ofTrack:clipAudioTrack
                                 atTime:CMTimeMakeWithSeconds(start, assetClip.duration.timescale)
                                  error:&error];
            
            index++;
        }
    }
    
    //-----------------------------------------------------------------------------
    
    // AVMutableAudioMix* exportAudioMix = [AVMutableAudioMix audioMix];
    /*
     AVMutableAudioMixInputParameters* exportAudioMixInputParameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:audioTrack];
     
     double totalDur = CMTimeGetSeconds(audioTrack.timeRange.duration);
     
     DLog(@"Total duration = %f", totalDur);
     
     [exportAudioMixInputParameters setVolume:1.0 atTime:CMTimeMakeWithSeconds((totalDur-2), 1)];
     [exportAudioMixInputParameters setVolume:0.5 atTime:CMTimeMakeWithSeconds((totalDur-1), 1)];
     [exportAudioMixInputParameters setVolume:0.1 atTime:CMTimeMakeWithSeconds((totalDur-0), 1)];
     
     exportAudioMix.inputParameters = [NSArray arrayWithObject:exportAudioMixInputParameters];
     */
    //-----------------------------------------------------------------------------
    // Export. Preset size/time measurements from earlier testing:
    //Medium =  480x360 , 7 M, 98 sec.
    //640x480 = 640x480, 22 M, 98 sec.
    //Highest = Original size, 67 M, 98 sec.
    
    NSArray* arr = [AVAssetExportSession exportPresetsCompatibleWithAsset:mixComposition];
    
    TLDebugS(@"Compat Preset = %@", arr);
    TLDebugS(@"Medium = %@", AVAssetExportPresetHighestQuality);
    
    exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPreset640x480];
    
    // Cap output at ~10 MB and enable fast-start for network playback.
    exportSession.fileLengthLimit = 10000000;
    exportSession.shouldOptimizeForNetworkUse = YES;
    NSParameterAssert(exportSession != nil);
    if([[recordPath pathExtension] isEqualToString:@"mov"])
        exportSession.outputFileType = AVFileTypeQuickTimeMovie;
    
    else if([[recordPath pathExtension] isEqualToString:@"mp4"])
        exportSession.outputFileType = AVFileTypeMPEG4;
    
    exportSession.outputURL = [NSURL fileURLWithPath:recordPath];
    // exportSession.audioMix = exportAudioMix;
    
    //Kill a blank frame at the end of the video
    exportSession.timeRange = CMTimeRangeMake(CMTimeMakeWithSeconds(0, 1000), CMTimeMakeWithSeconds(startz - 1/25.0f, 1000));
    TLDebugS(@"File size = %lld", exportSession.estimatedOutputFileLength);

    // NOTE(review): the completion block strongly captures self — presumably
    // intentional so the overlay outlives the async export, but confirm no
    // path leaks this view if the export never completes.
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        [Utility endTimingWithTag:@"Gattai"];
        
        //  self.encodingDone = YES;
        
        switch ([exportSession status]) {
            case AVAssetExportSessionStatusFailed:
                TLDebugS(@"Export failed: %@", [exportSession error]);
                break;
            case AVAssetExportSessionStatusCancelled:
                TLDebugS(@"Export canceled");
                break;
            case AVAssetExportSessionStatusCompleted:
                TLDebugS(@"Export successfully! File size = %lld", [[[NSFileManager defaultManager] attributesOfItemAtPath:recordPath error:nil] fileSize]);
                break;
            default:
                break;
        }
        // NOTE(review): "Retry export" is only logged — no retry actually
        // happens, and the save-to-library flow below runs even on failure.
        // Confirm this is intended.
        if (exportSession.status != AVAssetExportSessionStatusCompleted){
            TLDebugS(@"Retry export");
        }
        
        // Clear the cross-launch "encoding in progress" flag.
        NSUserDefaults* pref = [NSUserDefaults standardUserDefaults];
        [pref setBool:NO forKey:@"isEncoding"];
        [pref synchronize];
        
        //    self.encodingDone = YES;
        
        TLDebugS(@"Saving to library!");
        
        // Also stops -updateProgress: from reading export progress.
        exportSession = nil;
        
        
        ALAssetsLibrary* library = [ALAssetsLibrary new];
        
        [library writeVideoAtPathToSavedPhotosAlbum:[NSURL fileURLWithPath:recordPath] completionBlock:^(NSURL *assetURL, NSError *error) {
            if(error)
            {
                // NOTE(review): a save-to-library failure is silently ignored
                // and the overlay then stays on screen forever — confirm.
            }
            else
            {
                TLDebugS(@"Saving to library success!");
                
                dispatch_async(dispatch_get_main_queue(), ^(){
                    
                    // Delete the per-segment temp videos; trailer segments are
                    // skipped (their files are not per-publish temporaries).
                    for (Segment* seg in segments)
                    {
                        if([seg isKindOfClass:[TrailerSegment class]]) continue;
                        
                        [[NSFileManager defaultManager] removeItemAtPath:[seg videoInformation] error:nil];
                    }
                    
                    //This block doesn't use YouTube
                    [self removeFromSuperview];
                  //  [[[UIAlertView alloc] initWithTitle:TalkLocalizedString(@"Success title", nil) message:TalkLocalizedString(@"Success message", nil) delegate:nil cancelButtonTitle:TalkLocalizedString(@"OK", nil) otherButtonTitles:nil] show];
                    
                    [self.superViewController performSelector:@selector(TalkVideoEdit_Upload)];
                });
            }
        }
         ];
        
    }];
}

// UIAlertViewDelegate: handles the publish alert (tag 101). Tapping its
// cancel button dismisses this overlay and shows a confirmation that the
// video was saved to Photos; any other button is ignored.
-(void) alertView:(UIAlertView *)alertView clickedButtonAtIndex:(NSInteger)buttonIndex
{
    if (alertView.tag != 101)
        return;
    
    if (buttonIndex != alertView.cancelButtonIndex)
        return;
    
    [self removeFromSuperview];
    [[[UIAlertView alloc] initWithTitle:TalkLocalizedString(@"Publishing is completed", nil)
                                message:TalkLocalizedString(@"The video is now available in the Photos folder.", nil)
                               delegate:nil
                      cancelButtonTitle:TalkLocalizedString(@"OK", nil)
                      otherButtonTitles:nil] show];
}


// Pops the top view controller off the app's root navigation stack.
// Assumes the key window's rootViewController is a UINavigationController.
-(IBAction)back:(id)sender
{
    UIWindow* keyWindow = [[UIApplication sharedApplication] keyWindow];
    UINavigationController* navController = (UINavigationController*)keyWindow.rootViewController;
    [navController popViewControllerAnimated:YES];
}


@end
