//
//  ClipViewModel.m
//  TheVideoClip
//
//  Created by huyan on 2018/4/3.
//  Copyright © 2018年 北斗君合MacMini. All rights reserved.
//

#import "ClipViewModel.h"

@implementation ClipViewModel

#pragma mark - Frame extraction

/// Extracts a single frame of `asset` at `timeValue` seconds.
/// Zero tolerance is requested so the returned frame matches the requested
/// time exactly rather than snapping to the nearest keyframe.
/// @param timeValue The time of the desired frame, in seconds.
/// @param asset     The video asset to sample.
/// @return The frame as a UIImage, or nil if frame generation failed.
+ (UIImage *)getVideoPreViewImageWithTime:(NSTimeInterval)timeValue asset:(AVURLAsset *)asset {
    AVAssetImageGenerator *gen = [[AVAssetImageGenerator alloc] initWithAsset:asset];
    // Apply the track's preferred transform so rotated footage comes out upright.
    gen.appliesPreferredTrackTransform = YES;
    gen.requestedTimeToleranceAfter = kCMTimeZero;
    gen.requestedTimeToleranceBefore = kCMTimeZero;
    // Timescale 600 (Apple's recommended video timescale) instead of 10:
    // a timescale of 10 rounds the requested time to 0.1 s, which defeats
    // the zero-tolerance request above for common frame rates.
    CMTime time = CMTimeMakeWithSeconds(timeValue, 600);
    NSError *error = nil;
    CMTime actualTime;
    CGImageRef image = [gen copyCGImageAtTime:time actualTime:&actualTime error:&error];
    if (image == NULL) {
        // Generation can fail (invalid time, protected content, …). Creating
        // a UIImage from a NULL CGImageRef yields an unusable image, so bail.
        return nil;
    }
    UIImage *img = [[UIImage alloc] initWithCGImage:image];
    CGImageRelease(image); // copyCGImageAtTime: follows the CF Create rule.
    return img;
}

#pragma mark - Time formatting

/// Formats a duration in whole seconds as "mm:ss", or "hh:mm:ss" once the
/// duration reaches one hour.
/// @param totalSeconds The duration in seconds (expected non-negative).
/// @return The formatted time string.
+ (NSString *)timeFormatted:(NSInteger)totalSeconds {
    NSInteger seconds = totalSeconds % 60;
    NSInteger minutes = (totalSeconds / 60) % 60;
    NSInteger hours = totalSeconds / 3600;
    if (hours == 0) {
        return [NSString stringWithFormat:@"%02ld:%02ld", (long)minutes, (long)seconds];
    }
    return [NSString stringWithFormat:@"%02ld:%02ld:%02ld", (long)hours, (long)minutes, (long)seconds];
}

/// Parses a "mm:ss" or "hh:mm:ss" string (the inverse of +timeFormatted:)
/// back into a number of seconds.
/// @param timeStr The formatted time string.
/// @return The total seconds, or 0 if the string has neither 2 nor 3 fields.
+ (CGFloat)realSecondsWithFormatted:(NSString *)timeStr {
    CGFloat realSeconds = 0;
    NSArray *array = [timeStr componentsSeparatedByString:@":"];
    if (array.count == 2) {
        realSeconds = [array[0] intValue] * 60 + [array[1] intValue];
    } else if (array.count == 3) {
        realSeconds = [array[0] intValue] * 3600 + [array[1] intValue] * 60 + [array[2] intValue];
    }
    return realSeconds;
}

#pragma mark - Export

/// Trims `asset` to the range [beginTime, endTime] (seconds) and exports it
/// as an MP4 into the temporary directory, named with the current timestamp.
/// The completion block is always invoked exactly once, on the main queue,
/// with the output path on success or (nil, NO) on failure/cancellation.
/// @param asset     The source video asset.
/// @param beginTime Trim start, in seconds.
/// @param endTime   Trim end, in seconds (must be > beginTime).
/// @param block     Completion callback; may be nil.
+ (void)cutVideoAndOutPut:(AVURLAsset *)asset beginTime:(CGFloat)beginTime endTime:(CGFloat)endTime block:(void(^)(NSString *videoFilePath, BOOL success))block {
    // Build a unique output path from the current timestamp.
    NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
    formatter.dateFormat = @"yyyyMMddHHmmss";
    NSString *filename = [NSString stringWithFormat:@"%@.mp4", [formatter stringFromDate:[NSDate date]]];
    NSString *tmpDir = NSTemporaryDirectory();
    // Ensure the directory exists (a no-op if it already does).
    [[NSFileManager defaultManager] createDirectoryAtPath:tmpDir withIntermediateDirectories:YES attributes:nil error:nil];
    // stringByAppendingPathComponent inserts the "/" separator if needed,
    // unlike the raw string append used previously.
    NSString *filePath = [tmpDir stringByAppendingPathComponent:filename];

    // NOTE(review): the previous AVMutableVideoComposition / CALayer setup
    // was removed — it was never assigned to exportSession.videoComposition
    // and therefore had no effect on the export, while its unchecked
    // objectAtIndex:0 would crash on assets with no video track.

    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality];
    if (exportSession == nil) {
        // The preset may be unsupported for this asset; report failure
        // instead of silently never calling back.
        if (block) {
            dispatch_async(dispatch_get_main_queue(), ^{ block(nil, NO); });
        }
        return;
    }

    // Trim range, expressed in the asset's own timescale for accuracy.
    CMTime start = CMTimeMakeWithSeconds(beginTime, asset.duration.timescale);
    CMTime duration = CMTimeMakeWithSeconds(endTime - beginTime, asset.duration.timescale);
    exportSession.timeRange = CMTimeRangeMake(start, duration);
    exportSession.outputURL = [NSURL fileURLWithPath:filePath];
    exportSession.outputFileType = AVFileTypeMPEG4;
    exportSession.shouldOptimizeForNetworkUse = YES;

    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        // The handler runs on an arbitrary background queue; hop to the
        // main queue so callers can safely touch UIKit in the block.
        dispatch_async(dispatch_get_main_queue(), ^{
            if (!block) {
                return;
            }
            switch (exportSession.status) {
                case AVAssetExportSessionStatusCompleted:
                    block(filePath, YES);
                    break;
                case AVAssetExportSessionStatusFailed:
                case AVAssetExportSessionStatusCancelled:
                    // Cancellation previously left the caller waiting forever;
                    // treat it as a failure so the block always fires.
                    block(nil, NO);
                    break;
                default:
                    // Unknown/Waiting/Exporting are transient states and are
                    // not expected inside the completion handler.
                    break;
            }
        });
    }];
}

@end
