//
//  MPTSpeedUploadVideoEditor.m
//  MPTCapture
//
//  Created by Khazmovsky on 16/4/12.
//  Copyright © 2016年 Jeakin. All rights reserved.
//

#import "MPTSpeedUploadVideoEditor.h"

@interface MPTSpeedUploadVideoEditor ()

// Trimmed source composition built by -dealWithFile: / -dealWithAsset:.
@property (nonatomic, strong) AVAsset *composition;
// Orientation / resize instructions matching `composition`.
@property (nonatomic, strong) AVVideoComposition *videoComposition;
// Always reset to nil after building — presumably reserved for future audio
// processing; confirm before removing.
@property (nonatomic, strong) AVAudioMix *audioMix;
// Requested trim range; kCMTimeRangeZero means "use the whole asset".
@property (nonatomic, assign) CMTimeRange assetTimeRange;
// Geometry detected from the first video track (size, transform, scale).
@property (nonatomic, assign) MPTVideoInfo videoInfo;

@end

@implementation MPTSpeedUploadVideoEditor

/// Returns YES when the file at `videoPath` exists and contains at least one
/// video track. A file that exists but has no video track is treated as
/// corrupt and deleted as a side effect (matching the original behavior).
/// @param videoPath Absolute path of the candidate video file.
/// @return YES if the file is a playable video, NO otherwise.
+ (BOOL)isCorrectVideo:(NSString *)videoPath
{
    if (videoPath.length == 0) {
        return NO;
    }
    if (![[NSFileManager defaultManager] fileExistsAtPath:videoPath]) {
        return NO;
    }
    AVURLAsset *videoAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:videoPath] options:nil];
    NSArray *videoTracks = [videoAsset tracksWithMediaType:AVMediaTypeVideo];
    if (videoTracks.count > 0) {
        return YES;
    }
    // No video track: the file is unusable, remove it. NSFileManager replaces
    // the original unlink() + stringWithFormat: round-trip and handles
    // encoding of the path correctly.
    [[NSFileManager defaultManager] removeItemAtPath:videoPath error:nil];
    return NO;
}


/// Convenience initializer: wraps `filePath` in a file URL and forwards to
/// the URL-based initializer.
- (instancetype)initWithVideoFilePath:(NSString *)filePath withRange:(CMTimeRange)timeRange {
    NSURL *fileURL = [NSURL fileURLWithPath:filePath];
    return [self initWithVideoFileURL:fileURL withRange:timeRange];
}

/// Initializes the editor from a local file URL. `timeRange` selects the
/// portion of the asset to keep; pass kCMTimeRangeZero for the full asset.
/// Returns nil when the composition cannot be built (e.g. no video track).
- (instancetype)initWithVideoFileURL:(NSURL *)fileURL withRange:(CMTimeRange)timeRange {
    self = [super init];
    if (!self) {
        return nil;
    }
    self.assetTimeRange = timeRange;
    // Surface composition-build failures to the caller as a nil instance.
    return [self dealWithFile:fileURL] ? self : nil;
}

/// Initializes the editor from an existing AVAsset. `timeRange` selects the
/// portion to keep; kCMTimeRangeZero means the full asset. Returns nil when
/// the composition cannot be built (e.g. no video track).
- (instancetype)initWithAsset:(AVAsset *)asset withRange:(CMTimeRange)timeRange
{
    NSAssert([asset isKindOfClass:[AVAsset class]], @"参数不对");
    self = [super init];
    if (self) {
        self.assetTimeRange = timeRange;
        if (![self dealWithAsset:asset]) {
            self = nil;
        }
    }
    return self;
}

#if DEBUG
/// DEBUG helper: dumps the six CGFloat members of a CGAffineTransform to
/// stdout, two values per row.
void __CGAffineTransformPrint(CGAffineTransform t) {
    const CGFloat *values = (const CGFloat *)&t;
    const int count = (int)(sizeof(t) / sizeof(CGFloat));
    for (int i = 0; i < count; ++i) {
        if (i && !(i % 2)) printf("\n");
        // %g instead of an (int) cast: the original truncated fractional
        // rotation/translation components, making the dump misleading.
        printf("%g\t\t", (double)values[i]);
    }
}
#endif

/// Component-wise comparison of two affine transforms with a small epsilon,
/// tolerant of floating-point rounding in a/b/c/d/tx/ty.
BOOL __isTransformEqual(CGAffineTransform transform1, CGAffineTransform transform2)
{
    const CGFloat epsilon = 0.0001;
    return ABS(transform1.a  - transform2.a)  < epsilon
        && ABS(transform1.b  - transform2.b)  < epsilon
        && ABS(transform1.c  - transform2.c)  < epsilon
        && ABS(transform1.d  - transform2.d)  < epsilon
        && ABS(transform1.tx - transform2.tx) < epsilon
        && ABS(transform1.ty - transform2.ty) < epsilon;
}


/// Builds the render transform for a video track from its preferredTransform.
/// For the three non-identity cardinal rotations the translation components
/// are patched so the rotated frame lands inside the render rect instead of
/// outside it. `ratio` is currently hard-coded to 1.0 (no extra scaling).
CGAffineTransform __transformationOfTrack(AVAssetTrack *track)
{
    CGSize normalSize = CGSizeZero;
    AVAssetTrack *videoTrack = track;
    // Orientation-corrected display size; components can come out negative
    // after the transform, hence the fabs() below.
    CGSize naturalSize          = [videoTrack naturalSize];
    normalSize          = CGSizeApplyAffineTransform(naturalSize, videoTrack.preferredTransform);
    normalSize.width    = fabs(normalSize.width);
    normalSize.height   = fabs(normalSize.height);

    
    CGSize videoSize = normalSize;
    
    // Fixed at 1.0; the commented-out code below once derived it from a
    // target output size.
    const CGFloat ratio = 1.0;
    
//    CGFloat ratio = self.videoSize.width/videoSize.width > self.videoSize.height/videoSize.height
//    ? self.videoSize.width/videoSize.width : self.videoSize.height/videoSize.height;
    
    // NOTE(review): ratioVideo is computed but never used — likely a leftover
    // of the disabled ratio logic above; confirm before removing.
    CGSize ratioVideo;
    ratioVideo.height = videoSize.height * ratio;
    ratioVideo.width  = videoSize.width * ratio;
    
    // Patch tx/ty per cardinal rotation (90° / 180° / 270°) so the rotated
    // content is translated back into the visible area.
    CGAffineTransform transform = videoTrack.preferredTransform;
    if (__isTransformEqual(videoTrack.preferredTransform, CGAffineTransformMakeRotation(M_PI_2))) {
        transform.tx = videoSize.width;
    } else if (__isTransformEqual(videoTrack.preferredTransform, CGAffineTransformMakeRotation(M_PI))) {
        transform.tx = videoSize.width;
        transform.ty = videoSize.height;
    } else if (__isTransformEqual(videoTrack.preferredTransform, CGAffineTransformMakeRotation(M_PI_2 * 3))) {
        transform.ty = videoSize.height;
    }
    
    // Scale the translation and the matrix by ratio (a no-op while ratio==1).
    transform.tx = transform.tx * ratio;
    transform.ty = transform.ty * ratio;
    return CGAffineTransformScale(transform,ratio,ratio);
}


/// Inspects a video track and computes the render geometry for export:
/// the orientation-corrected transform, a target render size whose long
/// side is capped at 1280 with even dimensions, the applied scale factor,
/// and the track's estimated data rate.
/// @param track The source video track.
/// @return A fully-populated MPTVideoInfo value.
static inline MPTVideoInfo detectVideoSize(AVAssetTrack *track) {
    MPTVideoInfo info;
    const CGFloat largestSide = 1280.0;

    // Orientation-corrected size: applying the transform can flip one or
    // both dimensions negative, so work with absolute values.
    info.transform = __transformationOfTrack(track);
    CGSize size = CGSizeApplyAffineTransform(track.naturalSize, info.transform);
    CGFloat width  = fabs(size.width);
    CGFloat height = fabs(size.height);

    info.isLandscape = width >= height;
    info.ratio = width / height;
    // Logical || — the original used bitwise |, which only worked because
    // the relational operators bind tighter and yield 0/1.
    info.isNeedResize = (width > largestSide) || (height > largestSide);
    info.scale = 1.0;

    CGSize targetSize = CGSizeMake(width, height);
    if (info.isNeedResize) {
        // Scale so the long side equals largestSide, then floor the short
        // side to an even number (even dimensions are required by common
        // H.264 encoder configurations).
        if (info.isLandscape) {
            info.scale = largestSide / width;
            targetSize.width = largestSide;
            targetSize.height = floor(height * info.scale);
            if ((int)targetSize.height % 2) {
                targetSize.height--;
            }
        } else {
            info.scale = largestSide / height;
            targetSize.height = largestSide;
            targetSize.width = floor(width * info.scale);
            if ((int)targetSize.width % 2) {
                targetSize.width--;
            }
        }
    }
    info.targetSize = targetSize;
    // Fold the scale into the transform (the original also concatenated an
    // identity "fixTransform", which was a no-op and has been dropped).
    info.transform = CGAffineTransformConcat(info.transform, CGAffineTransformMakeScale(info.scale, info.scale));
    info.estimatedDataRate = track.estimatedDataRate;
    return info;
}

/// Builds the composition / video-composition pair from a local file URL.
/// Trims to `assetTimeRange` (the whole asset when the range is zero),
/// applies the detected orientation/resize transform, and fills the audio
/// track with silence when the source has none.
/// @param fileURL File URL of the source movie.
/// @return NO when the file has no video track or the video insert fails.
- (BOOL)dealWithFile:(NSURL *)fileURL {
    AVAsset *asset = [AVAsset assetWithURL:fileURL];

    NSArray *videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    NSArray *audioTracks = [asset tracksWithMediaType:AVMediaTypeAudio];
    if (videoTracks.count == 0) {
        return NO;
    }

    CMTimeRange assetTimeRange = self.assetTimeRange;
    if (CMTimeRangeEqual(assetTimeRange, kCMTimeRangeZero)) {
        // Zero range means "use the whole asset".
        assetTimeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
    }

    AVAssetTrack *videoTrack = videoTracks.firstObject;
    AVAssetTrack *audioTrack = audioTracks.firstObject;

    MPTVideoInfo sizeInfo = detectVideoSize(videoTrack);
    self.videoInfo = sizeInfo;

    CMTime frameDuration = videoTrack.minFrameDuration;
    if (!CMTIME_IS_VALID(frameDuration) || CMTimeCompare(frameDuration, kCMTimeZero) <= 0) {
        // minFrameDuration can be invalid for some assets; fall back to 30fps
        // so the video composition stays valid.
        frameDuration = CMTimeMake(1, 30);
    }

    AVMutableComposition *composition = [AVMutableComposition composition];
    AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

    NSError *error = nil;
    // Check the BOOL return (not the error pointer) — a failed video insert
    // means the composition is unusable.
    if (![compositionVideoTrack insertTimeRange:assetTimeRange ofTrack:videoTrack atTime:kCMTimeZero error:&error]) {
        return NO;
    }
    if (audioTrack) {
        [compositionAudioTrack insertTimeRange:assetTimeRange ofTrack:audioTrack atTime:kCMTimeZero error:nil];
    } else {
        // Bug fix: mute only the trimmed duration, not the full asset
        // duration, consistent with -dealWithAsset:. The original padded
        // silence past the end of a trimmed video.
        [self muteAudio:compositionAudioTrack duration:assetTimeRange.duration];
    }

    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    AVMutableVideoCompositionInstruction *videoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    videoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, assetTimeRange.duration);

    AVMutableVideoCompositionLayerInstruction *videoCompositionLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
    [videoCompositionLayerInstruction setTransform:sizeInfo.transform atTime:kCMTimeZero];
    [videoCompositionLayerInstruction setOpacity:1.0 atTime:kCMTimeZero];

    videoCompositionInstruction.layerInstructions = @[videoCompositionLayerInstruction];
    videoComposition.instructions = @[videoCompositionInstruction];
    videoComposition.renderSize = sizeInfo.targetSize;
    videoComposition.frameDuration = frameDuration;

    self.composition = composition;
    self.videoComposition = videoComposition;
    self.audioMix = nil;

    return YES;
}

/// Builds the composition / video-composition pair from an existing asset.
/// Trims to `assetTimeRange` (the whole asset when the range is zero),
/// applies the detected orientation/resize transform, and fills the audio
/// track with silence when the source has none.
/// @param asset The source asset.
/// @return NO when the asset has no video track or the video insert fails.
- (BOOL)dealWithAsset:(AVAsset *)asset
{
    NSArray *videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    NSArray *audioTracks = [asset tracksWithMediaType:AVMediaTypeAudio];
    if (videoTracks.count == 0) {
        return NO;
    }

    CMTimeRange assetTimeRange = self.assetTimeRange;
    if (CMTimeRangeEqual(assetTimeRange, kCMTimeRangeZero)) {
        // Zero range means "use the whole asset".
        assetTimeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
    }

    AVAssetTrack *videoTrack = videoTracks.firstObject;
    AVAssetTrack *audioTrack = audioTracks.firstObject;

    MPTVideoInfo sizeInfo = detectVideoSize(videoTrack);
    self.videoInfo = sizeInfo;

    // Bug fix: nominalFrameRate is a float; the original passed it straight
    // into CMTimeMake's int32 timescale, truncating (e.g. 29.97 -> 29) and
    // producing an invalid CMTime when the rate is 0. Round it and fall back
    // to 30fps for a zero/unknown rate.
    float frameRate = videoTrack.nominalFrameRate;
    if (frameRate <= 0.0f) {
        frameRate = 30.0f;
    }
    CMTime frameDuration = CMTimeMake(1, (int32_t)lroundf(frameRate));

    AVMutableComposition *composition = [AVMutableComposition composition];
    AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

    NSError *error = nil;
    // Check the BOOL return (not the error pointer) — a failed video insert
    // means the composition is unusable.
    if (![compositionVideoTrack insertTimeRange:assetTimeRange ofTrack:videoTrack atTime:kCMTimeZero error:&error]) {
        return NO;
    }
    if (audioTrack) {
        [compositionAudioTrack insertTimeRange:assetTimeRange ofTrack:audioTrack atTime:kCMTimeZero error:nil];
    } else {
        // Pad the audio track with silence for the trimmed duration.
        [self muteAudio:compositionAudioTrack duration:assetTimeRange.duration];
    }

    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    AVMutableVideoCompositionInstruction *videoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    videoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, assetTimeRange.duration);

    AVMutableVideoCompositionLayerInstruction *videoCompositionLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
    [videoCompositionLayerInstruction setTransform:sizeInfo.transform atTime:kCMTimeZero];

    videoCompositionInstruction.layerInstructions = @[videoCompositionLayerInstruction];
    videoComposition.instructions = @[videoCompositionInstruction];
    videoComposition.renderSize = sizeInfo.targetSize;
    videoComposition.frameDuration = frameDuration;

    self.composition = composition;
    self.videoComposition = videoComposition;
    self.audioMix = nil;
    return YES;
}

/// Fills `track` with silence up to `duration` by repeatedly inserting the
/// bundled "mute.mp3" clip end to end.
/// @param track The composition audio track to fill.
/// @param duration Total length of silence to insert.
- (void)muteAudio:(AVMutableCompositionTrack *)track duration:(CMTime)duration {
    NSString *mutePath = [[NSBundle mainBundle] pathForResource:@"mute" ofType:@"mp3"];
    if (mutePath == nil) {
        // Resource missing from the bundle — fileURLWithPath:nil would throw.
        return;
    }
    AVAsset *asset = [AVAsset assetWithURL:[NSURL fileURLWithPath:mutePath]];
    CMTime audioTrackDuration = [asset duration];
    AVAssetTrack *audioTrack = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject;
    if (audioTrack == nil || CMTimeCompare(audioTrackDuration, kCMTimeZero) <= 0) {
        // No audio track would have crashed the original's [..][0]; a
        // zero-length clip would make the loop below spin forever.
        return;
    }

    CMTime insertTime = kCMTimeZero;
    while (CMTimeCompare(insertTime, duration) < 0) {
        // Insert either a whole mute clip or just the remaining tail.
        CMTime remaining = CMTimeSubtract(duration, insertTime);
        CMTime insertDuration = (CMTimeCompare(remaining, audioTrackDuration) <= 0) ? remaining : audioTrackDuration;
        if (![track insertTimeRange:CMTimeRangeMake(kCMTimeZero, insertDuration) ofTrack:audioTrack atTime:insertTime error:nil]) {
            // Bail out rather than loop forever when an insert fails.
            break;
        }
        insertTime = CMTimeAdd(insertTime, insertDuration);
#ifdef DEBUG
        NSLog(@"目标时长:%f", CMTimeGetSeconds(duration));
        NSLog(@"还差时长:%f", CMTimeGetSeconds(remaining));
        NSLog(@"插入时长:%f", CMTimeGetSeconds(insertDuration));
        NSLog(@"插入位置:%f", CMTimeGetSeconds(insertTime));
#endif
    }
}

/// Wraps the video in a Core Animation layer tree so post-processing layers
/// (e.g. a watermark) can be composited on top. The logo layer itself is
/// currently disabled; only the pass-through video layer is installed.
- (void)aspectRatioLogo:(AVMutableVideoComposition *)videoComposition size:(MPTVideoInfo)sizeInfo {
    CGRect renderFrame = CGRectMake(0, 0, sizeInfo.targetSize.width, sizeInfo.targetSize.height);

    CALayer *parentLayer = [CALayer new];
    parentLayer.frame = renderFrame;
    CALayer *videoLayer = [CALayer new];
    videoLayer.frame = renderFrame;

    // Square frame anchored to the far short side; retained only for the
    // disabled logo layer below.
    CGFloat shortSide = sizeInfo.isLandscape ? sizeInfo.targetSize.height : sizeInfo.targetSize.width;
    CGFloat diff = fabs(sizeInfo.targetSize.width - sizeInfo.targetSize.height);
    CGRect logoFrame = renderFrame;
    logoFrame.size = CGSizeMake(shortSide, shortSide);
    if (sizeInfo.isLandscape) {
        logoFrame.origin.x += diff;
    } else {
        logoFrame.origin.y += diff;
    }

    [parentLayer addSublayer:videoLayer];
//    CALayer *logoLayer = [CALayer new];
//    logoLayer.frame = logoFrame;
//    logoLayer.contents = (id)[UIImage imageNamed:@"Logo_hd.png"].CGImage;
//    [parentLayer addSublayer:logoLayer];
    videoComposition.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
}

/// Maps the asset's preferredTransform rotation matrix to an angle in
/// radians (0, π/2, π, or 3π/2). Translation components are ignored, as in
/// the original. Returns 0.0 for transforms matching none of the four
/// cardinal rotations.
/// @param asset The asset whose orientation to inspect.
/// @return Rotation angle in radians.
- (CGFloat)assetRotation:(AVAsset *)asset {
    CGAffineTransform t = asset.preferredTransform;
    // Tolerant comparison: exact float `==` (as in the original) can miss
    // transforms whose a/b/c/d carry rounding error.
    BOOL (^near)(CGFloat, CGFloat) = ^BOOL(CGFloat x, CGFloat y) {
        return ABS(x - y) < 0.0001;
    };
    if (near(t.a, 0.0) && near(t.b, 1.0) && near(t.c, -1.0) && near(t.d, 0.0)) {
        return M_PI_2;      // 90°
    }
    if (near(t.a, 0.0) && near(t.b, -1.0) && near(t.c, 1.0) && near(t.d, 0.0)) {
        return M_PI_2 * 3;  // 270°
    }
    if (near(t.a, -1.0) && near(t.b, 0.0) && near(t.c, 0.0) && near(t.d, -1.0)) {
        return M_PI;        // 180°
    }
    return 0.0;             // identity or unrecognised transform
}

@end
