//
//  VCAssetCreatManger.m
//  VideoCollage
//
//  Created by Mac on 2018/6/25.
//  Copyright © 2018年 stoneobs.qq.com. All rights reserved.
//

#import "VCAssetCreatManger.h"
#import "UIImage+STTools.h"
#import "NSBundle+STSystemTool.h"
#import "AVAsset+STTools.h"
#import <AVFoundation/AVFoundation.h>

#define MediaFileName @"videoCollgetTMKj_export.mp4"
#define videoBackColor FlatWhite
@interface VCAssetCreatManger()
@property(nonatomic, strong) AVAssetExportSession                     *exportSession;/**< The export session currently in flight; held so a newly started export can cancel the previous one (see the `cancelExport` calls before each export). */

@end
@implementation VCAssetCreatManger
/// Shared singleton instance of the asset creation manager.
+ (VCAssetCreatManger *)manger{
    static VCAssetCreatManger *sharedInstance = nil;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        sharedInstance = [[VCAssetCreatManger alloc] init];
    });
    return sharedInstance;
}
//Creates the final media for the collage view.
//If every selected item is an image, the view is snapshotted and saved as a
//PNG under the Documents directory; otherwise the videos are composed once
//every item's AVAsset reports ready.
//`handle` receives (success, error, outputPath, image).
//NOTE(review): the `progress` parameter is only forwarded, never invoked here.
- (void)cgreateAssetWithView:(VCItemView *)view progress:(createProgressHandle)progress handle:(createHandle)handle{
    //All selected assets are images: snapshot the view and write a PNG.
    if (view.isAllImage) {
        [view hideNOResultButton];
        UIImage * image = [UIImage st_snapshot:view];
        [view showAllButton];
        //Random salt for a unique-ish file name.
        //FIX: the original `arc4random() % 100000000000` was a no-op — the
        //modulus exceeds UINT32_MAX, so it never changed the 32-bit value.
        NSInteger rand = (NSInteger)arc4random() + 1000000000;
        NSString * outputPath = [NSString stringWithFormat:@"%@/%ld.png",NSBundle.st_documentDirectoryPath,(long)rand];
        bool success =  [UIImagePNGRepresentation(image) writeToFile:outputPath atomically:YES]; // writeToFile: returns YES on success
        if (success) {
            if (handle) {
                handle(YES,nil,outputPath,image);
            }
        }else{
            if (handle) {
                handle(NO,nil,@"",nil);
            }
        }
        
    }else{
        //At least one video: wait until every AVAsset is ready before composing.
        __weak typeof(view) weakView =  view;
        bool judgeAllPathLoad = [view judgeAllPathLoad];
        bool judgeAllAvAssetReady = [view judgeAllAvAssetReady];
        if (judgeAllPathLoad && judgeAllAvAssetReady) {
            [self cgreateVideotWithView:weakView progress:progress handle:handle];
        }else{
            [view setAllAvAssetReady:^(bool isAllReady) {
                [self cgreateVideotWithView:weakView progress:progress handle:handle];
                //Clear the callback so it only fires once.
                [weakView setAllAvAssetReady:nil];
            }];
        }
    }
}
//开始合成
//Composes every started video in the collage view into one AVMutableComposition,
//overlays the photo items as CALayers, and exports the result to
//<Documents>/export.mp4 (MPEG-4, medium-quality preset). `handle` is invoked on
//the main queue with (success, error, outputPath, image).
//NOTE(review): the `progress` parameter is never invoked in this method.
- (void)cgreateVideotWithView:(VCItemView *)view progress:(createProgressHandle)progress handle:(createHandle)handle{
    
    DDLogInfo(@"正在合成");
    //Add each selected video into the composition, one track per item.
    AVMutableComposition *composition = [AVMutableComposition  composition];
    //Running least common multiple of every video's timescale, seeded with the
    //project frame-rate constant; used below as the frame-duration timescale.
    int32_t minLcm = secendframes;
    for (TZAssetModel * model in view.assetModles) {
        
        // DDLogInfo(@"AVASEET时长%@",model.avasset.duration);
        //Track IDs are 1-based (0 is kCMPersistentTrackID_Invalid).
        NSInteger index = [view.assetModles indexOfObject:model] + 1;
        if (model.didBegin && model.avasset && model.type == TZAssetModelMediaTypeVideo) {
            minLcm = [self.class lcmWithNumber1:minLcm Number2:model.avasset.duration.timescale];
            [self addAsset:model.avasset view:view toComposition:composition withTrackID:index];
        }
    }
    CGFloat  width = 500;
    CGFloat  height = width * 0.75;
    
    VCItemSizeModel * model = view.sizeModel;
    CGFloat maxWithd = 960;//maximum export width, in pixels
    CGFloat viewMaxWithd = 0.8 * UIScreenWidth;
    //Scale factor from the on-screen preview size up to the export size.
    CGFloat fangda = maxWithd / viewMaxWithd;
    
    CGFloat minSizeLength = MIN(model.size.width, model.size.height);
    CGFloat maxSizeLength = MAX(model.size.width, model.size.height);
    
    //Aspect ratio: short side over long side.
    CGFloat bili = minSizeLength / maxSizeLength;
    CGSize finshSize  = CGSizeZero;
    if (maxSizeLength == model.size.width) {
        //Width is the long side: clamp width to maxWithd, scale the height down.
        CGFloat dealHeight = maxWithd * bili;
        
        finshSize = CGSizeMake(maxWithd, dealHeight);
    }else{
        //Height is the long side: clamp height to maxWithd, scale the width down.
        CGFloat dealWith = maxWithd * bili;
        
        finshSize = CGSizeMake(dealWith, maxWithd);
    }
    width = finshSize.width;
    height = finshSize.height;
    //Scale the preview corner radius and item inset up to the real video size.
    model.cornerRadius = model.cornerRadius * fangda;
    model.insetValue = model.insetValue * fangda;
    
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.renderSize =  CGSizeMake(width,height);
    CMTime videoTime = view.shortDuringCMTime;
    //Use the LCM of all asset timescales as the frame-duration timescale.
    //    if (minLcm > 10000) {
    //        minLcm = minLcm / 10;
    //    }
    //NOTE(review): value / timescale truncates to whole seconds — confirm this
    //frame duration is what is intended.
    CMTime frameDuration = CMTimeMake(videoTime.value / videoTime.timescale, minLcm);
    videoComposition.frameDuration = frameDuration;
    NSLog(@"videoComposition.frameDuration");
    CMTimeShow(videoComposition.frameDuration);
    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    //Background color of the rendered frames.
    instruction.backgroundColor = videoBackColor.CGColor;
    instruction.timeRange = [composition.tracks.firstObject timeRange];
    
    
    //Set up the container layer (parentLayer) and the video layer (videoLayer).
    CALayer * parentLayer = CALayer.layer;
    parentLayer.backgroundColor = UIColor.whiteColor.CGColor;
    parentLayer.masksToBounds = YES;
    CALayer * videoLayer = CALayer.layer;
    videoLayer.backgroundColor = UIColor.whiteColor.CGColor;
    parentLayer.frame = CGRectMake(0, 0, videoComposition.renderSize.width, videoComposition.renderSize.height);
    parentLayer.cornerRadius = model.cornerRadius;
    videoLayer.frame = CGRectMake(0, 0, videoComposition.renderSize.width, videoComposition.renderSize.height);
    [parentLayer addSublayer:videoLayer];
    
    NSMutableArray * videoCompositionLayerInstructions = [NSMutableArray new];
    NSArray *newitemFrameArray = [VCItemTools frameWithRank:view.rank itemInset:model.insetValue superSize:finshSize];
    DDLogInfo(@"视频最终宽度:%0.2f 高度:%0.2f",width,height);
    for (NSInteger i = 0; i < view.assetModles.count; i ++) {
        TZAssetModel * model = view.assetModles[i];
        VCItemFrameModel * frameModel = newitemFrameArray[i];
        UIImageView * button = view.itemImageViewArray[i];
        if (!button.hidden ) {
            if (model.type == TZAssetModelMediaTypeVideo && model.avasset) {
                CGSize size;//natural size of the source video (the last track enumerated wins)
                for (AVAssetTrack *track in model.avasset.tracks) {
                    size  = track.naturalSize;
                    
                }
                //Position and scale this video item into its collage cell.
                AVMutableVideoCompositionLayerInstruction *frontLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstruction];
                CGAffineTransform trance = CGAffineTransformMakeTranslation(frameModel.center.x - frameModel.scaleWitdh * width / 2  , frameModel.center.y - frameModel.scaleHeight * height / 2);
                trance = CGAffineTransformScale(trance, frameModel.scaleWitdh * width / size.width, frameModel.scaleHeight * height / size.height);
                [frontLayerInstruction setTransform:trance atTime:kCMTimeZero];
                frontLayerInstruction.trackID = i+1;
                [videoCompositionLayerInstructions addObject:frontLayerInstruction];
                
            }
            if (model.type == TZAssetModelMediaTypePhoto) {
                //Overlay a photo item as a CALayer on top of the video layer.
                CALayer * waterLayer = CALayer.layer;
                //The layer Y axis is flipped relative to UIKit coordinates.
                //anchorPointY derived empirically: 0.5 + (1 - 2 * centerYFraction) * (1 / heightFraction).
                waterLayer.frame = CGRectMake(frameModel.center.x - frameModel.scaleWitdh * width / 2, ( frameModel.center.y - frameModel.scaleHeight * height / 2), frameModel.scaleWitdh * width, frameModel.scaleHeight * height);
                waterLayer.backgroundColor = FlatRed.CGColor;
                waterLayer.position = frameModel.center;
                CGFloat anchorPointX = 0.5 ;
                CGFloat centYScale = 1 - (frameModel.center.y / height);//Y flipped: fraction measured from the opposite edge
                CGFloat anchorPointY = 0.5 + (1.0 - 2.0 * centYScale) * (1.0 / (frameModel.scaleHeight));
                waterLayer.anchorPoint = CGPointMake(anchorPointX, anchorPointY);
                waterLayer.masksToBounds = YES;
                UIImageView *  button = view.itemImageViewArray[i];
                [button st_setDottedLineBrderWidth:1 borderColor:UIColor.clearColor];
                UIImage * image = [UIImage st_snapshot:button];
                [waterLayer setContents:image.CGImage];
                waterLayer.masksToBounds = YES;
                [parentLayer addSublayer:waterLayer];
            }
            
        }
    }
    
    
    
    instruction.layerInstructions = videoCompositionLayerInstructions.copy;
    videoComposition.instructions = @[instruction];
    
    videoComposition.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
    //Cancel any export still running before starting a new one.
    [_exportSession cancelExport];
#warning AVAssetExportPresetMediumQuality 从低-中-高 花费时间指数级别增长  通常低质量2秒 一般质量 4秒 高质量 16秒
    AVAssetExportSession *exporter = [AVAssetExportSession exportSessionWithAsset:composition presetName:AVAssetExportPresetMediumQuality];
    self.exportSession = exporter;
    NSString *urlPath = [NSBundle.st_documentDirectoryPath stringByAppendingPathComponent:@"export.mp4"];
    NSFileManager * fileManager = NSFileManager.defaultManager;
    //Remove any stale output file before exporting.
    if ([fileManager fileExistsAtPath:urlPath]) {
        [fileManager removeItemAtPath:urlPath error:nil];
    }
    exporter.outputURL = [NSURL fileURLWithPath:urlPath];
    exporter.outputFileType = AVFileTypeMPEG4;
    exporter.shouldOptimizeForNetworkUse = YES;
    exporter.videoComposition = videoComposition;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        
        AVAssetExportSessionStatus status = exporter.status;
        
        //Report the result back on the main queue.
        dispatch_async(dispatch_get_main_queue(), ^{
            
            DDLogInfo(@"导出状态%ld",status);
            [SVProgressHUD dismiss];
            if (status == AVAssetExportSessionStatusCompleted) {
                //
                DDLogInfo(@"视频合成成功");
                if (handle) {
                    handle(YES,nil,urlPath,nil);
                }
            }else if(status == AVAssetExportSessionStatusCancelled){
                
                DDLogInfo(@"上一个导出session取消%@",exporter.error);
            }else{
                if (handle) {
                    handle(NO,nil,nil,nil);
                }
                DDLogInfo(@"%@",exporter.error);
                [SVProgressHUD showErrorWithStatus:@"create error"];
            }
        });
        
    }];
}
//Mixes a local video file with a music track, looping the music so it covers
//the full video length, and exports the result to
//<tmp>/videoCollgetTMKj_export.mp4 (MPEG-4, medium-quality preset).
//`handle` receives (success, error, outputPath, image) on the main queue.
//NOTE(review): the `progress` parameter is never invoked in this method.
- (void)meargeAssetWithLocalVideoPath:(NSString *)localVideoPath musicPatth:(NSString *)path progress:(createProgressHandle)progress handle:(createHandle)handle{
    
    NSURL * url = [NSURL fileURLWithPath:path];
    AVURLAsset* audioAsset = [[AVURLAsset alloc] initWithURL:url options:nil];
    AVURLAsset* videoAsset = (id)[AVAsset assetWithURL:[NSURL fileURLWithPath:localVideoPath]];
    //Whole-second length of the video; used for the range math below.
    NSRange videoRange = NSMakeRange(0, videoAsset.duration.value / videoAsset.duration.timescale);
    //Composition that will hold the video track plus the looped audio track.
    AVMutableComposition* mixComposition = [AVMutableComposition composition];
    //Reminder: CMTimeRangeMake(start, duration) — the second argument is a
    //DURATION, not an end time.
    CMTime startTime = CMTimeMakeWithSeconds(videoRange.location, videoAsset.duration.timescale);
    CMTime videoDuration = CMTimeMakeWithSeconds(videoRange.length, videoAsset.duration.timescale);
    
    CMTimeRange videoTimeRange = CMTimeRangeMake(startTime, videoDuration);
    if (self.isImageToVideo) {
        //An image-backed video keeps playing for the full length of the music.
        videoTimeRange = CMTimeRangeMake(startTime, audioAsset.duration);
    }
    //Video track.
    AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionVideoTrack insertTimeRange:videoTimeRange ofTrack:([videoAsset tracksWithMediaType:AVMediaTypeVideo].count>0) ? [videoAsset tracksWithMediaType:AVMediaTypeVideo].firstObject : nil atTime:kCMTimeZero error:nil];
    
    
    //Audio track: loop the music end to end until the video length is covered.
    CMTime videoTime = videoAsset.duration;
    NSInteger videoLength = videoTime.value / videoTime.timescale;
    
    CMTime musicTime = audioAsset.duration;
    NSInteger musicLength = musicTime.value / musicTime.timescale;
    
    DDLogInfo(@"video时间 %ld  音乐时间%ld",videoLength,musicLength);
    AVAssetTrack *sourceAudioTrack = [audioAsset tracksWithMediaType:AVMediaTypeAudio].firstObject;
    //FIX: guard against a zero-length or track-less music file — the original
    //divided by musicLength unconditionally.
    if (musicLength > 0 && sourceAudioTrack != nil) {
        NSInteger max = videoLength / musicLength + 1;
        //FIX: the original created a new track per iteration, passed an END
        //time where CMTimeRangeMake expects a DURATION, read source ranges
        //beyond the music's length for i >= 1, and inserted every segment at
        //time zero. One track with back-to-back segments is the intent.
        AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid ];
        for (NSInteger i = 0; i < max; i ++ ) {
            //Clamp the last chunk so the audio never runs past the video end.
            NSInteger chunkLength = MIN(musicLength, videoLength - i * musicLength);
            if (chunkLength <= 0) {
                break;
            }
            CMTimeRange chunkRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(chunkLength, audioAsset.duration.timescale));
            CMTime insertAt = CMTimeMakeWithSeconds(i * musicLength, audioAsset.duration.timescale);
            CMTimeRangeShow(chunkRange);
            NSError * musiceror = nil;
            [compositionAudioTrack insertTimeRange:chunkRange
                                           ofTrack:sourceAudioTrack
                                            atTime:insertAt
                                             error:&musiceror];
            DDLogInfo(@"musiceror = %@",musiceror);
        }
    }
    [_exportSession cancelExport];
    //Export the mixed composition as MPEG-4.
    AVAssetExportSession *assetExportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
    self.exportSession = assetExportSession;
    NSString *outPutPath = [NSTemporaryDirectory() stringByAppendingPathComponent:MediaFileName];
    //Remove any stale output file before exporting.
    NSURL *outPutUrl = [NSURL fileURLWithPath:outPutPath];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outPutPath])
    {
        [[NSFileManager defaultManager] removeItemAtPath:outPutPath error:nil];
    }
    assetExportSession.outputFileType = AVFileTypeMPEG4;
    assetExportSession.outputURL = outPutUrl;
    assetExportSession.shouldOptimizeForNetworkUse = YES;
    [assetExportSession exportAsynchronouslyWithCompletionHandler:^{
        
        AVAssetExportSessionStatus status = assetExportSession.status;
        
        //Report the result back on the main queue.
        dispatch_async(dispatch_get_main_queue(), ^{
            [SVProgressHUD dismiss];
            DDLogInfo(@"导出状态%ld",status);
            if (status == AVAssetExportSessionStatusCompleted) {
                //
                DDLogInfo(@"音乐合成成功");
                //AVURLAsset* videoAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:urlPath] options:nil];
                if (handle) {
                    handle(YES,nil,outPutPath,nil);
                }
            }else if(status == AVAssetExportSessionStatusCancelled){
                
                DDLogInfo(@"上一个导出音乐session取消%@",assetExportSession.error);
            }else{
                if (handle) {
                    handle(NO,nil,nil,nil);
                }
                DDLogInfo(@"%@",assetExportSession.error);
                [SVProgressHUD showErrorWithStatus:@"create error"];
            }
            
        });
    }];
    
}
//将资源 加到视频处理中
//Inserts the asset's first video track into `composition` as a new mutable
//track with the given (1-based) track ID. The inserted range runs from zero to
//the view's shortest item duration.
- (void)addAsset:(AVAsset *)asset view:(VCItemView *)view toComposition:(AVMutableComposition *)composition withTrackID:(CMPersistentTrackID)trackID {
    
    CGFloat shortDuring = view.shortDuring;
    AVMutableCompositionTrack *videoTrack = [composition  addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:trackID];
    //Debug print of the shortest duration expressed at the project frame rate.
    CMTime shortTime =  CMTimeMakeWithSeconds(shortDuring, secendframes);
    CMTimeShow(shortTime);
    CMTime time = view.shortDuringCMTime;
    CMTimeRange timeRange = CMTimeRangeFromTimeToTime(kCMTimeZero,time);
    AVAssetTrack *assetVideoTrack = asset.firstVideoTrack;
    //FIX: passing a nil track to -insertTimeRange:… raises an exception, and
    //the original discarded the insertion error entirely.
    if (assetVideoTrack == nil) {
        NSLog(@"addAsset: asset has no video track, skipping trackID %d", trackID);
        return;
    }
    NSError *insertError = nil;
    if (![videoTrack insertTimeRange:timeRange ofTrack:assetVideoTrack atTime:kCMTimeZero error:&insertError]) {
        NSLog(@"addAsset: insertTimeRange failed: %@", insertError);
    }
    
}

//Composes a still image over a pre-rendered template video
//(<Documents>/video/<imageToVideoNormalVidelName>.mp4), overlaying the image as
//a CALayer, and exports to <Documents>/tempexport.mp4. Runs once the template
//asset's "tracks" property is loaded. `handle` receives
//(success, error, outputPath, image) on the main queue.
//NOTE(review): the `progress` parameter is never invoked in this method.
- (void)meargeImageToVideoPathView:(VCItemView *)view image:(UIImage *)image progress:(createProgressHandle)progress handle:(createHandle)handle{
    DDLogInfo(@"图片合成 音乐视频");
    //Composition that will hold the template video track.
    AVMutableComposition *composition = [AVMutableComposition  composition];
    NSString *orginurlPath = [NSBundle.st_documentDirectoryPath stringByAppendingPathComponent:[NSString stringWithFormat:@"%@/%@.mp4",@"video",imageToVideoNormalVidelName]];
    AVAsset * asset = [AVAsset assetWithURL:[NSURL fileURLWithPath:orginurlPath]];
    [asset whenProperties:@[@"tracks"] areReadyDo:^{
        //Insert the template video as track 1.
        AVMutableCompositionTrack *videoTrack = [composition  addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:1];
        CMTimeRange timeRange;
        CMTime shortvideoTime = CMTimeMakeWithSeconds(2.1, secendframes);//fixed ~2s clip — NOTE(review): unused, the range below uses asset.duration
        timeRange = CMTimeRangeFromTimeToTime(kCMTimeZero,asset.duration);
        AVAssetTrack *assetVideoTrack = asset.firstVideoTrack;
        [videoTrack insertTimeRange:timeRange ofTrack:assetVideoTrack atTime:kCMTimeZero error:nil];
        //Compute the export size from the view's size model (same math as
        //-cgreateVideotWithView:progress:handle:).
        CGFloat  width = 1000;
        CGFloat  height = width * 0.75;
        VCItemSizeModel * model = view.sizeModel;
        CGFloat maxWithd = 960;//maximum export width, in pixels
        CGFloat viewMaxWithd = 0.8 * UIScreenWidth;
        //Scale factor from the on-screen preview size up to the export size.
        CGFloat fangda = maxWithd / viewMaxWithd;
        
        CGFloat minSizeLength = MIN(model.size.width, model.size.height);
        CGFloat maxSizeLength = MAX(model.size.width, model.size.height);
        
        //Aspect ratio: short side over long side.
        CGFloat bili = minSizeLength / maxSizeLength;
        CGSize finshSize  = CGSizeZero;
        if (maxSizeLength == model.size.width) {
            //Width is the long side: clamp width to maxWithd, scale the height down.
            CGFloat dealHeight = maxWithd * bili;
            
            finshSize = CGSizeMake(maxWithd, dealHeight);
        }else{
            //Height is the long side: clamp height to maxWithd, scale the width down.
            CGFloat dealWith = maxWithd * bili;
            
            finshSize = CGSizeMake(dealWith, maxWithd);
        }
        model.insetValue = model.insetValue * fangda;
        width = finshSize.width;
        height = finshSize.height;
        
        
        AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
        videoComposition.renderSize =  CGSizeMake(width, height);
        CMTime videoTime = CMTimeMakeWithSeconds(2.1, secendframes);//fixed 2s — immediately overwritten with asset.duration below
        videoTime = asset.duration;
        videoComposition.frameDuration = CMTimeMake(videoTime.value/secendframes, secendframes);
        AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        //Background color of the rendered frames.
        instruction.backgroundColor = videoBackColor.CGColor;
        instruction.timeRange = [composition.tracks.firstObject timeRange];
        
        //Set up the container layer (parentLayer) and the video layer (videoLayer).
        CALayer * parentLayer = CALayer.layer;
        parentLayer.backgroundColor = UIColor.whiteColor.CGColor;
        CALayer * videoLayer = CALayer.layer;
        videoLayer.backgroundColor = UIColor.whiteColor.CGColor;
        parentLayer.frame = CGRectMake(0, 0, videoComposition.renderSize.width, videoComposition.renderSize.height);
        videoLayer.frame = CGRectMake(0, 0, videoComposition.renderSize.width, videoComposition.renderSize.height);
        [parentLayer addSublayer:videoLayer];
        
        NSMutableArray * videoCompositionLayerInstructions = [NSMutableArray new];
        NSArray *newitemFrameArray = [VCItemTools frameWithRank:1 itemInset:model.insetValue superSize:CGSizeMake(width, height)];
        
        //Single cell (rank 1): position the template video and overlay the image.
        for (NSInteger i = 0; i < 1; i ++) {
            VCItemFrameModel * frameModel = newitemFrameArray[i];
            CGSize size;//natural size of the template video (the last track enumerated wins)
            for (AVAssetTrack *track in asset.tracks) {
                size  = track.naturalSize;
                
            }
            AVMutableVideoCompositionLayerInstruction *frontLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstruction];
            CGAffineTransform trance = CGAffineTransformMakeTranslation(frameModel.center.x - frameModel.scaleWitdh * width / 2  , frameModel.center.y - frameModel.scaleHeight * height / 2);
            trance = CGAffineTransformScale(trance, frameModel.scaleWitdh * width / size.width, frameModel.scaleHeight * height / size.height);
            [frontLayerInstruction setTransform:trance atTime:kCMTimeZero];
            frontLayerInstruction.trackID = i+1;
            [videoCompositionLayerInstructions addObject:frontLayerInstruction];
            
            //Overlay the image as a CALayer.
            CALayer * waterLayer = CALayer.layer;
            //The layer Y axis is flipped relative to UIKit coordinates.
            //anchorPointY derived empirically: 0.5 + (1 - 2 * centerYFraction) * (1 / heightFraction).
            waterLayer.frame = CGRectMake(frameModel.center.x - frameModel.scaleWitdh * width / 2, ( frameModel.center.y - frameModel.scaleHeight * height / 2) , frameModel.scaleWitdh * width , frameModel.scaleHeight * height  );
            waterLayer.backgroundColor = UIColor.grayColor.CGColor;
            waterLayer.position = frameModel.center;
            CGFloat anchorPointX = 0.5 ;
            CGFloat centYScale = 1 - (frameModel.center.y / height);//Y flipped: fraction measured from the opposite edge
            CGFloat anchorPointY = 0.5 + (1.0 - 2.0 * centYScale) * (1.0 / (frameModel.scaleHeight));
            waterLayer.anchorPoint = CGPointMake(anchorPointX, anchorPointY);
            waterLayer.masksToBounds = YES;
            [waterLayer setContents:image.CGImage];
            [parentLayer addSublayer:waterLayer];
            
        }
        instruction.layerInstructions = videoCompositionLayerInstructions.copy;
        videoComposition.instructions = @[instruction];
        videoComposition.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
        //Cancel any export still running before starting a new one.
        [_exportSession cancelExport];
        AVAssetExportSession *exporter = [AVAssetExportSession exportSessionWithAsset:composition presetName:AVAssetExportPresetMediumQuality];
        self.exportSession = exporter;
        NSString *urlPath = [NSBundle.st_documentDirectoryPath stringByAppendingPathComponent:@"tempexport.mp4"];
        NSFileManager * fileManager = NSFileManager.defaultManager;
        //Remove any stale output file before exporting.
        if ([fileManager fileExistsAtPath:urlPath]) {
            [fileManager removeItemAtPath:urlPath error:nil];
        }
        exporter.outputURL = [NSURL fileURLWithPath:urlPath];
        exporter.outputFileType = AVFileTypeMPEG4;
        exporter.shouldOptimizeForNetworkUse = YES;
        exporter.videoComposition = videoComposition;
        [exporter exportAsynchronouslyWithCompletionHandler:^{
            
            AVAssetExportSessionStatus status = exporter.status;
            
            //Report the result back on the main queue.
            dispatch_async(dispatch_get_main_queue(), ^{
                [SVProgressHUD dismiss];
                DDLogInfo(@"导出状态%ld",status);
                if (status == AVAssetExportSessionStatusCompleted) {
                    //
                    DDLogInfo(@"视频合成成功");
                    if (handle) {
                        handle(YES,nil,urlPath,nil);
                    }
                    
                }else if(status == AVAssetExportSessionStatusCancelled){
                    
                    DDLogInfo(@"上一个导出session取消%@",exporter.error);
                }else{
                    if (handle) {
                        handle(NO,nil,nil,nil);
                    }
                    DDLogInfo(@"%@",exporter.error);
                    [SVProgressHUD showErrorWithStatus:@"create error"];
                }
            });
            
        }];
        
    }];
}

//Re-encodes the video at `path` into <Documents>/tempexport.mp4, applying the
//90° rotation transform from -videoAssetTrackTransform: (the render size swaps
//width and height to match). `handle` receives (success, error, outputPath,
//image) on the main queue.
- (void)exportVideoPath:(NSString*)path handle:(createHandle)handle{
    DDLogInfo(@"导出视频 到沙盒");
    //Composition that will receive the source video track.
    AVMutableComposition *composition = [AVMutableComposition  composition];
    
    //Ask for precise duration/timing so the insert range below is accurate.
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithBool:YES], AVURLAssetPreferPreciseDurationAndTimingKey, nil];
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:path] options:options];
    
    [asset whenProperties:@[@"tracks"] areReadyDo:^{
        DDLogInfo(@"导出视频 到沙盒 asset 准备好了");
        AVAssetTrack *videoAssetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
        
        AVMutableCompositionTrack *videoCompositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        [videoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoAssetTrack atTime:kCMTimeZero error:nil];
        
        //Natural size of the source (the last track enumerated wins).
        CGSize size;
        for (AVAssetTrack * track in asset.tracks) {
            size = track.naturalSize;
        }
        
        //Width/height swapped on purpose: the transform below rotates 90°.
        CGFloat  width = size.height;
        CGFloat  height = size.width;
        DDLogInfo(@"高度  width = %f height = %f",width,height);
        AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
        videoComposition.renderSize =  CGSizeMake(width, height);
        videoComposition.frameDuration =  videoAssetTrack.minFrameDuration;
        AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        //Background color of the rendered frames.
        instruction.backgroundColor = videoBackColor.CGColor;
        instruction.timeRange = [composition.tracks.firstObject timeRange];
        
        
        
        NSMutableArray * videoCompositionLayerInstructions = [NSMutableArray new];
        
        AVMutableVideoCompositionLayerInstruction *frontLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoAssetTrack];;
        //NOTE(review): this passes the composition track, not the source
        //videoAssetTrack — its naturalSize may differ from the source's; confirm.
        [frontLayerInstruction setTransform:[self videoAssetTrackTransform:videoCompositionTrack] atTime:kCMTimeZero];
        [videoCompositionLayerInstructions addObject:frontLayerInstruction];
        
        instruction.layerInstructions = videoCompositionLayerInstructions.copy;
        videoComposition.instructions = @[instruction];
        //Cancel any export still running before starting a new one.
        [self.exportSession cancelExport];
        
        
        AVAssetExportSession *exporter = [AVAssetExportSession exportSessionWithAsset:composition presetName:AVAssetExportPresetMediumQuality];
        self.exportSession = exporter;
        NSString *urlPath = [NSBundle.st_documentDirectoryPath stringByAppendingPathComponent:@"tempexport.mp4"];
        NSFileManager * fileManager = NSFileManager.defaultManager;
        //Remove any stale output file before exporting.
        if ([fileManager fileExistsAtPath:urlPath]) {
            [fileManager removeItemAtPath:urlPath error:nil];
        }
        exporter.outputURL = [NSURL fileURLWithPath:urlPath];
        exporter.outputFileType = AVFileTypeMPEG4;
        exporter.shouldOptimizeForNetworkUse = YES;
        exporter.videoComposition = videoComposition;
        [exporter exportAsynchronouslyWithCompletionHandler:^{
            
            AVAssetExportSessionStatus status = exporter.status;
            //Report the result back on the main queue.
            dispatch_async(dispatch_get_main_queue(), ^{
                [SVProgressHUD dismiss];
                DDLogInfo(@"导出状态%ld",status);
                if (status == AVAssetExportSessionStatusCompleted) {
                    //
                    DDLogInfo(@"视频导出成功");
                    if (handle) {
                        handle(YES,nil,urlPath,nil);
                    }
                    
                }else if(status == AVAssetExportSessionStatusCancelled){
                    
                    DDLogInfo(@"上一个导出session取消%@",exporter.error);
                }else{
                    if (handle) {
                        handle(NO,nil,nil,nil);
                    }
                    DDLogInfo(@"%@",exporter.error);
                    [SVProgressHUD showErrorWithStatus:@"create error"];
                }
            });
            
        }];
    }];
    
    
    //   }];
    
}
#pragma mark --Least common multiple
//Returns the least common multiple of two int32 values.
//Used to pick a frame-duration timescale common to all asset timescales.
+ (int32_t)lcmWithNumber1:(int32_t)num1 Number2:(int32_t)num2{
    //lcm(0, n) is 0 by convention. FIX: this guard also prevents the
    //subtraction-based gcd helper from looping forever on a 0 input.
    if (num1 == 0 || num2 == 0) {
        return 0;
    }
    int32_t gcd = [VCAssetCreatManger gcdWithNumber1:num1 Number2:num2];
    //lcm = (a * b) / gcd(a, b).
    //FIX: divide before multiplying — `num1 * num2` overflows int32_t for
    //large timescales (e.g. 44100 * 600 already exceeds INT32_MAX / 100).
    return num1 / gcd * num2;
    
}
//Returns the greatest common divisor of two int32 values (gcd(a, 0) == a).
//FIX: uses the modulo-based Euclidean algorithm — the original
//subtraction-based loop never terminated when either argument was 0 and ran
//in O(n) for lopsided inputs. Results are identical for positive inputs.
+ (int32_t)gcdWithNumber1:(int32_t)num1 Number2:(int32_t)num2{
    while (num2 != 0) {
        int32_t remainder = num1 % num2;
        num1 = num2;
        num2 = remainder;
    }
    return num1;
    
}

//Returns the affine transform applied when exporting a video track.
//`degrees` is hardcoded to 90, so in practice this always produces a 90°
//clockwise rotation sized by the track's naturalSize; the other branches are
//kept for reference.
- (CGAffineTransform)videoAssetTrackTransform:(AVAssetTrack *)videoAssetTrack {
    int degrees = 90;//rotation to apply — hardcoded, so only the 90° branch ever runs
    CGAffineTransform transform = CGAffineTransformIdentity;
    if (degrees != 0) {
        CGAffineTransform translateToCenter = CGAffineTransformIdentity;
        if (degrees == 90) {
            // Rotate 90° clockwise.
            translateToCenter = CGAffineTransformMakeTranslation(videoAssetTrack.naturalSize.height, 0.0);
            transform = CGAffineTransformRotate(translateToCenter, M_PI_2);
        } else if(degrees == 180){
            // Rotate 180° clockwise.
            translateToCenter = CGAffineTransformMakeTranslation(videoAssetTrack.naturalSize.width, videoAssetTrack.naturalSize.height);
            transform = CGAffineTransformRotate(translateToCenter, M_PI);
        } else if(degrees == 270){
            // Rotate 270° clockwise.
            translateToCenter = CGAffineTransformMakeTranslation(0.0, videoAssetTrack.naturalSize.width);
            transform = CGAffineTransformRotate(translateToCenter, M_PI_2 + M_PI);
        }else if(degrees == -180){
            // Flip about the x axis (vertical mirror).
            // Affine coordinates follow iOS screen coordinates: +x right, +y down.
#if 1
            //transform = CGAffineTransformTranslate(transform, videoAssetTrack.naturalSize.width, videoAssetTrack.naturalSize.height);
            //transform = CGAffineTransformRotate(transform, 90/180.0f*M_PI); // rotate 90 degrees
            //transform = CGAffineTransformScale(transform, 1.0, -1.0); // flip the video vertically
            //transform = CGAffineTransformScale(transform, -1.0, 1.0);  // flip the video horizontally
            //transform = CGAffineTransformScale(transform, 1.0, 1.0); // keep the original size
            
            //original video
            //         ___
            //        |   |
            //        |   |
            //     -------------------- +x
            //    |
            //    |
            //    |
            //    |
            //    |
            //    |
            //    |
            //    +y
            
            //transform = CGAffineTransformScale(transform, 1.0, -1.0); // flip the video vertically
            
            //     -------------------- +x
            //    |   |   |
            //    |   |___|
            //    |
            //    |
            //    |
            //    |
            //    |
            //    +y
            
            //transform = CGAffineTransformTranslate(transform, 0, -videoAssetTrack.naturalSize.height);// translate the video back to its original position
            
            //         ___
            //        |   |
            //        |   |
            //     -------------------- +x
            //    |
            //    |
            //    |
            //    |
            //    |
            //    |
            //    |
            //    +y
            
            transform = CGAffineTransformScale(transform, 1.0, -1.0); // flip the video vertically
            transform = CGAffineTransformTranslate(transform, 0, -videoAssetTrack.naturalSize.height);
#else
            transform = videoAssetTrack.preferredTransform;
            transform = CGAffineTransformTranslate(transform, 0, -videoAssetTrack.naturalSize.height);
#endif
        }
    }
    
#if 0 - cropVideo
    //Here we shift the viewing square up to the TOP of the video so we only see the top
    CGAffineTransform t1 = CGAffineTransformMakeTranslation(videoAssetTrack.naturalSize.height, 0 );
    
    //Use this code if you want the viewing square to be in the middle of the video
    //CGAffineTransform t1 = CGAffineTransformMakeTranslation(videoAssetTrack.naturalSize.height, -(videoAssetTrack.naturalSize.width - videoAssetTrack.naturalSize.height) /2 );
    
    //Make sure the square is portrait
    transform = CGAffineTransformRotate(t1, M_PI_2);
#endif
    
    return transform;
}

@end

