/*
 Reference for AVFoundation export error code 11841:
 https://blog.csdn.net/jeffasd/article/details/80251204
 */

#import "XSAVTools.h"

@implementation XSAVTools

/// Returns the shared singleton instance, created lazily and thread-safely.
+ (instancetype)shareInstance {
    static XSAVTools *sharedTool = nil;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        // In a class method `self` is the class object, so [self alloc] is
        // equivalent to [[self class] alloc] and respects subclassing.
        sharedTool = [[self alloc] init];
    });
    return sharedTool;
}

// Calculate the size of a video file (in MB)
/// Returns the size of the file at `filename` in megabytes (MB).
/// @param filename Full path of the file to measure.
/// @return File size in MB, or 0 if the file's attributes cannot be read.
+ (CGFloat)calculateVideoSizeWithFilename:(NSString *)filename {
    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSError *error = nil;
    NSDictionary *fileAttr = [fileManager attributesOfItemAtPath:filename error:&error];
    if (!fileAttr) {
        // BUG FIX: the error was silently ignored; a missing file now returns 0
        // instead of logging a bogus size.
        NSLog(@"calculateVideoSize failed for %@: %@", filename, error);
        return 0;
    }
    // BUG FIX: NSFileSize is an unsigned 64-bit value; the previous `longValue`
    // truncated files larger than 2GB on 32-bit builds.
    unsigned long long fileSize = [fileAttr fileSize];
    NSString *bytes = [NSByteCountFormatter stringFromByteCount:(long long)fileSize
                                                     countStyle:NSByteCountFormatterCountStyleFile];
    CGFloat fileMB = fileSize / 1024.0 / 1024.0;
    NSLog(@"fileMB = %f   bytes=%@", fileMB, bytes);

    return fileMB;
}
 
#pragma mark - Thumbnail generation
// Single thumbnail
/// Synchronously generates a single thumbnail from a video.
/// @param videoPath Local file path or remote URL string of the video.
/// @param videoTime Capture time, in seconds.
/// @param isLocal   YES if `videoPath` is a local file path; NO for a URL string.
/// @return The thumbnail image, or nil if the path is nil or generation fails.
+ (UIImage *)getThumbnailImageFromFilePath:(NSString *)videoPath time:(NSTimeInterval)videoTime isLocal:(BOOL)isLocal{
    if (!videoPath) {
        return nil;
    }
    NSURL *url = nil;
    if (isLocal) {
        url = [NSURL fileURLWithPath:videoPath];
    }else{
        url = [NSURL URLWithString:videoPath];
    }
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:url options:nil];
    AVAssetImageGenerator *assetImageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
    assetImageGenerator.appliesPreferredTrackTransform = YES;
    assetImageGenerator.apertureMode = AVAssetImageGeneratorApertureModeEncodedPixels;

    // BUG FIX: CMTimeMake(videoTime, 600) interpreted `videoTime` as a count of
    // 1/600-second ticks, i.e. it captured at videoTime/600 seconds.
    // CMTimeMakeWithSeconds treats the argument as seconds, as callers expect.
    NSError *error = nil;
    CGImageRef thumbnailImageRef = [assetImageGenerator copyCGImageAtTime:CMTimeMakeWithSeconds(videoTime, 600)
                                                               actualTime:NULL
                                                                    error:&error];
    if (!thumbnailImageRef) {
        // Surface the failure instead of discarding the error silently.
        NSLog(@"Thumbnail generation failed: %@", error);
        return nil;
    }
    UIImage *thumbnailImage = [[UIImage alloc] initWithCGImage:thumbnailImageRef];
    CGImageRelease(thumbnailImageRef);
    return thumbnailImage;
}

// Multiple thumbnails
/// Asynchronously generates roughly 4 thumbnails per second of video.
/// Pass either `url` or `asset`; if both are given, `url` takes precedence.
/// @param completionHandler Called once per generated frame, with the total
///        number of frames requested passed as `imageCount`.
+ (void)generateThumbnailsWithURL:(NSURL *)url orAsset:(AVAsset *)asset completionHandler:(void(^)(CMTime requestedTime, CGImageRef  _Nullable image, CMTime actualTime, AVAssetImageGeneratorResult result, NSError * _Nullable error, NSInteger imageCount))completionHandler{

    AVAsset *videoAsset = nil;
    if (url) {
        videoAsset = [[AVURLAsset alloc] initWithURL:url options:nil];
    } else if (asset) {
        videoAsset = asset;
    }
    // BUG FIX: guard against nil input and a zero timescale, both of which
    // previously crashed in the divisions below.
    if (!videoAsset || videoAsset.duration.timescale == 0) {
        return;
    }

    // Total duration in whole seconds = frame count / timescale.
    long totalTime = videoAsset.duration.value / videoAsset.duration.timescale;

    AVAssetImageGenerator *generator = [[AVAssetImageGenerator alloc] initWithAsset:videoAsset];
    generator.appliesPreferredTrackTransform = YES;

    NSMutableArray<NSValue *> *framesArray = [NSMutableArray array];
    // 4 frames per second of video; at least 1 frame for clips shorter than 1s.
    NSInteger frameCount = (totalTime * 4) ?: 1;
    NSInteger frameStep = videoAsset.duration.value / frameCount;
    for (NSInteger i = 0; i < frameCount; i++) {
        CMTime time = CMTimeMake(i * frameStep, videoAsset.duration.timescale);
        [framesArray addObject:[NSValue valueWithCMTime:time]];
    }
    [generator generateCGImagesAsynchronouslyForTimes:framesArray completionHandler:^(CMTime requestedTime, CGImageRef  _Nullable image, CMTime actualTime, AVAssetImageGeneratorResult result, NSError * _Nullable error) {
        // BUG FIX: the block parameter was invoked without a nil check.
        if (completionHandler) {
            completionHandler(requestedTime, image, actualTime, result, error, frameCount);
        }
    }];
}


#pragma mark - Composition (audio + watermark)
/// Composes a video (from `originURL` or `asset`) with either its original audio
/// or a custom audio track, optionally overlays a watermark image, and exports
/// the result to `exportPath` (a temp path is used when nil).
/// @param completionHandler status 1 = success, 0 = failure.
+ (void)systhesizeVideoWithOriginURL:(NSURL *)originURL orAsset:(AVAsset *)asset addCustomAudio:(NSURL *)addCustomAudioURL waterImage:(UIImage *)waterImage exportPath:(NSString *)exportPath completionHandler:(void(^)(int status,NSString *errorMsg, NSURL*exportURL))completionHandler{
    AVAsset *videoAsset = nil;
    if (originURL) {
        videoAsset = [[AVURLAsset alloc] initWithURL:originURL options:nil];
    } else if (asset) {
        videoAsset = asset;
    }

    AVAssetTrack *originVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    AVAssetTrack *originAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    // BUG FIX: insertTimeRange:ofTrack: throws NSInvalidArgumentException on a
    // nil track; fail fast when there is no source video track.
    if (!originVideoTrack) {
        if (completionHandler) completionHandler(0, @"no video track in source asset", nil);
        return;
    }

    AVMutableComposition *mixCompostion = [[AVMutableComposition alloc] init];

    // 1. Video track
    AVMutableCompositionTrack *compositionVideoTrack = [mixCompostion addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    CMTimeRange range = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    [compositionVideoTrack insertTimeRange:range ofTrack:originVideoTrack atTime:kCMTimeZero error:nil];

    // 2. Audio track: custom audio when supplied, otherwise the original audio.
    AVMutableCompositionTrack *compositionAudioTrack = [mixCompostion addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    if (addCustomAudioURL) {
        AVAsset *audioAsset = [AVAsset assetWithURL:addCustomAudioURL];
        AVAssetTrack *addAudioTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
        if (addAudioTrack) {
            [compositionAudioTrack insertTimeRange:range ofTrack:addAudioTrack atTime:kCMTimeZero error:nil];
        }
    } else if (originAudioTrack) {
        // BUG FIX: a source video without an audio track used to crash here.
        [compositionAudioTrack insertTimeRange:range ofTrack:originAudioTrack atTime:kCMTimeZero error:nil];
    }

    // 3. VideoComposition
    // 3.1 Instruction spanning the whole composed duration.
    AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, compositionVideoTrack.timeRange.duration);

    // 3.2 Layer instruction for the video track (carries rotation/scale transform).
    AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
    [layerInstruction setTransform:originVideoTrack.preferredTransform atTime:kCMTimeZero];
    mainInstruction.layerInstructions = [NSArray arrayWithObjects:layerInstruction, nil];

    // 3.3 Video composition controls the final render size, cropping, watermark.
    AVMutableVideoComposition *videoCompositon = [AVMutableVideoComposition videoComposition];
    // Swap width/height when the track's transform rotates it into portrait.
    CGSize naturalSize;
    if ([self isPortrait:originVideoTrack]) {
        naturalSize = CGSizeMake(originVideoTrack.naturalSize.height, originVideoTrack.naturalSize.width);
    } else {
        naturalSize = originVideoTrack.naturalSize;
    }
    videoCompositon.renderSize = naturalSize;
    videoCompositon.instructions = [NSArray arrayWithObjects:mainInstruction, nil];
    videoCompositon.frameDuration = CMTimeMake(1, 25); // 25 fps

    // 4. Watermark overlay
    if (waterImage) {
        [self applyVideoEffectsComposition:videoCompositon withWaterImage:waterImage size:naturalSize];
    }

    // 5. Export
    if (exportPath == nil) {
        exportPath = [XSFileManager tempVideoPath];
    }
    [self exportVideoWithComposition:mixCompostion videoCompostion:videoCompositon exportURL:[NSURL fileURLWithPath:exportPath] complete:completionHandler];
}

/// Placeholder — not implemented; always returns nil.
/// NOTE(review): no caller visible in this file; confirm before removing.
+ (AVMutableVideoComposition *)getVideoCompostion {
    return nil;
}

/**
 Adds an image watermark to `videoComposition` using Core Animation layers.

 Layer tree:
   parentLayer (white background)
     ├─ videoLayer   — AVFoundation renders the video frames here
     └─ overlayLayer — clipped container holding the watermark, drawn above the video

 The watermark layer is given the full render size, so typically the watermark
 image's dimensions equal `size`. Fully customizable — this is plain layer work.
 Reference: http:www.cocoachina.com/articles/10542#header
 */
+ (void)applyVideoEffectsComposition:(AVMutableVideoComposition *)videoComposition withWaterImage:(UIImage *)waterImage size:(CGSize)size{
    CGRect fullFrame = CGRectMake(0, 0, size.width, size.height);

    // Watermark layer; additional overlay layers could be stacked the same way.
    CALayer *watermarkLayer = [CALayer layer];
    watermarkLayer.contents = (id)waterImage.CGImage;
    watermarkLayer.frame = fullFrame;

    // Overlay container, clipped to the render area. Animations (e.g. a fading
    // opacity CABasicAnimation with beginTime = AVCoreAnimationBeginTimeAtZero)
    // could also be attached to its sublayers.
    CALayer *overlayLayer = [CALayer layer];
    overlayLayer.frame = fullFrame;
    overlayLayer.masksToBounds = YES;
    [overlayLayer addSublayer:watermarkLayer];

    CALayer *videoLayer = [CALayer layer];
    videoLayer.frame = fullFrame;

    CALayer *parentLayer = [CALayer layer];
    parentLayer.frame = fullFrame;
    parentLayer.backgroundColor = [UIColor whiteColor].CGColor;
    [parentLayer addSublayer:videoLayer];   // video frames render here
    [parentLayer addSublayer:overlayLayer]; // overlay sits above the video

    videoComposition.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
}


#pragma mark - merge video
/// Concatenates all clips in `array` end-to-end into one composition and
/// exports it asynchronously to a temporary file.
/// Orientation/transform is taken from the FIRST clip; all clips are assumed
/// to share it.
/// @param complete status 1 = success, 0 = failure (may be nil).
/// @return The merged composition, or nil when `array` is empty.
+ (AVMutableComposition *)mergeVideos:(NSArray<XSPhotoModel *>*)array complete:(void(^)(int status,NSString *errorMsg,NSURL * mergeFinishVideoURL))complete{
    // BUG FIX: an empty input previously produced an empty composition whose
    // export failed later with an opaque error; fail fast instead.
    if (array.count == 0) {
        if (complete) complete(0, @"no videos to merge", nil);
        return nil;
    }

    AVMutableComposition *mixCompostion = [[AVMutableComposition alloc] init];
    AVMutableCompositionTrack *compositionVideoTrack = [mixCompostion addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *compositionAudioTrack = [mixCompostion addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [self mergeTrackWithModels:array compositionVideoTrack:compositionVideoTrack compositionAudioTrack:compositionAudioTrack];

    AVAsset *videoAsset = [array firstObject].avasset;

    // Layer instruction: apply the first clip's preferred transform (rotation etc.).
    AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
    AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    [layerInstruction setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero];

    // Instruction covering the whole merged duration.
    AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, compositionVideoTrack.timeRange.duration);
    mainInstruction.layerInstructions = [NSArray arrayWithObjects:layerInstruction, nil];

    // Video composition decides the final render size and frame rate.
    AVMutableVideoComposition *videoCompositon = [AVMutableVideoComposition videoComposition];
    // Swap width/height when the first clip's transform rotates it into portrait.
    CGSize naturalSize;
    if ([self isPortrait:videoAssetTrack]) {
        naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
    } else {
        naturalSize = videoAssetTrack.naturalSize;
    }
    videoCompositon.renderSize = naturalSize;
    videoCompositon.instructions = [NSArray arrayWithObjects:mainInstruction, nil];
    videoCompositon.frameDuration = CMTimeMake(1, 25); // 25 fps

    [self exportVideoWithComposition:mixCompostion videoCompostion:videoCompositon exportURL:[XSFileManager tempVideoURL] complete:complete];

    return mixCompostion;
}

/// Appends each model's video and audio tracks end-to-end onto the given
/// composition tracks. A clip missing one media type is skipped for that track
/// only, but still advances the insertion cursor by its full duration so the
/// remaining media stays in sync.
+ (void)mergeTrackWithModels:(NSArray<XSPhotoModel *>*)array
       compositionVideoTrack:(AVMutableCompositionTrack *)compositionVideoTrack
       compositionAudioTrack:(AVMutableCompositionTrack *)compositionAudioTrack{
    // `__block` was unnecessary here — the variable is never captured by a block.
    CMTime cursor = kCMTimeZero;
    for (XSPhotoModel *model in array) {
        AVAsset *asset = model.avasset;
        AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
        AVAssetTrack *audioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
        CMTimeRange range = CMTimeRangeMake(kCMTimeZero, asset.duration);
        // BUG FIX: insertTimeRange:ofTrack: throws on a nil track (e.g. a clip
        // recorded without audio); guard the source track too.
        if (compositionVideoTrack && videoTrack) {
            [compositionVideoTrack insertTimeRange:range ofTrack:videoTrack atTime:cursor error:nil];
        }
        if (compositionAudioTrack && audioTrack) {
            [compositionAudioTrack insertTimeRange:range ofTrack:audioTrack atTime:cursor error:nil];
        }

        cursor = CMTimeAdd(cursor, asset.duration);
    }
}

/// Exports `mixCompostion` to `exportURL` (QuickTime .mov, highest quality),
/// showing a progress HUD, then saves the result to the photo album on success.
/// @param videoComposition Optional render instructions; ignored by AVFoundation
///        when the preset is AVAssetExportPresetPassthrough.
/// @param completionHandler status 1 = success, 0 = failure (may be nil).
+ (void)exportVideoWithComposition:(AVMutableComposition *)mixCompostion
                   videoCompostion:(AVMutableVideoComposition *)videoComposition
                         exportURL:(NSURL *)exportURL
                          complete:(void(^)(int status,NSString *errorMsg,NSURL * mergeFinishVideoURL))completionHandler{

    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixCompostion presetName:AVAssetExportPresetHighestQuality];
    exportSession.outputFileType = AVFileTypeQuickTimeMovie; // .mov; use AVFileTypeMPEG4 for .mp4
    exportSession.outputURL = exportURL;
    exportSession.shouldOptimizeForNetworkUse = YES;
    if (videoComposition) {
        exportSession.videoComposition = videoComposition;
    }

    MBProgressHUD *hud = [MBProgressHUD showHUDAddedTo:KeyWindow animated:YES];
    hud.label.text = @"生成中...";
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        // The export callback may arrive on a background queue; hop to main for UI.
        dispatch_async(dispatch_get_main_queue(), ^{
            if (exportSession.status == AVAssetExportSessionStatusCompleted) {
                hud.label.text = @"生成好了";
                dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.3 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
                    [hud hideAnimated:YES];
                    [XSPhotosManager saveVideoToAblum:exportURL completion:^(BOOL success, PHAsset *asset) {
                        // BUG FIX: the success path invoked the handler without a
                        // nil check while the failure path checked it; a nil
                        // handler used to crash here.
                        if (completionHandler) completionHandler(1, nil, exportURL);
                    }];
                });
            } else {
                NSLog(@"%@ - path:%@",[NSString stringWithFormat:@"export video fail %@",exportSession.error],exportURL);
                hud.label.text = @"生成失败";
                dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.5 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
                    [hud hideAnimated:YES];
                    if (completionHandler) completionHandler(0,[NSString stringWithFormat:@"export video fail %@",exportSession.error],nil);
                });
            }
        });
    }];
}



/**
 视频方向
 
 */
/**
 Infers the video's visual orientation from its first video track's
 preferred transform (translation components + natural size heuristics).

 @param asset The asset to inspect.
 @return An UIImageOrientation mapping to the corresponding interface
         orientation (see inline comments). Falls into the tx==0/ty==0
         branch when the asset has no video track.
 */
+ (UIImageOrientation)getVideoOrientationFromAsset:(AVAsset *)asset
{
    // BUG FIX: objectAtIndex:0 throws NSRangeException when the asset has no
    // video track; firstObject returns nil, and messaging nil below yields
    // zeroed structs instead of crashing.
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    CGSize size = [videoTrack naturalSize];
    CGAffineTransform txf = [videoTrack preferredTransform];

    if (size.width == txf.tx && size.height == txf.ty)
        return UIImageOrientationLeft;  // maps to UIInterfaceOrientationLandscapeLeft
    else if (txf.tx == 0 && txf.ty == 0)
        return UIImageOrientationRight; // maps to UIInterfaceOrientationLandscapeRight
    else if (txf.tx == 0 && txf.ty == size.width)
        return UIImageOrientationDown;  // maps to UIInterfaceOrientationPortraitUpsideDown
    else
        return UIImageOrientationUp;    // maps to UIInterfaceOrientationPortrait
}
/// Applies a rotation transform to `layerInstruction` (at time zero) so the
/// video renders upright, based on the asset's detected orientation.
/// Unrecognized orientations get the identity transform (rotation by 0).
+ (void)handleLayerInstruction:(AVMutableVideoCompositionLayerInstruction *)layerInstruction Asset:(AVAsset *)asset{
    UIImageOrientation orientation = [self getVideoOrientationFromAsset:asset];

    CGFloat rotationAngle = 0;
    switch (orientation) {
        case UIImageOrientationUp:
            rotationAngle = M_PI_2;
            break;
        case UIImageOrientationDown:
            rotationAngle = -M_PI_2;
            break;
        case UIImageOrientationRight:
            rotationAngle = 0;
            break;
        case UIImageOrientationLeft:
            rotationAngle = M_PI;
            break;
        default:
            NSLog(@"no supported orientation has been found in this video");
            break;
    }

    // A translation step (e.g. for cropping offsets) could be composed in
    // before the rotation if cropping support is ever added.
    CGAffineTransform finalTransform = CGAffineTransformRotate(CGAffineTransformIdentity, rotationAngle);
    [layerInstruction setTransform:finalTransform atTime:kCMTimeZero];
}




/// YES when the track's preferred transform encodes a ±90° rotation, i.e. the
/// video displays in portrait even though naturalSize is stored landscape.
/// Reference: https://www.jianshu.com/p/c40e4bbb3ae2
+ (BOOL)isPortrait:(AVAssetTrack *)videoAssetTrack{
    // In CGAffineTransform: a/d scale, b/c rotate, tx/ty translate.
    // A 90° rotation zeroes a and d and puts ±1 into b and c.
    CGAffineTransform t = videoAssetTrack.preferredTransform;
    BOOL rotated90 = (t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0);
    BOOL rotated270 = (t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0);
    return rotated90 || rotated270;
}




@end


