//
//  VideoEditFile.m
//  VideoCompositon
//
//  Created by 郭永峰 on 2025/10/16.
//

#import "VideoEditFile.h"
#import <Photos/Photos.h>

@implementation VideoEditFile

/// Appends the first video track of every asset in `urls` to `composition`,
/// each inserted starting at `time`, and returns the created composition tracks.
/// Assets with no video track (audio-only or unreadable URLs) and failed
/// insertions are skipped, so the result may be shorter than `urls`.
+ (NSArray<AVMutableCompositionTrack*> *)addUrl:(NSArray<NSURL *> *)urls toComposition:(AVMutableComposition *)composition atTime:(CMTime)time {
    NSMutableArray *tracks = [NSMutableArray array];
    for (NSURL *url in urls) {
        // 1. Load the URL asset.
        // NOTE(review): -tracksWithMediaType: and -duration load synchronously
        // and are deprecated since iOS 16 — consider migrating to the async
        // load(.tracks)/load(.duration) APIs.
        AVURLAsset *asset = [AVURLAsset assetWithURL:url];

        // 2. Fetch the source video track BEFORE creating a destination track,
        //    so an asset without video doesn't leave an empty orphan track on
        //    the composition (the old code inserted a nil track and appended
        //    the broken composition track anyway).
        AVAssetTrack *assetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
        if (!assetTrack) {
            NSLog(@"插入轨道失败: %@", url);
            continue;
        }

        // 3. Create the destination video track on the composition.
        AVMutableCompositionTrack *videoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];

        // 4. Insert the asset's full duration at the requested start time.
        CMTimeRange timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
        NSError *error = nil;
        // 5. Check the BOOL return value, not the error pointer — Cocoa only
        //    guarantees *error is meaningful when the call reports failure.
        if (![videoTrack insertTimeRange:timeRange ofTrack:assetTrack atTime:time error:&error]) {
            NSLog(@"插入轨道失败: %@", error);
            // Undo the half-configured track so the composition stays clean
            // and callers never receive a track with no media in it.
            [composition removeTrack:videoTrack];
            continue;
        }
        NSLog(@"成功插入轨道");
        [tracks addObject:videoTrack];
    }
    
    return tracks;
}

/// Builds a 30 fps AVMutableVideoComposition rendering `tracks` over
/// `timeRange` at `renderSize`. Each track gets one layer instruction whose
/// opacity comes from the parallel `opacities` array (missing entries default
/// to fully opaque) and whose transform aspect-fits the oriented video into
/// the render size.
+ (AVMutableVideoComposition *)compositionWithTracks:(NSArray<AVMutableCompositionTrack *> *)tracks timeRange:(CMTimeRange)timeRange Opacity:(NSArray<NSNumber *>*)opacities renderSize:(CGSize)renderSize {
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.frameDuration = CMTimeMake(1, 30); // 30 fps
    videoComposition.renderSize = renderSize;
    
    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = timeRange;
    
    NSMutableArray *layerInstructions = [NSMutableArray array];
    for (NSUInteger i = 0; i < tracks.count; i++) {
        AVMutableCompositionTrack *track = tracks[i];

        // One layer instruction per track; later tracks in the array are
        // listed later in layerInstructions.
        AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:track];
        [layerInstructions addObject:layerInstruction];

        // Guard against an opacities array shorter than tracks instead of
        // crashing on an out-of-bounds subscript; default to fully opaque.
        float opacity = (i < opacities.count) ? [opacities[i] floatValue] : 1.0f;
        [layerInstruction setOpacity:opacity atTime:kCMTimeZero];

        // The track's preferredTransform encodes its capture orientation.
        CGAffineTransform preferredTransform = track.preferredTransform;

        // Extent after applying the orientation transform. A 90°/270°
        // rotation produces negative components, so take absolute values —
        // otherwise the aspect-fit math below receives a negative size.
        CGSize videoNaturalSize = track.naturalSize;
        CGSize transformedSize = CGSizeApplyAffineTransform(videoNaturalSize, preferredTransform);
        CGSize videoActualSize = CGSizeMake(fabs(transformedSize.width), fabs(transformedSize.height));

        // Aspect-fit the oriented video into the render size, then build the
        // final transform (orientation + scale + centering translation).
        CGRect scaledRect = [self calculateScaledRectForVideoSize:videoActualSize inTargetSize:renderSize];
        CGAffineTransform finalTransform = [self finalTransformForVideoTrack:track inRect:scaledRect];
        [layerInstruction setTransform:finalTransform atTime:kCMTimeZero];
    }
    instruction.layerInstructions = layerInstructions;
    videoComposition.instructions = @[instruction];
    
    return videoComposition;
}

/// Saves the video file at `videoURL` into the user's photo library and
/// reports the outcome via `completion` on the main queue.
+ (void)saveVideoToPhotoLibrary:(NSURL *)videoURL
                     completion:(void (^)(BOOL success, NSError *error))completion {
    
    PHPhotoLibrary *library = [PHPhotoLibrary sharedPhotoLibrary];
    [library performChanges:^{
        [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:videoURL];
    } completionHandler:^(BOOL success, NSError * _Nullable error) {
        // Nothing to report — skip the main-queue hop entirely.
        if (!completion) {
            return;
        }
        // performChanges's handler runs on an arbitrary queue; deliver the
        // result on main so UI code can react directly.
        dispatch_async(dispatch_get_main_queue(), ^{
            completion(success, error);
        });
    }];
}


// 计算视频在目标尺寸中的缩放矩形（保持宽高比）
// Aspect-fit: the centered rectangle a video of `videoSize` occupies when
// scaled (aspect ratio preserved) to fit inside `targetSize`.
+ (CGRect)calculateScaledRectForVideoSize:(CGSize)videoSize inTargetSize:(CGSize)targetSize {
    BOOL widerThanTarget = (videoSize.width / videoSize.height) > (targetSize.width / targetSize.height);
    
    if (widerThanTarget) {
        // Width-limited: span the full target width, letterbox vertically.
        CGFloat fittedHeight = videoSize.height * (targetSize.width / videoSize.width);
        return CGRectMake(0, (targetSize.height - fittedHeight) / 2, targetSize.width, fittedHeight);
    }
    
    // Height-limited (or equal aspect): span the full target height,
    // pillarbox horizontally.
    CGFloat fittedWidth = videoSize.width * (targetSize.height / videoSize.height);
    return CGRectMake((targetSize.width - fittedWidth) / 2, 0, fittedWidth, targetSize.height);
}

// 计算变换矩阵
// Transform mapping a video of `videoSize` onto `targetRect`:
// scale to the rect's size, then translate to its origin.
+ (CGAffineTransform)transformForVideoSize:(CGSize)videoSize inRect:(CGRect)targetRect {
    CGAffineTransform scaled = CGAffineTransformMakeScale(targetRect.size.width / videoSize.width,
                                                          targetRect.size.height / videoSize.height);
    CGAffineTransform positioned = CGAffineTransformMakeTranslation(targetRect.origin.x,
                                                                    targetRect.origin.y);
    // Concat order matters: scale first, then move into place.
    return CGAffineTransformConcat(scaled, positioned);
}

// 计算最终的变换矩阵（包含方向修正和缩放）
// Final transform for a composition layer: applies the track's orientation
// (preferredTransform), then scales and positions the oriented video so it
// exactly fills `targetRect`.
//
// FIX(review): the previous version scaled by naturalSize even though the
// scale is concatenated AFTER preferredTransform. For 90°/270°-rotated tracks
// the oriented extent has swapped width/height and a non-zero origin, so the
// video rendered mis-scaled and/or off-screen. Measuring the transformed
// bounding rect fixes both; for an identity preferredTransform the resulting
// transform is unchanged.
+ (CGAffineTransform)finalTransformForVideoTrack:(AVAssetTrack *)videoTrack inRect:(CGRect)targetRect {
    CGSize naturalSize = videoTrack.naturalSize;
    CGAffineTransform preferredTransform = videoTrack.preferredTransform;
    
    // Bounding rect of the video after the orientation transform. Its size is
    // always non-negative; its origin can be non-zero (rotation is about the
    // natural top-left corner).
    CGRect orientedRect = CGRectApplyAffineTransform(CGRectMake(0, 0, naturalSize.width, naturalSize.height), preferredTransform);
    
    // Scale the *oriented* extent — not the natural size — into the target.
    CGFloat scaleX = targetRect.size.width / CGRectGetWidth(orientedRect);
    CGFloat scaleY = targetRect.size.height / CGRectGetHeight(orientedRect);
    
    // 1) orient, 2) shift the oriented content back to the origin,
    // 3) scale to the target size, 4) move to the target position.
    CGAffineTransform transform = CGAffineTransformConcat(preferredTransform,
                                                          CGAffineTransformMakeTranslation(-CGRectGetMinX(orientedRect), -CGRectGetMinY(orientedRect)));
    transform = CGAffineTransformConcat(transform, CGAffineTransformMakeScale(scaleX, scaleY));
    transform = CGAffineTransformConcat(transform, CGAffineTransformMakeTranslation(targetRect.origin.x, targetRect.origin.y));
    
    return transform;
}

@end
