//
//  USMediaLivePhotoManager.m
//  us
//
//  Created by karry on 2025/5/17.
//

#import "USMediaLivePhotoManager.h"
#import <UniformTypeIdentifiers/UniformTypeIdentifiers.h>

@implementation USMediaLivePhotoManager

/// Assembles a Live Photo from a still image and a video.
/// Both resources are re-written into the temp directory with the shared
/// content `identifier` embedded, then handed to Photos for pairing.
///
/// @param imageURL   Source still image on disk.
/// @param videoURL   Source video on disk.
/// @param identifier Shared content identifier stamped into both resources.
/// @param completion Called exactly once with either the assembled Live Photo
///                   (plus the paired temp file URLs, which the caller owns and
///                   should clean up) or an error.
+ (void)assembleLivePhotoWithImageURL:(NSURL *)imageURL
                             videoURL:(NSURL *)videoURL
                          identifier:(NSString *)identifier
                          completion:(void (^)(PHLivePhoto *livePhoto, NSURL *pairedImageURL, NSURL *pairedVideoURL, NSError *error))completion {

    NSURL *tempDir = [NSURL fileURLWithPath:NSTemporaryDirectory() isDirectory:YES];
    NSURL *pairedImageURL = [tempDir URLByAppendingPathComponent:[NSString stringWithFormat:@"%@.jpg", identifier]];
    NSURL *pairedVideoURL = [tempDir URLByAppendingPathComponent:[NSString stringWithFormat:@"%@.mov", identifier]];

    NSError *imageError = nil;
    [self addIdentifier:identifier toImageAtURL:imageURL outputURL:pairedImageURL error:&imageError];

    if (imageError) {
        if (completion) completion(nil, nil, nil, imageError);
        return;
    }

    [self testaddIdentifier:identifier toVideoAtURL:videoURL outputURL:pairedVideoURL completion:^(NSError *videoError) {
        if (videoError) {
            if (completion) completion(nil, nil, nil, videoError);
            return;
        }

        [PHLivePhoto requestLivePhotoWithResourceFileURLs:@[pairedImageURL, pairedVideoURL]
                                          placeholderImage:nil
                                                targetSize:CGSizeZero
                                               contentMode:PHImageContentModeAspectFill
                                             resultHandler:^(PHLivePhoto * _Nullable livePhoto, NSDictionary * _Nonnull info) {
            // The result handler may fire more than once: a degraded (preview)
            // delivery can precede the final one. Ignore degraded deliveries so
            // `completion` is invoked exactly once with the final result —
            // previously a degraded delivery was reported as failure and the
            // completion block could run twice.
            if ([info[PHLivePhotoInfoIsDegradedKey] boolValue]) {
                return;
            }
            if (livePhoto) {
                if (completion) completion(livePhoto, pairedImageURL, pairedVideoURL, nil);
            } else {
                NSError *err = [NSError errorWithDomain:@"LivePhotoAssembler"
                                                   code:999
                                               userInfo:@{NSLocalizedDescriptionKey: @"Live Photo 创建失败"}];
                if (completion) completion(nil, pairedImageURL, pairedVideoURL, err);
            }
        }];
    }];
}

/// Writes a JPEG copy of the image at `imageURL` to `outputURL` with the Live
/// Photo content identifier embedded in the Apple maker-note dictionary.
///
/// `*error` is written only on failure (previously the nil-source and
/// nil-image paths returned silently, so the caller proceeded as if the paired
/// image had been written). All CF objects are released on every path.
///
/// @param identifier Content identifier to embed (maker-note key "17").
/// @param imageURL   Source image file.
/// @param outputURL  Destination JPEG file (overwritten if present).
/// @param error      Out-parameter, set only on failure. May be NULL.
+ (void)addIdentifier:(NSString *)identifier
        toImageAtURL:(NSURL *)imageURL
           outputURL:(NSURL *)outputURL
               error:(NSError **)error {

    CGImageSourceRef source = CGImageSourceCreateWithURL((__bridge CFURLRef)imageURL, NULL);
    if (!source) {
        if (error) {
            *error = [NSError errorWithDomain:@"LivePhotoAssembler"
                                         code:-1001
                                     userInfo:@{NSLocalizedDescriptionKey: @"无法读取源图片"}];
        }
        return;
    }

    CGImageRef imageRef = CGImageSourceCreateImageAtIndex(source, 0, NULL);
    if (!imageRef) {
        CFRelease(source); // was leaked on this path before
        if (error) {
            *error = [NSError errorWithDomain:@"LivePhotoAssembler"
                                         code:-1001
                                     userInfo:@{NSLocalizedDescriptionKey: @"无法解码源图片"}];
        }
        return;
    }

    NSDictionary *imageProps = (__bridge_transfer NSDictionary *)CGImageSourceCopyPropertiesAtIndex(source, 0, NULL);
    // Fall back to an empty dictionary so the maker-note subscript below is
    // not a no-op on a nil receiver when the source has no properties.
    NSMutableDictionary *mutableProps = [imageProps mutableCopy] ?: [NSMutableDictionary dictionary];

    NSDictionary *makerDict = @{@"17": identifier}; // Key "17" is Apple’s identifier tag
    mutableProps[(NSString *)kCGImagePropertyMakerAppleDictionary] = makerDict;

    CGImageDestinationRef dest = CGImageDestinationCreateWithURL((__bridge CFURLRef)outputURL, (__bridge CFStringRef)UTTypeJPEG.identifier, 1, NULL);
    if (!dest) {
        // Previously a NULL destination crashed in CGImageDestinationAddImage
        // and leaked `imageRef`/`source`.
        CGImageRelease(imageRef);
        CFRelease(source);
        if (error) {
            *error = [NSError errorWithDomain:@"LivePhotoAssembler"
                                         code:-1001
                                     userInfo:@{NSLocalizedDescriptionKey: @"无法写入带 identifier 的图片"}];
        }
        return;
    }

    CGImageDestinationAddImage(dest, imageRef, (__bridge CFDictionaryRef)mutableProps);
    BOOL success = CGImageDestinationFinalize(dest);

    CFRelease(dest);
    CGImageRelease(imageRef);
    CFRelease(source);

    if (!success && error) {
        *error = [NSError errorWithDomain:@"LivePhotoAssembler"
                                     code:-1001
                                 userInfo:@{NSLocalizedDescriptionKey: @"无法写入带 identifier 的图片"}];
    }
}

/// Re-exports the video at `inputURL` to `outputURL` with the Live Photo
/// content identifier in its QuickTime metadata, baking the source track's
/// preferred transform into the output so orientation is preserved.
///
/// NOTE(review): only the video track is inserted — any audio track is
/// dropped; `testaddIdentifier:...` is the variant that keeps audio.
///
/// @param identifier Content identifier written as
///                   com.apple.quicktime.content.identifier metadata.
/// @param inputURL   Source video file.
/// @param outputURL  Destination .mov file (any existing file is removed).
/// @param completion Called once with nil on success or an error.
+ (void)addIdentifier:(NSString *)identifier
        toVideoAtURL:(NSURL *)inputURL
           outputURL:(NSURL *)outputURL
          completion:(void (^)(NSError *error))completion {

    AVAsset *asset = [AVAsset assetWithURL:inputURL];
    AVAssetTrack *track = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];

    if (!track) {
        if (completion) completion([NSError errorWithDomain:@"LivePhotoAssembler"
                                                       code:-1002
                                                   userInfo:@{NSLocalizedDescriptionKey: @"找不到视频轨道"}]);
        return;
    }

    // 创建 composition 并插入视频轨道
    AVMutableComposition *composition = [AVMutableComposition composition];
    AVMutableCompositionTrack *compTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                    preferredTrackID:kCMPersistentTrackID_Invalid];

    NSError *insertError = nil;
    [compTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                       ofTrack:track
                        atTime:kCMTimeZero
                         error:&insertError];

    if (insertError) {
        if (completion) completion(insertError);
        return;
    }

    // === 关键部分：设置视频方向（preferredTransform） ===
    // Render size = natural size run through the preferred transform; fabs
    // strips the negative extents a 90°/180° rotation introduces.
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    CGSize naturalSize = track.naturalSize;
    CGAffineTransform transform = track.preferredTransform;
    CGSize renderSize = CGSizeApplyAffineTransform(naturalSize, transform);
    renderSize.width = fabs(renderSize.width);
    renderSize.height = fabs(renderSize.height);
    videoComposition.renderSize = renderSize;

    videoComposition.frameDuration = CMTimeMake(1, 30); // fixed 30 fps output

    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);

    AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compTrack];
    [layerInstruction setTransform:transform atTime:kCMTimeZero];

    instruction.layerInstructions = @[layerInstruction];
    videoComposition.instructions = @[instruction];

    // 创建 metadata（Live Photo 的标识符）
    AVMutableMetadataItem *metadataItem = [AVMutableMetadataItem metadataItem];
    metadataItem.keySpace = AVMetadataKeySpaceQuickTimeMetadata;
    metadataItem.key = @"com.apple.quicktime.content.identifier";
    metadataItem.value = identifier;
    metadataItem.dataType = (__bridge NSString *)kCMMetadataBaseDataType_UTF8;

    // AVAssetExportSession fails when the destination file already exists, so
    // a stale temp file from a previous run broke every re-export. Clear it.
    [[NSFileManager defaultManager] removeItemAtURL:outputURL error:nil];

    // 设置导出
    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition
                                                                            presetName:AVAssetExportPresetHighestQuality];
    if (!exportSession) {
        // e.g. the preset is unsupported for this composition; previously this
        // fell through and `completion` was never called.
        if (completion) completion([NSError errorWithDomain:@"LivePhotoAssembler"
                                                       code:-1003
                                                   userInfo:@{NSLocalizedDescriptionKey: @"导出视频失败"}]);
        return;
    }
    exportSession.outputURL = outputURL;
    exportSession.outputFileType = AVFileTypeQuickTimeMovie;
    exportSession.metadata = @[metadataItem];
    exportSession.videoComposition = videoComposition;

    // 开始导出
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        if (exportSession.status == AVAssetExportSessionStatusCompleted) {
            if (completion) completion(nil);
        } else {
            if (completion) completion(exportSession.error ?: [NSError errorWithDomain:@"LivePhotoAssembler"
                                                                                  code:-1003
                                                                              userInfo:@{NSLocalizedDescriptionKey: @"导出视频失败"}]);
        }
    }];
}

/// Re-exports the video at `inputURL` to `outputURL` with Live Photo metadata
/// (content identifier + still-image-time), keeping any audio track and baking
/// the source orientation into the output.
///
/// @param identifier Content identifier written as
///                   com.apple.quicktime.content.identifier metadata.
/// @param inputURL   Source video file.
/// @param outputURL  Destination .mov file (any existing file is removed).
/// @param completion Called once with nil on success or an error.
+ (void)testaddIdentifier:(NSString *)identifier
        toVideoAtURL:(NSURL *)inputURL
           outputURL:(NSURL *)outputURL
          completion:(void (^)(NSError *error))completion {
    
    AVAsset *asset = [AVAsset assetWithURL:inputURL];
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    
    if (!videoTrack) {
        NSError *error = [NSError errorWithDomain:@"LivePhotoAssembler"
                                             code:-1002
                                         userInfo:@{NSLocalizedDescriptionKey: @"找不到视频轨道"}];
        if (completion) completion(error);
        return;
    }
    
    // 1. 创建 Composition
    AVMutableComposition *composition = [AVMutableComposition composition];
    
    // 2. 添加视频轨道
    AVMutableCompositionTrack *videoCompTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                        preferredTrackID:kCMPersistentTrackID_Invalid];
    NSError *videoInsertError = nil;
    [videoCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                           ofTrack:videoTrack
                            atTime:kCMTimeZero
                             error:&videoInsertError];
    
    if (videoInsertError) {
        if (completion) completion(videoInsertError);
        return;
    }
    
    // 3. 添加音频轨道（如果存在）— best-effort: a failed audio insert just
    // yields a silent video, it does not abort the export.
    AVAssetTrack *audioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    if (audioTrack) {
        AVMutableCompositionTrack *audioCompTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                            preferredTrackID:kCMPersistentTrackID_Invalid];
        [audioCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                               ofTrack:audioTrack
                                atTime:kCMTimeZero
                                 error:nil];
    }
    
    // 4. 配置视频合成
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    
    // 4.1 获取原始变换和尺寸
    CGAffineTransform transform = videoTrack.preferredTransform;
    CGSize naturalSize = videoTrack.naturalSize;
    
    // 关键修复：针对不同方向视频的特殊处理
    // Patch the translation components so the rotated frame lands inside the
    // render rect (the raw preferredTransform of some captures leaves it
    // offset off-screen).
    if (transform.b == 1.0 && transform.c == -1.0) {
        // 竖屏视频(Home键在下) — 90° rotation
        transform.tx = naturalSize.height;
        transform.ty = 0;
    } else if (transform.b == -1.0 && transform.c == 1.0) {
        // 倒置竖屏视频(Home键在上) — 270° rotation
        transform.tx = 0;
        transform.ty = naturalSize.width;
    } else if (transform.a == -1.0 && transform.d == -1.0) {
        // 横屏(Home键在左) — 180° rotation
        transform.tx = naturalSize.width;
        transform.ty = naturalSize.height;
    }
    
    // 计算正确的渲染尺寸
    CGSize renderSize = CGSizeZero;
    if (transform.b == 1.0 || transform.b == -1.0) {
        // 竖屏视频，交换宽高
        renderSize = CGSizeMake(naturalSize.height, naturalSize.width);
    } else {
        // 横屏视频，保持原尺寸
        renderSize = naturalSize;
    }
    
    videoComposition.renderSize = renderSize;
    
    // 4.2 设置帧率 — round instead of truncating: CMTimeMake takes int32_t,
    // so the previous implicit conversion turned 29.97 fps into 29 fps.
    float frameRate = videoTrack.nominalFrameRate > 0 ? videoTrack.nominalFrameRate : 30.0;
    videoComposition.frameDuration = CMTimeMake(1, (int32_t)lroundf(frameRate));
    
    // 4.3 配置视频指令
    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
    
    AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoCompTrack];
    
    // 应用调整后的transform
    [layerInstruction setTransform:transform atTime:kCMTimeZero];
    
    instruction.layerInstructions = @[layerInstruction];
    videoComposition.instructions = @[instruction];
    
    // 5. 设置 Live Photo 元数据
    AVMutableMetadataItem *contentIdentifier = [AVMutableMetadataItem metadataItem];
    contentIdentifier.keySpace = AVMetadataKeySpaceQuickTimeMetadata;
    contentIdentifier.key = @"com.apple.quicktime.content.identifier";
    contentIdentifier.value = identifier;
    contentIdentifier.dataType = (__bridge NSString *)kCMMetadataBaseDataType_UTF8;
    
    // NOTE(review): the still-image-time item carries a float seconds value
    // with an SInt8 data type; Apple's pattern appends this as a timed
    // metadata sample via AVAssetWriterInputMetadataAdaptor (see
    // stillImageTimeMetadataAdaptor below) — verify Photos honors this
    // top-level export-session variant.
    CMTime stillImageTime = CMTimeMultiplyByFloat64(asset.duration, 0.5);
    AVMutableMetadataItem *stillImageTimeItem = [AVMutableMetadataItem metadataItem];
    stillImageTimeItem.keySpace = AVMetadataKeySpaceQuickTimeMetadata;
    stillImageTimeItem.key = @"com.apple.quicktime.still-image-time";
    stillImageTimeItem.value = @(CMTimeGetSeconds(stillImageTime));
    stillImageTimeItem.dataType = (__bridge NSString *)kCMMetadataBaseDataType_SInt8;
    
    // AVAssetExportSession fails when the destination file already exists, so
    // a stale temp file from a previous run broke every re-export. Clear it.
    [[NSFileManager defaultManager] removeItemAtURL:outputURL error:nil];
    
    // 6. 配置导出会话
    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition
                                                                          presetName:AVAssetExportPresetHighestQuality];
    if (!exportSession) {
        // Previously a nil session fell through and `completion` never fired.
        if (completion) completion([NSError errorWithDomain:@"LivePhotoAssembler"
                                                       code:-1003
                                                   userInfo:@{NSLocalizedDescriptionKey: @"导出失败"}]);
        return;
    }
    
    exportSession.outputURL = outputURL;
    exportSession.outputFileType = AVFileTypeQuickTimeMovie;
    exportSession.metadata = @[contentIdentifier, stillImageTimeItem];
    exportSession.videoComposition = videoComposition;
    exportSession.shouldOptimizeForNetworkUse = YES;
    
    // 7. 开始导出
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        switch (exportSession.status) {
            case AVAssetExportSessionStatusCompleted:
                if (completion) completion(nil);
                break;
            case AVAssetExportSessionStatusFailed:
                if (completion) completion(exportSession.error ?: [NSError errorWithDomain:@"LivePhotoAssembler"
                                                                                      code:-1003
                                                                                  userInfo:@{NSLocalizedDescriptionKey: @"导出失败"}]);
                break;
            case AVAssetExportSessionStatusCancelled:
                if (completion) completion([NSError errorWithDomain:@"LivePhotoAssembler"
                                                               code:-1004
                                                           userInfo:@{NSLocalizedDescriptionKey: @"导出已取消"}]);
                break;
            default:
                // Unknown/waiting/exporting are not terminal states; the
                // handler should only be invoked once export finishes.
                break;
        }
    }];
}
/// Debug helper: logs a video track's preferredTransform, its natural size,
/// and the rect produced by applying the transform to that size — useful when
/// diagnosing orientation problems.
+ (void)logTransformInfoForTrack:(AVAssetTrack *)videoTrack {
    CGAffineTransform transform = videoTrack.preferredTransform;
    CGSize size = videoTrack.naturalSize;

    NSLog(@"Transform: a=%f, b=%f, c=%f, d=%f, tx=%f, ty=%f",
          transform.a, transform.b, transform.c, transform.d, transform.tx, transform.ty);
    NSLog(@"Natural Size: %@", NSStringFromCGSize(size));

    CGRect naturalBounds = CGRectMake(0, 0, size.width, size.height);
    CGRect transformedRect = CGRectApplyAffineTransform(naturalBounds, transform);
    NSLog(@"Transformed Rect: %@", NSStringFromCGRect(transformedRect));
}
// 1. Build the Content Identifier metadata item
/// Returns an immutable QuickTime ("mdta" key space) content-identifier
/// metadata item carrying `identifier` as a UTF-8 value.
- (AVMetadataItem *)metadataItemForIdentifier:(NSString *)identifier {
    AVMutableMetadataItem *metadataItem = [[AVMutableMetadataItem alloc] init];
    metadataItem.key = AVMetadataQuickTimeMetadataKeyContentIdentifier; // @"com.apple.quicktime.content.identifier"
    metadataItem.keySpace = AVMetadataKeySpaceQuickTimeMetadata;        // @"mdta"
    metadataItem.value = identifier;
    metadataItem.dataType = (__bridge NSString *)kCMMetadataBaseDataType_UTF8;
    // Hand back the immutable flavor.
    return [metadataItem copy];
}

// 2. Build the still-image-time timed-metadata adaptor
/// Returns an adaptor for appending the timed
/// "mdta/com.apple.quicktime.still-image-time" sample (SInt8, boxed) to an
/// asset writer.
- (AVAssetWriterInputMetadataAdaptor *)stillImageTimeMetadataAdaptor {
    // Specification for an SInt8 boxed metadata sample keyed
    // "mdta/com.apple.quicktime.still-image-time".
    NSString *specIdentifier = [NSString stringWithFormat:@"%@/%@",
                                AVMetadataKeySpaceQuickTimeMetadata,
                                @"com.apple.quicktime.still-image-time"];
    NSArray *specifications = @[@{
        (__bridge NSString *)kCMMetadataFormatDescriptionMetadataSpecificationKey_Identifier : specIdentifier,
        (__bridge NSString *)kCMMetadataFormatDescriptionMetadataSpecificationKey_DataType :
            (__bridge NSString *)kCMMetadataBaseDataType_SInt8,
    }];

    // Turn the specification into a format description the writer input can
    // use as its source format hint.
    CMFormatDescriptionRef formatDescription = NULL;
    CMMetadataFormatDescriptionCreateWithMetadataSpecifications(kCFAllocatorDefault,
                                                                kCMMetadataFormatType_Boxed,
                                                                (__bridge CFArrayRef)specifications,
                                                                &formatDescription);

    AVAssetWriterInput *metadataInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeMetadata
                                                                       outputSettings:nil
                                                                     sourceFormatHint:formatDescription];
    if (formatDescription) {
        CFRelease(formatDescription);
    }

    return [AVAssetWriterInputMetadataAdaptor assetWriterInputMetadataAdaptorWithAssetWriterInput:metadataInput];
}

// 3. Still-image-time metadata item (stillImageTimeMetadataItem in the Swift original)
/// Returns an immutable placeholder still-image-time item; the effective value
/// is supplied later when the item is appended inside a timed metadata group.
- (AVMetadataItem *)stillImageTimeMetadataItem {
    AVMutableMetadataItem *stillImageItem = [[AVMutableMetadataItem alloc] init];
    stillImageItem.key = @"com.apple.quicktime.still-image-time";
    stillImageItem.keySpace = AVMetadataKeySpaceQuickTimeMetadata;
    stillImageItem.dataType = (__bridge NSString *)kCMMetadataBaseDataType_SInt8;
    stillImageItem.value = @0; // placeholder; real value set at append time
    return [stillImageItem copy];
}

// 4. Still-image time range (makeStillImageTimeRange in the Swift original)
/// Returns a one-tick time range whose start is `percent * frameCount`
/// expressed in the asset duration's timescale.
/// NOTE(review): this treats the frame index as a raw CMTimeValue in the
/// asset's timescale — that only corresponds to a frame position if
/// `frameCount` was chosen against that timescale; confirm with the caller.
- (CMTimeRange)makeStillImageTimeRangeForAsset:(AVAsset *)asset
                                      percent:(float)percent
                                   frameCount:(int)frameCount {
    CMTime assetDuration = asset.duration;
    CMTimeValue stillValue = (CMTimeValue)(percent * frameCount);
    CMTime rangeStart = CMTimeMake(stillValue, assetDuration.timescale);
    CMTime oneTick = CMTimeMake(1, assetDuration.timescale);
    return CMTimeRangeMake(rangeStart, oneTick);
}
@end
