//
//  PKEditorController.m
//  PKVideoOveray
//
//  Created by Snow Joker on 2023/4/10.
//

#import "PKEditorController.h"
#import "CustomVideoCompositionInstruction.h"
#import "CustomVideoCompositing.h"

@interface PKEditorController ()

/// Lazily-created destination path for exports (see -outputPath / -exportURL).
/// `copy` because NSString has a mutable subclass.
@property (nonatomic, copy, readwrite) NSString *outputPath;

/// Composition assembled from _clips by -buildComposition:.
@property (nonatomic, strong, readwrite) AVMutableComposition *composition;

/// Video composition that routes rendering through CustomVideoCompositing.
@property (nonatomic, strong, readwrite) AVMutableVideoComposition *videoComposition;

/// Audio mix built when cropping a clip that has an audio track.
/// NOTE(review): name looks like a typo for "audioMix" — kept for compatibility.
@property (nonatomic, strong, readwrite) AVMutableAudioMix *audioMax;

@end


@implementation PKEditorController


/// Rebuilds self.composition and self.videoComposition from _clips.
/// Clears both and returns early when there are no clips.
/// @param forPlayback Currently unused; playback and export share one pipeline.
- (void)buildCompositionObjectsForPlayback:(BOOL)forPlayback
{
    if (_clips.count == 0) {
        self.composition = nil;
        self.videoComposition = nil;
        return;
    }

    // Output timing/geometry used by CustomVideoCompositing.
    static const int32_t kFramesPerSecond = 30;
    const CGSize kRenderSize = CGSizeMake(360, 304);

    // nil-safe first element; the empty case was handled above.
    CGSize videoSize = [_clips.firstObject naturalSize];
    AVMutableComposition *composition = [AVMutableComposition composition];
    composition.naturalSize = videoSize;
    self.composition = [self buildComposition:composition];

    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoCompositionWithPropertiesOfAsset:self.composition];
    // Route rendering through the custom compositor so its instructions apply.
    videoComposition.customVideoCompositorClass = [CustomVideoCompositing class];
    [self buildVideoComposition:videoComposition];
    videoComposition.frameDuration = CMTimeMake(1, kFramesPerSecond);
    // NOTE(review): render size is hard-coded rather than derived from videoSize —
    // confirm this matches what CustomVideoCompositing expects.
    videoComposition.renderSize = kRenderSize;
    videoComposition.renderScale = 1.0;
    self.videoComposition = videoComposition;
}
#pragma mark - Trim a clip's time range and crop/offset its picture
/// Trims `asset` to [start, start+duration] (seconds) and offsets the picture,
/// rebuilding self.composition / self.videoComposition along the way.
/// @param asset    Source movie; must contain a video track.
/// @param start    Trim start in seconds (0 = beginning of the asset).
/// @param duration Trim length in seconds.
/// @param cropSize Output picture size; {0,0} falls back to 720x1280.
/// @param point    Translation applied to the frame (y is negated).
/// @return The configured video composition, or nil when the asset has no
///         video track or the video insertion fails.
- (AVMutableVideoComposition *)cropVideoByVideoTrack:(AVURLAsset *)asset withTimeStart:(float)start withTimeDuration:(float)duration withCropFrame:(CGSize)cropSize withXY:(CGPoint)point
{
    AVAssetTrack *videoTrack = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
    if (!videoTrack) {
        return nil;
    }

    // start/duration are seconds — the original used CMTimeMake(start, 600),
    // which interprets them as counts of 1/600 s and truncates the fraction.
    const CMTimeScale kTimescale = 600;
    CMTimeRange videoTimeRange = CMTimeRangeMake(CMTimeMakeWithSeconds(start, kTimescale),
                                                 CMTimeMakeWithSeconds(duration, kTimescale));

    AVMutableComposition *composition = [AVMutableComposition composition];
    NSError *error = nil;

    // Audio: copy the trimmed range and attach a unity-volume mix.
    AVAssetTrack *audioTrack = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject;
    if (audioTrack) {
        AVMutableCompositionTrack *audioCompositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        if ([audioCompositionTrack insertTimeRange:videoTimeRange ofTrack:audioTrack atTime:kCMTimeZero error:&error]) {
            AVMutableAudioMixInputParameters *inputParameter = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:audioCompositionTrack];
            [inputParameter setVolume:1 atTime:kCMTimeZero];
            AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
            audioMix.inputParameters = @[inputParameter];
            self.audioMax = audioMix;
        } else {
            NSLog(@"PKEditorController: audio insert failed: %@", error);
        }
    }

    // Video: copy the trimmed range.
    AVMutableCompositionTrack *videoCompositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    if (![videoCompositionTrack insertTimeRange:videoTimeRange ofTrack:videoTrack atTime:kCMTimeZero error:&error]) {
        NSLog(@"PKEditorController: video insert failed: %@", error);
        return nil;
    }

    // Render size: requested crop (720x1280 default), rounded down to a
    // multiple of 16 as hardware encoders prefer.
    CGSize renderSize = CGSizeMake(cropSize.width == 0 ? 720 : cropSize.width,
                                   cropSize.height == 0 ? 1280 : cropSize.height);
    renderSize.width = floor(renderSize.width / 16.0) * 16.0;
    renderSize.height = floor(renderSize.height / 16.0) * 16.0;

    AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoCompositionTrack];
    // Shift the frame by (point.x, -point.y); identity when point is zero.
    [layerInstruction setTransform:CGAffineTransformMakeTranslation(point.x, -point.y) atTime:kCMTimeZero];

    AVMutableVideoCompositionInstruction *compositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    // Cover exactly what was inserted. The original hard-coded a 5 s range
    // (CMTimeMake(3000, 600)) here, which breaks exports of other durations.
    compositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration);
    compositionInstruction.layerInstructions = @[layerInstruction];

    // Mirror the source frame rate; fall back to 30 fps when it is unknown
    // (a zero rate would otherwise divide by zero below).
    float fps = videoTrack.nominalFrameRate;
    CMTime frameDuration = (fps > 0)
        ? CMTimeMakeWithSeconds(1.0 / fps, asset.duration.timescale)
        : CMTimeMake(1, 30);

    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.instructions = @[compositionInstruction];
    videoComposition.frameDuration = frameDuration;
    videoComposition.renderScale = 1.0;
    videoComposition.renderSize = renderSize;

    self.composition = composition;
    self.videoComposition = videoComposition;
    return videoComposition;
}


/// Lays _clips out on two alternating video tracks plus one audio track.
/// Track layout (what CustomVideoCompositing consumes):
///   clip 0 -> video track A at t=0 (its audio also becomes the soundtrack),
///   clip 1 -> video track B at t=0,
///   clip 2 -> video track B at t=8 s; any further clips are ignored.
/// @param composition The composition to populate (already sized by the caller).
/// @return The same composition, with tracks inserted.
- (AVMutableComposition *)buildComposition:(AVMutableComposition *)composition
{
    AVMutableCompositionTrack *videoTrackA = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *videoTrackB = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *audioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

    NSUInteger clipsCount = _clips.count;
    for (NSUInteger i = 0; i < clipsCount; i++) {
        AVURLAsset *asset = [_clips objectAtIndex:i];

        // Use the caller-supplied trim range when one exists for this clip;
        // otherwise take the whole asset. Bounds/class checks avoid the crash
        // the original had when _clipsTimeRanges is shorter than _clips.
        CMTimeRange timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [asset duration]);
        if (i < _clipsTimeRanges.count) {
            NSValue *clipTimeRange = [_clipsTimeRanges objectAtIndex:i];
            if ([clipTimeRange isKindOfClass:[NSValue class]]) {
                timeRangeInAsset = [clipTimeRange CMTimeRangeValue];
            }
        }

        AVAssetTrack *clipVideoTrack = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
        if (clipVideoTrack) {
            NSError *error = nil;
            BOOL inserted = YES;
            if (i == 0) {
                inserted = [videoTrackA insertTimeRange:timeRangeInAsset ofTrack:clipVideoTrack atTime:kCMTimeZero error:&error];
            } else if (i == 1) {
                inserted = [videoTrackB insertTimeRange:timeRangeInAsset ofTrack:clipVideoTrack atTime:kCMTimeZero error:&error];
            } else if (i == 2) {
                // Third clip starts 8 s into track B.
                // NOTE(review): the 8 s offset is hard-coded — confirm it matches the overlay design.
                inserted = [videoTrackB insertTimeRange:timeRangeInAsset ofTrack:clipVideoTrack atTime:CMTimeMake(8, 1) error:&error];
            }
            if (!inserted) {
                NSLog(@"PKEditorController: video insert for clip %lu failed: %@", (unsigned long)i, error);
            }
        }

        // Only the first clip contributes audio; clips without an audio track
        // no longer crash on objectAtIndex:0.
        if (i == 0) {
            AVAssetTrack *clipAudioTrack = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject;
            if (clipAudioTrack) {
                NSError *error = nil;
                if (![audioTrack insertTimeRange:timeRangeInAsset ofTrack:clipAudioTrack atTime:kCMTimeZero error:&error]) {
                    NSLog(@"PKEditorController: audio insert failed: %@", error);
                }
            }
        }
    }

    return composition;
}


/// Builds an export session for the current composition.
/// @param presetName An AVAssetExportSession preset name
///        (e.g. AVAssetExportPresetHighestQuality).
/// @return A configured session that writes an MP4 to self.outputPath.
- (AVAssetExportSession *)assetExportSessionWithPreset:(NSString *)presetName
{
    AVAssetExportSession *session = [[AVAssetExportSession alloc] initWithAsset:self.composition presetName:presetName];
    session.videoComposition = self.videoComposition;
    session.outputFileType = AVFileTypeMPEG4;

    // AVAssetExportSession fails if a file already exists at outputURL,
    // so clear any leftover from a previous export first.
    NSString *path = self.outputPath;
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:path]) {
        [fileManager removeItemAtPath:path error:nil];
    }
    session.outputURL = [NSURL fileURLWithPath:path];

    if (self.audioMax) {
        session.audioMix = self.audioMax;
    }
    return session;
}

/// Generates a temporary .mp4 path keyed by the current time in milliseconds.
/// Uses an integer format — the original "%f" embedded six decimal places
/// (e.g. "1681...000.000000.mp4") in the file name.
- (NSString *)exportURL
{
    long long millis = (long long)([[NSDate date] timeIntervalSince1970] * 1000);
    NSString *fileName = [NSString stringWithFormat:@"%lld.mp4", millis];
    return [NSTemporaryDirectory() stringByAppendingPathComponent:fileName];
}

/// Lazy accessor: creates a fresh temporary export path on first use.
- (NSString *)outputPath
{
    if (!_outputPath) {
        _outputPath = [self exportURL];
    }
    return _outputPath;
}


/// Replaces every stock instruction in `videoComposition` with a
/// CustomVideoCompositionInstruction carrying the same source track IDs,
/// time range, and layer instructions, so CustomVideoCompositing can render it.
/// @param videoComposition The composition whose instructions are rewritten in place.
/// @return The same videoComposition, for chaining.
- (AVMutableVideoComposition *)buildVideoComposition:(AVMutableVideoComposition *)videoComposition
{
    NSMutableArray *replacedInstructions = [NSMutableArray array];
    for (AVVideoCompositionInstruction *original in videoComposition.instructions) {
        // Collect the source track IDs the custom compositor must sample from.
        NSMutableArray *sourceTrackIDs = [NSMutableArray array];
        for (AVVideoCompositionLayerInstruction *layer in original.layerInstructions) {
            [sourceTrackIDs addObject:@(layer.trackID)];
        }
        CustomVideoCompositionInstruction *replacement =
            [[CustomVideoCompositionInstruction alloc] initWithSourceTrackIDs:sourceTrackIDs
                                                                    timeRange:original.timeRange];
        replacement.layerInstructions = original.layerInstructions;
        [replacedInstructions addObject:replacement];
    }
    videoComposition.instructions = replacedInstructions;
    return videoComposition;
}


/// Wraps the current composition in a player item ready for preview playback.
- (AVPlayerItem *)playerItem
{
    AVPlayerItem *item = [AVPlayerItem playerItemWithAsset:self.composition];
    item.videoComposition = self.videoComposition;
    return item;
}



@end
