//
//  WQAddWatermarkCommand.m
//  VideoDemo
//
//  Created by C on 2019/8/20.
//  Copyright © 2019 c. All rights reserved.
//

#import "WQAddWatermarkCommand.h"

@implementation WQAddWatermarkCommand

#pragma mark - Public

/// Entry point of the edit command. Copies the asset's first video/audio
/// tracks into a lazily created mutable composition, builds a pass-through
/// video composition whose layer transform aspect-fits the (rotation-aware)
/// video frame into the main screen bounds, then posts
/// AVSEEditCommandCompletionNotification so the controller can proceed.
/// @param asset The source asset. Missing video/audio tracks are tolerated;
///              each step simply does less work.
- (void)performWithAsset:(AVAsset*)asset
{
    // -firstObject is nil-safe; indexing [0] on an empty array would throw.
    AVAssetTrack *assetVideoTrack = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
    AVAssetTrack *assetAudioTrack = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject;

    // Step 1
    // Create the composition once and insert the source tracks into it.
    if (!self.mutableComposition) {
        self.mutableComposition = [AVMutableComposition composition];
        [self wq_insertTrack:assetVideoTrack ofAsset:asset mediaType:AVMediaTypeVideo];
        [self wq_insertTrack:assetAudioTrack ofAsset:asset mediaType:AVMediaTypeAudio];
    }

    // Step 2
    // Build the pass-through video composition once, sized to the video frame.
    if (assetVideoTrack != nil
        && [self.mutableComposition tracksWithMediaType:AVMediaTypeVideo].count != 0
        && !self.mutableVideoComposition) {
        [self wq_buildVideoCompositionForVideoTrack:assetVideoTrack];
    }

    // Step 3
    // Notify AVSEViewController about add watermark operation completion.
    [[NSNotificationCenter defaultCenter] postNotificationName:AVSEEditCommandCompletionNotification
                                                        object:self];
}

#pragma mark - Private

/// Inserts the full duration of `track` into a new composition track of the
/// given media type. No-op when `track` is nil.
/// Fix over the original: the BOOL result of -insertTimeRange:… is checked
/// (per Cocoa convention — check the return value, not the error pointer);
/// the original discarded both the result and the NSError.
- (void)wq_insertTrack:(AVAssetTrack *)track
               ofAsset:(AVAsset *)asset
             mediaType:(AVMediaType)mediaType
{
    if (track == nil) {
        return;
    }
    AVMutableCompositionTrack *compositionTrack =
        [self.mutableComposition addMutableTrackWithMediaType:mediaType
                                             preferredTrackID:kCMPersistentTrackID_Invalid];
    NSError *error = nil;
    if (![compositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration])
                                   ofTrack:track
                                    atTime:kCMTimeZero
                                     error:&error]) {
        NSLog(@"WQAddWatermarkCommand: failed to insert %@ track: %@", mediaType, error);
    }
}

/// Builds self.mutableVideoComposition as a single pass-through instruction
/// covering the whole composition, with a layer transform that aspect-fits
/// the rotation-corrected frame into the screen rectangle.
- (void)wq_buildVideoCompositionForVideoTrack:(AVAssetTrack *)assetVideoTrack
{
    CGSize naturalSize = assetVideoTrack.naturalSize;

    self.mutableVideoComposition = [AVMutableVideoComposition videoComposition];
    self.mutableVideoComposition.frameDuration = CMTimeMake(1, 30); // 30 fps

    CGAffineTransform transform = assetVideoTrack.preferredTransform;
    // NOTE(review): -560 appears to be a workaround for one capture pipeline
    // that bakes a bogus vertical offset into preferredTransform; kept for
    // behavioral parity — confirm it is still needed.
    if (transform.ty == -560) {
        transform.ty = 0;
    }

    // If the track is rotated +/-90 degrees, swap width/height so renderSize
    // matches the displayed orientation. Fix over the original: compare with
    // a tolerance — the degree value comes from atan2 and floating-point
    // rounding means an exact ==90/-90 test can silently miss.
    CGFloat videoAngleInDegree = atan2(transform.b, transform.a) * 180 / M_PI;
    if (fabs(fabs(videoAngleInDegree) - 90) < 0.001) {
        CGFloat width = naturalSize.width;
        naturalSize.width = naturalSize.height;
        naturalSize.height = width;
    }
    self.mutableVideoComposition.renderSize = naturalSize;

    // Aspect-fit ("center inside") the frame into the device screen bounds.
    CGSize targetSize = [UIScreen mainScreen].bounds.size;
    transform = CGAffineTransformConcat(transform,
                                        [self wq_aspectFitTransformFromSize:naturalSize
                                                                     toSize:targetSize]);

    AVMutableVideoCompositionInstruction *passThroughInstruction =
        [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    passThroughInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, [self.mutableComposition duration]);

    AVAssetTrack *videoTrack = [self.mutableComposition tracksWithMediaType:AVMediaTypeVideo].firstObject;
    AVMutableVideoCompositionLayerInstruction *passThroughLayer =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    [passThroughLayer setTransform:transform atTime:kCMTimeZero];

    passThroughInstruction.layerInstructions = @[passThroughLayer];
    self.mutableVideoComposition.instructions = @[passThroughInstruction];
}

/// Returns the transform that scales and centers a frame of `fromSize` so it
/// is fully visible inside `toSize` ("center inside"), expressed in the
/// per-axis-normalized space the original math used (translate/scale are
/// divided back by xratio/yratio).
- (CGAffineTransform)wq_aspectFitTransformFromSize:(CGSize)fromSize toSize:(CGSize)toSize
{
    CGFloat xratio = toSize.width / fromSize.width;
    CGFloat yratio = toSize.height / fromSize.height;
    CGFloat ratio = MIN(xratio, yratio);

    // Offsets that center the scaled frame inside the target rectangle.
    CGFloat transx = (toSize.width - fromSize.width * ratio) / 2;
    CGFloat transy = (toSize.height - fromSize.height * ratio) / 2;

    CGAffineTransform matrix = CGAffineTransformMakeTranslation(transx / xratio, transy / yratio);
    return CGAffineTransformScale(matrix, ratio / xratio, ratio / yratio);
}

@end
