//
//  VideoCore.m
//  VideoDiary
//
//  Created by qianfeng on 15/9/29.
//  Copyright © 2015年 HYZ. All rights reserved.
//

#import "VideoCore.h"
#import "Masonry.h"
#import "AAPLPreviewView.h"

// Private class extension: capture-pipeline state and the recording delegate
// conformance are implementation details, kept out of the public header.
@interface VideoCore ()  <AVCaptureFileOutputRecordingDelegate>

// Serial queue on which all session configuration, start/stop and
// record calls run, keeping blocking AVFoundation work off the main thread.
@property (nonatomic) dispatch_queue_t sessionQueue;
// The capture session owning all inputs/outputs below.
@property (nonatomic) AVCaptureSession *session;
// Currently attached camera input; swapped by -changeCamera.
@property (nonatomic) AVCaptureDeviceInput *videoDeviceInput;
// Movie writer; drives the AVCaptureFileOutputRecordingDelegate callbacks.
@property (nonatomic) AVCaptureMovieFileOutput *movieFileOutput;
// Background task taken out while recording so the file can finish writing
// if the app is backgrounded mid-recording.
@property (nonatomic) UIBackgroundTaskIdentifier backgroundRecordingID;

// weak: the preview view is owned by the host view hierarchy
// set up in -setCameraAtView:.
@property (nonatomic, weak) AAPLPreviewView * previewView;

@end


@implementation VideoCore

/// Designated initializer.
/// @param delegate Receiver of camera/recording/export lifecycle callbacks.
/// @return An initialized VideoCore with its temp recording path chosen.
- (instancetype) initWithDelegate: (id<VideoCoreDelegate>)delegate
{
    self = [super init];
    if (self) {
        self.delegate = delegate;
#ifndef DEBUG
        // Release builds record into tmp/ — the system may purge it, which is
        // fine for an intermediate file.
        self.videoTempPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"record_tmp.mov"];
#else
        // Debug builds record into Documents/ so the intermediate .mov can be
        // pulled off the device and inspected.
        NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
        self.videoTempPath = [paths[0] stringByAppendingPathComponent:@"record_tmp.mov"];
#endif
    }
    
    return self;
}

/// Builds the capture session (640x480 preset), installs a preview view into
/// `view`, and asynchronously attaches camera/microphone inputs plus a movie
/// file output on the session queue.
/// @param view Host view for the preview layer.
/// @return YES (setup itself is asynchronous; failures are reported through
///         -videoCoreCameraFailed: on the main queue).
- (BOOL) setCameraAtView: (UIView *)view
{
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    [session beginConfiguration];
    session.sessionPreset = AVCaptureSessionPreset640x480;
    [session commitConfiguration];
    
    self.session = session;
    
    AAPLPreviewView * pView = [[AAPLPreviewView alloc] initWithFrame:CGRectZero];
    
    [view addSubview:pView];
    // Extend the preview 80pt past the host's top and bottom so the 4:3 feed
    // covers the visible area.
    [pView mas_makeConstraints:^(MASConstraintMaker *make) {
        make.left.equalTo(view.mas_left);
        make.top.equalTo(view.mas_top).offset(-80);
        make.right.equalTo(view.mas_right);
        make.bottom.equalTo(view.mas_bottom).offset(80);
    }];
    
    self.previewView = pView;
    
    pView.session = session;
    
    dispatch_queue_t sessionQueue = dispatch_queue_create("session queue", DISPATCH_QUEUE_SERIAL);
    [self setSessionQueue:sessionQueue];
    
    dispatch_async(sessionQueue, ^{
        [self setBackgroundRecordingID:UIBackgroundTaskInvalid];
        
        NSError *error = nil;
        
        AVCaptureDevice *videoDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionBack];
        
        AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
        
        // Check the returned input rather than the error pointer: Cocoa only
        // guarantees *error on failure, so `error` can be stale on success.
        if (!videoDeviceInput) {
            NSLog(@"%@", error);
            dispatch_async(dispatch_get_main_queue(), ^{
                [self.delegate videoCoreCameraFailed:self];
            });
            // Without a camera the rest of the graph is useless; stop here
            // instead of wiring up audio/output for a dead session.
            return;
        }
        
        if ([session canAddInput:videoDeviceInput]) {
            [session addInput:videoDeviceInput];
            [self setVideoDeviceInput:videoDeviceInput];
        }
        
        AVCaptureDevice *audioDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];
        AVCaptureDeviceInput *audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];
        
        if (!audioDeviceInput) {
            NSLog(@"%@", error);
            dispatch_async(dispatch_get_main_queue(), ^{
                [self.delegate videoCoreCameraFailed:self];
            });
            // Continue: video-only recording is still possible without audio.
        }
        
        if ([session canAddInput:audioDeviceInput]) {
            [session addInput:audioDeviceInput];
        }
        
        AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
        if ([session canAddOutput:movieFileOutput]) {
            [session addOutput:movieFileOutput];
            [self setMovieFileOutput:movieFileOutput];
        }
    });
    
    return YES;
}

// Starts streaming frames to the preview. Dispatched onto the session queue
// because -startRunning blocks until capture is up.
- (void) startCamera
{
    dispatch_async(self.sessionQueue, ^{
        [self.session startRunning];
    });
}

// Stops the capture session. Runs on the session queue so it serializes
// correctly with any in-flight start/record work.
- (void) stopCamera
{
    dispatch_async(self.sessionQueue, ^{
        [self.session stopRunning];
    });
}

// YES while the movie file output is actively writing a recording.
// (Messaging a nil movieFileOutput yields NO, i.e. "not recording".)
- (BOOL) isRecording
{
    return self.movieFileOutput.isRecording;
}


/// Begins recording to the temp file, remembering `path` as the final
/// destination for the later crop/flip export steps.
/// @param path Final output path; any stale file there is removed first.
/// @return YES (the actual start happens asynchronously on the session queue).
- (BOOL) startRecordToPath: (NSString *)path
{
    // Clear out leftovers from a previous take at both the destination and
    // the intermediate recording location.
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:path]) {
        [fileManager removeItemAtPath:path error:nil];
    }
    if ([fileManager fileExistsAtPath:self.videoTempPath]) {
        [fileManager removeItemAtPath:self.videoTempPath error:nil];
    }
    
    self.videoPath = path;
    
    dispatch_async(self.sessionQueue, ^{
        if ([self isRecording]) {
            return;
        }
        if ([[UIDevice currentDevice] isMultitaskingSupported]) {
            // Request background time: the didFinishRecording callback is not
            // delivered until the app returns to the foreground unless we hold
            // a background task, and the movie file needs time to be written
            // out if the app is backgrounded mid-recording. The task is ended
            // in the didFinishRecording delegate callback.
            [self setBackgroundRecordingID:[[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:nil]];
        }
        
        // Pin the output connection to portrait before frames start flowing.
        AVCaptureConnection *videoConnection = [self.movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
        videoConnection.videoOrientation = AVCaptureVideoOrientationPortrait;
        
        [self.movieFileOutput startRecordingToOutputFileURL:[NSURL fileURLWithPath:self.videoTempPath] recordingDelegate:self];
    });
    
    return YES;
}

// Stops an in-progress recording; no-op when nothing is being recorded.
- (void) stopRecord
{
    dispatch_async(self.sessionQueue, ^{
        if (![self isRecording]) {
            return;
        }
        [self.movieFileOutput stopRecording];
    });
}

/// Finds a capture device for `mediaType`, preferring the requested position.
/// @param mediaType AVMediaTypeVideo / AVMediaTypeAudio.
/// @param position Preferred hardware position (front/back).
/// @return The device at `position` if present, otherwise the first device of
///         that media type, or nil when none exists.
- (AVCaptureDevice *) deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position
{
    NSArray *candidates = [AVCaptureDevice devicesWithMediaType:mediaType];
    
    for (AVCaptureDevice *candidate in candidates) {
        if (candidate.position == position) {
            return candidate;
        }
    }
    
    // No device at the preferred position — fall back to any available one.
    return [candidates firstObject];
}

/// Toggles between the front and back camera on the session queue, then
/// notifies the delegate (videoCoreCameraChanged:) on the main queue.
- (void) changeCamera
{
    dispatch_async([self sessionQueue], ^{
        AVCaptureDevice *currentVideoDevice = [[self videoDeviceInput] device];
        AVCaptureDevicePosition preferredPosition = AVCaptureDevicePositionUnspecified;
        AVCaptureDevicePosition currentPosition = [currentVideoDevice position];
        
        switch (currentPosition)
        {
            case AVCaptureDevicePositionUnspecified:
                preferredPosition = AVCaptureDevicePositionBack;
                break;
            case AVCaptureDevicePositionBack:
                preferredPosition = AVCaptureDevicePositionFront;
                break;
            case AVCaptureDevicePositionFront:
                preferredPosition = AVCaptureDevicePositionBack;
                break;
        }
        
        AVCaptureDevice *videoDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:preferredPosition];
        NSError *error = nil;
        AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
        
        // If the other camera can't be opened, keep the current input and do
        // NOT tell the delegate the camera changed (it didn't).
        if (!videoDeviceInput) {
            NSLog(@"%@", error);
            return;
        }
        
        [[self session] beginConfiguration];
        
        [[self session] removeInput:[self videoDeviceInput]];
        
        if ([[self session] canAddInput:videoDeviceInput]) {
            [[self session] addInput:videoDeviceInput];
            [self setVideoDeviceInput:videoDeviceInput];
        }
        else {
            // New input was rejected by the session — restore the old one so
            // the session keeps producing frames.
            [[self session] addInput:[self videoDeviceInput]];
        }
        
        [[self session] commitConfiguration];
        
        dispatch_async(dispatch_get_main_queue(), ^{
            if (self.delegate && [self.delegate respondsToSelector:@selector(videoCoreCameraChanged:)]) {
                [self.delegate videoCoreCameraChanged:self];
            }
        });
    });
}

#pragma mark - AVCaptureFileOutputRecordingDelegate

// AVCaptureFileOutputRecordingDelegate: recording has actually begun writing.
// Forwards the event to the (optional) delegate method; messaging a nil
// delegate is a harmless no-op, so no separate nil check is needed.
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections
{
    id<VideoCoreDelegate> delegate = self.delegate;
    if ([delegate respondsToSelector:@selector(videoCoreStartRecording:)]) {
        [delegate videoCoreStartRecording:self];
    }
}

// AVCaptureFileOutputRecordingDelegate: the movie file has been written (or
// recording failed). Ends the background task taken out in
// -startRecordToPath: and notifies the delegate.
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
    if (error)
        NSLog(@"%@", error);
    
    // Balance beginBackgroundTaskWithExpirationHandler: from
    // -startRecordToPath:. Capture the identifier before resetting the
    // property; never ending the task leaks it and the system will
    // eventually kill the app when it is backgrounded.
    UIBackgroundTaskIdentifier backgroundRecordingID = [self backgroundRecordingID];
    [self setBackgroundRecordingID:UIBackgroundTaskInvalid];
    if (backgroundRecordingID != UIBackgroundTaskInvalid) {
        [[UIApplication sharedApplication] endBackgroundTask:backgroundRecordingID];
    }
    
    if (self.delegate && [self.delegate respondsToSelector:@selector(videoCoreFinishRecording:)]) {
        [self.delegate videoCoreFinishRecording:self];
    }
}

/// Exports the recorded temp movie as a square (width x width) MP4 at
/// self.videoPath, rotating the track upright and centering it vertically.
/// Notifies the delegate (videoCoreFinishCropping:) on the main queue when
/// the export finishes.
- (void) cropVideo
{
    // Portrait frame size produced by the 640x480 preset with a portrait
    // connection orientation.
    CGSize szVideo = CGSizeMake(480, 640);
    
    AVAsset *asset = [AVAsset assetWithURL:[NSURL fileURLWithPath:self.videoTempPath]];
    // firstObject is nil-safe, unlike objectAtIndex:0, if the recording
    // produced no video track.
    AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    
    AVMutableVideoComposition* videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.frameDuration = CMTimeMake(1, 30);
    // Square render canvas: width x width.
    videoComposition.renderSize = CGSizeMake(szVideo.width, szVideo.width);
    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
    
    AVMutableVideoCompositionLayerInstruction* transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:clipVideoTrack];
    
    // Rotate the track 90° into portrait and shift it up by half the excess
    // height so the square crop is taken from the vertical center.
    CGAffineTransform t1 = CGAffineTransformMakeTranslation(szVideo.width, -(szVideo.height-szVideo.width)/2);
    CGAffineTransform t2 = CGAffineTransformRotate(t1, M_PI_2);
    
    CGAffineTransform finalTransform = t2;
    [transformer setTransform:finalTransform atTime:kCMTimeZero];
    
    //add the transformer layer instructions, then add to video composition
    instruction.layerInstructions = [NSArray arrayWithObject: transformer];
    videoComposition.instructions = [NSArray arrayWithObject: instruction];
    
    // AVAssetExportSession fails if the output file already exists, so clear
    // any stale result first (mirrors the same guard in -flipVideo).
    if ([[NSFileManager defaultManager] fileExistsAtPath:self.videoPath]) {
        [[NSFileManager defaultManager] removeItemAtPath:self.videoPath error:nil];
    }
    
    AVAssetExportSession * exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetMediumQuality];
    exporter.videoComposition = videoComposition;
    exporter.outputURL = [NSURL fileURLWithPath:self.videoPath];
    exporter.outputFileType = AVFileTypeMPEG4;
    
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        // Surface export failures instead of silently ignoring them.
        if (exporter.status != AVAssetExportSessionStatusCompleted) {
            NSLog(@"%@", exporter.error);
        }
        dispatch_async(dispatch_get_main_queue(), ^{
            if (self.delegate && [self.delegate respondsToSelector:@selector(videoCoreFinishCropping:)]) {
                [self.delegate videoCoreFinishCropping:self];
            }
        });
    }];
}


/// Exports the recorded temp movie as a horizontally mirrored square MP4 at
/// self.videoPath (used e.g. for front-camera footage). Export runs
/// asynchronously; the result is logged.
- (void) flipVideo
{
    // Portrait frame size produced by the 640x480 preset with a portrait
    // connection orientation.
    CGSize szVideo = CGSizeMake(480, 640);
    
    AVAsset *asset = [AVAsset assetWithURL:[NSURL fileURLWithPath:self.videoTempPath]];
    // firstObject is nil-safe, unlike objectAtIndex:0, if the recording
    // produced no video track.
    AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    
    AVMutableVideoComposition* videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.frameDuration = CMTimeMake(1, 30);
    // Square render canvas: width x width (matches -cropVideo's output).
    videoComposition.renderSize = CGSizeMake(szVideo.width, szVideo.width);
    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
    
    AVMutableVideoCompositionLayerInstruction* transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:clipVideoTrack];
    
    // Mirror horizontally: negative x-scale flips, then translate back into
    // the visible canvas.
    CGAffineTransform t = CGAffineTransformMakeScale(-1.0f, 1.0f);
    t = CGAffineTransformTranslate(t, -szVideo.width, 0);
    
    CGAffineTransform finalTransform = t;
    [transformer setTransform:finalTransform atTime:kCMTimeZero];
    
    //add the transformer layer instructions, then add to video composition
    instruction.layerInstructions = [NSArray arrayWithObject: transformer];
    videoComposition.instructions = [NSArray arrayWithObject: instruction];
    
    // AVAssetExportSession fails if the output file already exists.
    if ([[NSFileManager defaultManager] fileExistsAtPath:self.videoPath]) {
        [[NSFileManager defaultManager] removeItemAtPath:self.videoPath error:nil];
    }
    
    //    Export
    AVAssetExportSession * exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetMediumQuality];
    exporter.videoComposition = videoComposition;
    exporter.outputURL = [NSURL fileURLWithPath:self.videoPath];
    exporter.outputFileType = AVFileTypeMPEG4;
    
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            // Only claim success when the export actually completed;
            // the old unconditional log masked failed exports.
            if (exporter.status == AVAssetExportSessionStatusCompleted) {
                NSLog(@"Video Flipped Completed.");
            }
            else {
                NSLog(@"Video flip failed: %@", exporter.error);
            }
        });
    }];
    
}
@end
