//
//  ViewController.m
//  CIImageVideo
//
//  Created by MDLK-CC on 2017/7/13.
//  Copyright © 2017年 MC. All rights reserved.
//

#import "ViewController.h"
#import <MetalKit/MetalKit.h>
#import <AVFoundation/AVFoundation.h>
#import "AppDelegate.h"
#import <GLKit/GLKit.h>

@interface ViewController ()<AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureAudioDataOutputSampleBufferDelegate>

/// Names of the Core Image filters the pan gesture cycles through.
/// `copy` (not `strong`) so a mutable array passed in cannot change behind our back.
@property (copy , nonatomic) NSArray *filters;

/// The currently selected filter applied to every video frame.
@property (strong , nonatomic) CIFilter *filter;

/// Core Image context bound to the GL (or Metal) backend used for rendering.
@property (strong , nonatomic) CIContext *context;

@property (strong , nonatomic) MTKView *mtkView;

@property (strong, nonatomic) id<MTLCommandQueue> commandQueue;

/// GL-backed preview view that displays the filtered camera frames.
@property (strong , nonatomic) GLKView *videoPreviewView;
@property (strong , nonatomic) EAGLContext *eaglContext;


@property (strong, nonatomic) AVCaptureSession *session;

@property (strong, nonatomic) AVAssetWriter *writer;
@property (strong, nonatomic) AVAssetWriterInput *videoWriterInput;
@property (strong, nonatomic) AVAssetWriterInput *audioWriterInput;

@property (strong, nonatomic) AVCaptureAudioDataOutput *audioOutput;
@property (strong, nonatomic) AVCaptureVideoDataOutput *videoOutput;
/**
 Serial queue on which the recorded sample buffers are delivered.
 */
@property (strong, nonatomic) dispatch_queue_t queue;

@end

@implementation ViewController

/// Lifecycle: set up the capture pipeline and the list of selectable filters.
- (void)viewDidLoad {
    [super viewDidLoad];

    [self setupSession];

    // Filter names cycled by the pan gesture (see -filterChoose).
    self.filters = @[ @"CIPhotoEffectMono", @"CIPhotoEffectFade", @"CIMotionBlur" ];
}

/// Builds the view hierarchy: a GLKView (backed by an OpenGL ES 2 context)
/// placed behind the controller's own transparent view, which receives the
/// filtered camera frames rendered through Core Image.
- (void)loadView
{
    // Overriding -loadView obliges us to assign self.view ourselves (the
    // original assignment lived only in the commented-out Metal path). Use a
    // transparent container so the preview sitting behind it stays visible.
    self.view = [[UIView alloc] initWithFrame:[UIScreen mainScreen].bounds];
    self.view.backgroundColor = [UIColor clearColor];

    UIView *window = ((AppDelegate *)[UIApplication sharedApplication].delegate).window;
    _eaglContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
    _videoPreviewView = [[GLKView alloc] initWithFrame:window.bounds context:_eaglContext];
    // We drive redraws manually from the capture callback via -display.
    _videoPreviewView.enableSetNeedsDisplay = NO;

    // The native video image from the back camera is in
    // UIDeviceOrientationLandscapeLeft (home button on the right), so apply a
    // clockwise 90° rotation to draw the preview upright. A mirrored
    // front-camera preview would additionally need
    // CGAffineTransformMakeScale(-1.0, 1.0) concatenated onto this rotation.
    _videoPreviewView.transform = CGAffineTransformMakeRotation(M_PI_2);
    _videoPreviewView.frame = window.bounds;

    // Make the preview a subview of the window and send it to the back; this
    // keeps this controller's UI on top of the video preview and makes the
    // preview unaffected by device rotation.
    [window addSubview:_videoPreviewView];
    [window sendSubviewToBack:_videoPreviewView];

    // Create the CIContext after _videoPreviewView is properly set up.
    // A null working color space disables CI's intermediate color matching.
    self.context = [CIContext contextWithEAGLContext:_eaglContext
                                             options:@{kCIContextWorkingColorSpace : [NSNull null]}];

    // Bind the frame buffer once so drawableWidth/drawableHeight (pixels, not
    // points) are valid when read later from the capture queue, avoiding
    // access to _videoPreviewView's properties from another thread/queue.
    [_videoPreviewView bindDrawable];

    self.filter = [CIFilter filterWithName:@"CIPhotoEffectMono"];

    UIPanGestureRecognizer *pan = [[UIPanGestureRecognizer alloc] initWithTarget:self
                                                                          action:@selector(filterChoose)];
    [self.videoPreviewView addGestureRecognizer:pan];
}

/// Advances to the next filter in self.filters, wrapping back to the first.
/// Safe when the current filter's name is absent from the list (NSNotFound)
/// and when the list is empty (the original indexed filters[0] and crashed).
- (void)filterChoose
{
    if (self.filters.count == 0) {
        return; // nothing to cycle through
    }
    NSUInteger index = [self.filters indexOfObject:self.filter.name];
    // NSNotFound → restart at the first filter, matching the old behavior.
    NSUInteger next = (index == NSNotFound) ? 0 : (index + 1) % self.filters.count;
    self.filter = [CIFilter filterWithName:self.filters[next]];
}


/// Configures the AVCaptureSession: camera input, BGRA video data output and
/// audio data output, all delivering sample buffers on a private serial queue.
/// The session is started only after configuration is complete (the original
/// called -startRunning before any output was attached and discarded the
/// device-input creation error with error:nil).
- (void)setupSession
{
    // Video input (default camera).
    AVCaptureDevice *video = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *inputError = nil;
    AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:video error:&inputError];
    if (!videoInput) {
        NSLog(@"Failed to create camera input: %@", inputError);
        return;
    }

    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    if ([session canAddInput:videoInput]) {
        [session addInput:videoInput];
    }
    _session = session;

    // Serial queue on which both audio and video buffers are delivered.
    _queue = dispatch_queue_create("DataOutputQueue", DISPATCH_QUEUE_SERIAL);

    // Video output: BGRA pixel buffers so Core Image can wrap them directly.
    _videoOutput = [AVCaptureVideoDataOutput new];
    _videoOutput.videoSettings = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
    // Drop late frames rather than queueing them — this is a live preview.
    [_videoOutput setAlwaysDiscardsLateVideoFrames:YES];
    [_videoOutput setSampleBufferDelegate:self queue:self.queue];
    if ([session canAddOutput:_videoOutput]) {
        [session addOutput:_videoOutput];
    }
    AVCaptureConnection *connection = [_videoOutput connectionWithMediaType:AVMediaTypeVideo];
    connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;

    // Audio output.
    _audioOutput = [AVCaptureAudioDataOutput new];
    [_audioOutput setSampleBufferDelegate:self queue:self.queue];
    if ([session canAddOutput:_audioOutput]) {
        [session addOutput:_audioOutput];
    }

    // Start only after the session is fully configured.
    [session startRunning];
}


/**
 Creates the video AVAssetWriterInput with the recording configuration
 (resolution, H.264 codec, aspect-fill scaling) and attaches it to the writer.
 NOTE(review): width/height are taken from the screen's bounds, which are in
 points, not pixels — confirm this is the intended recording resolution.
 */
- (void)initialVideoInput
{
    CGSize screenSize = [UIScreen mainScreen].bounds.size;
    NSDictionary *settings = @{
        AVVideoCodecKey : AVVideoCodecH264,
        AVVideoWidthKey : @((NSInteger)screenSize.width),
        AVVideoHeightKey : @((NSInteger)screenSize.height),
        AVVideoScalingModeKey : AVVideoScalingModeResizeAspectFill,
    };

    // Input that encodes the video samples; flag it as fed by a real-time
    // capture source so the writer tunes its buffering accordingly.
    _videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                           outputSettings:settings];
    _videoWriterInput.expectsMediaDataInRealTime = YES;

    if ([self.writer canAddInput:_videoWriterInput]) {
        [self.writer addInput:_videoWriterInput];
    }
}


/// AVCapture delegate: renders each filtered camera frame into the GLKView.
/// Runs on the private capture queue (not the main thread); the drawable was
/// bound once in -loadView so its pixel dimensions are safe to read here.
/// (The original computed an `isVideo` flag it never used and tested the
/// audio media type twice; CMSampleBufferGetImageBuffer was not NULL-checked.)
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    // Audio buffers arrive on this same delegate; only video frames are drawn.
    CMFormatDescriptionRef formatDesc = CMSampleBufferGetFormatDescription(sampleBuffer);
    if (CMFormatDescriptionGetMediaType(formatDesc) == kCMMediaType_Audio) {
        return;
    }

    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (imageBuffer == NULL) {
        return; // no pixel data in this sample; nothing to render
    }
    CIImage *sourceImage = [CIImage imageWithCVPixelBuffer:(CVPixelBufferRef)imageBuffer options:nil];

    // Run the frame through the currently selected filter.
    CIImage *filteredImage = RunFilter(sourceImage, self.filter);

    // Draw the full source extent. Center-cropping to the preview's aspect
    // ratio could be reinstated here if letterboxing is undesirable.
    CGRect drawRect = sourceImage.extent;

    [_videoPreviewView bindDrawable];

    if (_eaglContext != [EAGLContext currentContext])
        [EAGLContext setCurrentContext:_eaglContext];

    // Clear the GL view to grey so frames the filter rejects show as grey.
    glClearColor(0.5, 0.5, 0.5, 1.0);
    glClear(GL_COLOR_BUFFER_BIT);

    // Set the blend mode to "source over" so that CI will use that.
    glEnable(GL_BLEND);
    glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA);

    if (filteredImage)
        [self.context drawImage:filteredImage
                         inRect:CGRectMake(0, 0, self.videoPreviewView.drawableWidth, self.videoPreviewView.drawableHeight)
                       fromRect:drawRect];

    [_videoPreviewView display];
}

/// Applies `filter` to `cameraImage` and returns the filtered image, or nil
/// when the filter yields no output or an output with an empty extent.
static inline CIImage *RunFilter(CIImage *cameraImage, CIFilter *filter)
{
    [filter setValue:cameraImage forKey:kCIInputImageKey];

    CIImage *result = filter.outputImage;
    if (result == nil || CGRectIsEmpty(result.extent)) {
        return nil;
    }
    return result;
}


/// Lazily creates the MP4 asset writer for [self fileUrl].
/// Returns nil (and logs) on failure — the original discarded `writerError`
/// and would have messaged a nil writer, silently masking the problem.
- (AVAssetWriter *)writer
{
    if (!_writer) {
        NSError *writerError = nil;
        AVAssetWriter *writer = [AVAssetWriter assetWriterWithURL:[self fileUrl]
                                                         fileType:AVFileTypeMPEG4
                                                            error:&writerError];
        if (!writer) {
            NSLog(@"Failed to create asset writer: %@", writerError);
            return nil;
        }
        // Interleave media data so the file is suitable for progressive playback.
        writer.shouldOptimizeForNetworkUse = YES;
        _writer = writer;
    }
    return _writer;
}

/**
 Returns a unique, timestamped .mp4 file URL inside Documents/video,
 creating the directory on first use. (The original rebuilt the directory
 path a second time instead of reusing it, and used ':' in the date format —
 colons in file names are rendered as '/' on Apple file systems and were the
 classic path separator, so dashes are used instead.)
 */
- (NSURL *)fileUrl
{
    NSString *directory = [[NSHomeDirectory() stringByAppendingPathComponent:@"Documents"]
                           stringByAppendingPathComponent:@"video"];
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if (![fileManager fileExistsAtPath:directory]) {
        [fileManager createDirectoryAtPath:directory
               withIntermediateDirectories:YES
                                attributes:nil
                                     error:nil];
    }

    NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
    [formatter setDateFormat:@"yyyy-MM-dd-HH-mm-ss"];

    NSString *fileName = [NSString stringWithFormat:@"%@.mp4", [formatter stringFromDate:[NSDate date]]];
    NSString *outputFilePath = [directory stringByAppendingPathComponent:fileName];
    NSLog(@"save path is :%@", outputFilePath);
    return [NSURL fileURLWithPath:outputFilePath];
}

@end
