/*----------------------------------------------------------------------------------------------
 *
 * This file is XIU's property. It contains XIU's trade secret, proprietary and
 * confidential information.
 *
 * The information and code contained in this file is only for authorized XIU employees
 * to design, create, modify, or review.
 *
 * DO NOT DISTRIBUTE, DO NOT DUPLICATE OR TRANSMIT IN ANY FORM WITHOUT PROPER AUTHORIZATION.
 *
 * If you are not an intended recipient of this file, you must not copy, distribute, modify,
 * or take any action in reliance on it.
 *
 * If you have received this file in error, please immediately notify XIU and
 * permanently delete the original and any copy of any file and any printout thereof.
 * (c) www.xiusdk.cn
 *---------------------------------------------------------------------------------------------*/

#import "CameraViewController.h"
#import <CoreMedia/CoreMedia.h>
#import <GLKit/GLKit.h>
#import <AVFoundation/AVFoundation.h>

// Private state for the camera preview pipeline. Capture runs on the serial
// _captureSessionQueue (declared in the header); rendering draws CoreImage
// output into a GLKView via a shared EAGL context.
@interface CameraViewController ()<AVCaptureVideoDataOutputSampleBufferDelegate>
{
    EAGLContext *_eaglContext;            // GL context shared by _videoPreviewView and _ciContext
    GLKView *_videoPreviewView;           // on-screen preview; redrawn manually from -render:
    
    CGColorSpaceRef _deviceRgbColorSpace; // created in -viewDidLoad, released in -dealloc
    CGRect _videoPreviewViewBounds;       // drawable size in *pixels*, cached so the capture
                                          // queue never reads _videoPreviewView's properties
    
    AVCaptureDevice *_videoDevice;        // camera selected by -_start: (front preferred)
    NSTimeInterval _startTime;            // benchmark window start; 0 means "not started yet"
    int _frames;                          // frames rendered since _startTime (see -benchmark)
}

@end

@implementation CameraViewController

/// Nib-based initializer; funnels all shared setup through -xiu_commonInit.
-(instancetype)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
    self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
    if(self)
    {
        [self xiu_commonInit];
    }
    return self;
}

/// Storyboard/archive initializer; mirrors -initWithNibName:bundle:.
-(instancetype)initWithCoder:(NSCoder *)aDecoder
{
    self = [super initWithCoder:aDecoder];
    if(self)
    {
        [self xiu_commonInit];
    }
    return self;
}

/// Shared one-time setup for both init paths (previously duplicated verbatim):
/// resets the benchmark clock, clears the session queue (created lazily in
/// -viewDidLoad), and registers for the background/foreground notifications
/// that pause/resume capture. The observers are removed in -dealloc.
-(void)xiu_commonInit
{
    _startTime = 0;
    _captureSessionQueue = NULL;
    
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(applicationDidEnterBackground:) name:UIApplicationDidEnterBackgroundNotification object:nil];
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(applicationWillEnterForeground:) name:UIApplicationWillEnterForegroundNotification object:nil];
}

/// Balances the notification registrations made in init and releases the
/// CoreGraphics color space created in -viewDidLoad (if it was ever created).
-(void)dealloc
{
    [[NSNotificationCenter defaultCenter] removeObserver:self];
    
    if (_deviceRgbColorSpace != NULL)
    {
        CGColorSpaceRelease(_deviceRgbColorSpace);
    }
}
    
// Builds the render/capture pipeline. Order matters here: the GL context and
// GLKView must exist before the CIContext is created, and the drawable bounds
// must be cached before any capture-queue work starts.
- (void)viewDidLoad {
    [super viewDidLoad];

    //self.view.backgroundColor = nil;
    // Device RGB space, used when wrapping a processed GL texture in a CIImage
    // (see -render:). Released in -dealloc.
    _deviceRgbColorSpace = CGColorSpaceCreateDeviceRGB();
    
    // create the dispatch queue for handling capture session delegate method calls
    // (serial, so session configuration, frame delivery, and rendering never overlap)
    if( _captureSessionQueue  == NULL)
    {
        _captureSessionQueue = dispatch_queue_create("capture_session_queue", DISPATCH_QUEUE_SERIAL);
    }
    
    // prefer OpenGL ES 3, fall back to ES 2 on older hardware
    _eaglContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3];
    if( _eaglContext == nil)
    {
        _eaglContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
    }
    // manual redraw only: -render: calls -display explicitly per camera frame
    _videoPreviewView = [[GLKView alloc] initWithFrame:self.view.bounds context:_eaglContext];
    _videoPreviewView.enableSetNeedsDisplay = NO;
    _videoPreviewView.drawableDepthFormat = GLKViewDrawableDepthFormat24;
    //_videoPreviewView.drawableMultisample = GLKViewDrawableMultisample4X;
    [self.view insertSubview:_videoPreviewView atIndex:0];
    
    // create the CIContext instance, note that this must be done after _videoPreviewView is properly set up
    // (null working color space disables CI's intermediate color matching)
    _ciContext = [CIContext contextWithEAGLContext:_eaglContext options:@{kCIContextWorkingColorSpace : [NSNull null]} ];
    
    // bind the frame buffer to get the frame buffer width and height;
    // the bounds used by CIContext when drawing to a GLKView are in pixels (not points),
    // hence the need to read from the frame buffer's width and height;
    // in addition, since we will be accessing the bounds in another queue (_captureSessionQueue),
    // we want to obtain this piece of information so that we won't be
    // accessing _videoPreviewView's properties from another thread/queue
    [_videoPreviewView bindDrawable];
    _videoPreviewViewBounds = CGRectZero;
    _videoPreviewViewBounds.size.width = _videoPreviewView.drawableWidth;
    _videoPreviewViewBounds.size.height = _videoPreviewView.drawableHeight;
    // see if we have any video device
    // NOTE(review): devicesWithMediaType: is deprecated since iOS 10
    // (AVCaptureDeviceDiscoverySession replaces it) — still functional here.
    if ([[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] count] > 0)
    {
        // capture the orientation on the main thread; UIApplication must not be
        // touched from _captureSessionQueue
        __weak id weakSelf = self;
        AVCaptureVideoOrientation videoOrientation = (AVCaptureVideoOrientation)[UIApplication sharedApplication].statusBarOrientation;
        dispatch_async(_captureSessionQueue, ^(void) {
            [weakSelf _start:videoOrientation];
        });
    }
}

/// Resumes capture whenever the view is about to become visible. The work hops
/// onto the session queue; the session is captured weakly so a torn-down
/// controller turns the block into a harmless no-op (messaging nil).
-(void)viewWillAppear:(BOOL)animated
{
    [super viewWillAppear:animated];
    
    __weak AVCaptureSession *session = _captureSession;
    dispatch_async(_captureSessionQueue, ^{
        [session startRunning];
    });
}

/// Pauses capture when the view is about to leave the screen. Mirrors
/// -viewWillAppear:; the weak capture makes this safe after teardown.
-(void)viewWillDisappear:(BOOL)animated
{
    [super viewWillDisappear:animated];
    
    __weak AVCaptureSession *session = _captureSession;
    dispatch_async(_captureSessionQueue, ^{
        [session stopRunning];
    });
}

/// Pauses capture when the app is backgrounded — but only while this
/// controller's view is actually on screen; otherwise there is nothing to stop.
-(void)applicationDidEnterBackground:(NSNotification*)aNotification
{
    if (!self.isViewLoaded || self.view.window == nil)
    {
        return;
    }
    
    __weak AVCaptureSession *session = _captureSession;
    dispatch_async(_captureSessionQueue, ^{
        [session stopRunning];
    });
}

/// Resumes capture when the app returns to the foreground, provided this
/// controller's view is still in a window. Mirrors -applicationDidEnterBackground:.
-(void)applicationWillEnterForeground:(NSNotification*)aNotification
{
    if (!self.isViewLoaded || self.view.window == nil)
    {
        return;
    }
    
    __weak AVCaptureSession *session = _captureSession;
    dispatch_async(_captureSessionQueue, ^{
        [session startRunning];
    });
}

#pragma mark - Private methods
/// Configures and starts the capture pipeline. Runs on _captureSessionQueue.
/// Selects the front camera when available (falling back to any camera),
/// attaches a BGRA video-data output delivering frames back onto the session
/// queue, mirrors/orients the connection, and starts the session.
/// No-op if a session already exists.
/// @param videoOrientation Orientation for the video connection (captured from
///        the status bar on the main thread by the caller in -viewDidLoad).
- (void)_start:(AVCaptureVideoOrientation)videoOrientation
{
    if (_captureSession)
        return;
    
    // get the input device and also validate the settings
    NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    
    AVCaptureDevicePosition position = AVCaptureDevicePositionFront;
    
    _videoDevice = nil;
    for (AVCaptureDevice *device in videoDevices)
    {
        if (device.position == position) {
            _videoDevice = device;
            break;
        }
    }
    
    if (!_videoDevice)
    {
        // firstObject is nil-safe, unlike objectAtIndex:0 which would throw
        // on an empty device list
        _videoDevice = [videoDevices firstObject];
    }
    if (!_videoDevice)
    {
        NSLog(@"error - No video device available");
        return;
    }
    
    NSError* error = nil;
    
    // obtain device input
    AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:_videoDevice error:&error];
    if (!videoDeviceInput)
    {
        _videoDevice = nil;
        NSLog(@"error - Unable to obtain video device input, error: %@", error);
        return;
    }
    
    // obtain the preset and validate the preset
    //NOTE: AVCaptureSessionPresetPhoto cannot record audio, WHY?
    //NSString *preset = AVCaptureSessionPresetPhoto;
    NSString *preset = AVCaptureSessionPreset1280x720;
    if (![_videoDevice supportsAVCaptureSessionPreset:preset])
    {
        preset = AVCaptureSessionPresetMedium;
    }
    if (![_videoDevice supportsAVCaptureSessionPreset:preset])
    {
        NSLog(@"error - Capture session preset not supported by video device: %@", preset);
        return;
    }
    
    // enable autofocus; -setFocusMode: raises an exception unless the device is
    // locked first, so only proceed when the lock actually succeeds
    if( [_videoDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus])
    {
        if ([_videoDevice lockForConfiguration:&error])
        {
            [_videoDevice setFocusMode:AVCaptureFocusModeAutoFocus];
            [_videoDevice unlockForConfiguration];
        }
        else
        {
            NSLog(@"warning - Unable to lock video device for configuration, error: %@", error);
        }
    }
    
    // CoreImage wants BGRA pixel format
    NSDictionary *outputSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) }; // set image type
    
    // create the capture session
    _captureSession = [[AVCaptureSession alloc] init];
    _captureSession.sessionPreset = preset;
    
    // create and configure video data output; frames are delivered on the
    // serial session queue so rendering never races configuration
    AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
    videoDataOutput.videoSettings = outputSettings;
    videoDataOutput.alwaysDiscardsLateVideoFrames = YES;
    [videoDataOutput setSampleBufferDelegate:self queue:_captureSessionQueue];
    
    // begin configure capture session; every early return below must balance
    // this call with commitConfiguration
    [_captureSession beginConfiguration];
    
    if (![_captureSession canAddInput:videoDeviceInput])
    {
        NSLog(@"error - Cannot add video device input");
        [_captureSession commitConfiguration];
        _captureSession = nil;
        return;
    }
    if (![_captureSession canAddOutput:videoDataOutput])
    {
        NSLog(@"error - Cannot add video data output");
        [_captureSession commitConfiguration];
        _captureSession = nil;
        return;
    }
    
    [_captureSession addInput:videoDeviceInput];
    [_captureSession addOutput:videoDataOutput];
    [_captureSession commitConfiguration];
    
    // find the connection carrying video so the front-camera feed can be
    // mirrored and rotated to match the UI
    NSUInteger index = [videoDataOutput.connections indexOfObjectPassingTest:^BOOL(AVCaptureConnection*  _Nonnull connect, NSUInteger idx, BOOL * _Nonnull stop) {
        return [connect.inputPorts indexOfObjectPassingTest :^BOOL(AVCaptureInputPort*  _Nonnull port, NSUInteger idx, BOOL * _Nonnull stop) {
            return [[port mediaType] isEqual:AVMediaTypeVideo];
        }] != NSNotFound;
    }];
    AVCaptureConnection* connect = index != NSNotFound ? videoDataOutput.connections[index] : nil;
    if( connect.supportsVideoMirroring)
    {
        connect.videoMirrored = _videoDevice.position == AVCaptureDevicePositionFront;
        connect.videoOrientation = videoOrientation;
    }
    // then start everything
    [_captureSession startRunning];
}

/// GPU-processing hook invoked once per frame from -render:.
/// Returns the GL texture name holding the processed image, or 0 to make
/// -render: display the unmodified pixel buffer instead.
/// NOTE(review): this base implementation always returns 0 — presumably a
/// subclass override point; confirm against subclasses elsewhere in the project.
-(GLuint)processPixelBuffer:(CVPixelBufferRef)pixelbuffer
{
    return 0;
}

#pragma mark - Delegate methods
/// Per-frame delegate callback (already invoked on _captureSessionQueue, per
/// the setSampleBufferDelegate:queue: call in -_start:). Retains the pixel
/// buffer so it outlives the sample buffer, then renders it asynchronously on
/// the same serial queue; the buffer is released after rendering.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    
    // CMSampleBufferGetImageBuffer returns NULL for samples that carry no image
    // data; rendering NULL would misbehave downstream, so skip the frame
    if (pixelBuffer == NULL)
    {
        return;
    }
    
    CVPixelBufferRetain(pixelBuffer);
    
    __weak id weakSelf = self;
    dispatch_async(_captureSessionQueue, ^{
        
        // autorelease pool keeps per-frame CIImage/NSDate temporaries from
        // accumulating across the session queue's lifetime
        @autoreleasepool {
            [weakSelf render:pixelBuffer];
        }
        CVPixelBufferRelease(pixelBuffer);
    });
}

/// Logs the average frame rate roughly every 100 rendered frames.
/// The first call only records the window start; subsequent calls count frames
/// and, once the count passes 100, log fps and reset the window.
-(void)benchmark
{
    NSTimeInterval now = [[NSDate date] timeIntervalSince1970];
    
    if (_startTime == 0)
    {
        _startTime = now;
        return;
    }
    
    _frames++;
    if (_frames > 100)
    {
        NSLog(@"frame rate = %.2f fps", _frames / (now - _startTime));
        _frames = 0;
        _startTime = now;
    }
}

/// Draws one camera frame into the GLKView, aspect-fit (letterboxed/pillarboxed)
/// within the cached drawable bounds. Runs on _captureSessionQueue — only the
/// pre-cached _videoPreviewViewBounds is read, never the view's properties.
/// @param pixelBuffer BGRA frame; retained/released by the caller.
-(void)render:(CVPixelBufferRef)pixelBuffer
{
    [self benchmark];
    
    [EAGLContext setCurrentContext:_eaglContext];
    
    // 0 means "no GPU processing"; fall back to drawing the raw pixel buffer
    GLuint result = [self processPixelBuffer:pixelBuffer];
    
    CGSize imageSize = CGSizeMake(CVPixelBufferGetWidth(pixelBuffer), CVPixelBufferGetHeight(pixelBuffer));
    CIImage* filteredImage = result ? [CIImage imageWithTexture:result size:imageSize flipped:YES colorSpace:_deviceRgbColorSpace] : [CIImage imageWithCVPixelBuffer:pixelBuffer];
    
    [_videoPreviewView bindDrawable];
    
    CGRect sourceExtent = filteredImage.extent;
    
    CGFloat sourceAspect = sourceExtent.size.width / sourceExtent.size.height;
    CGFloat previewAspect = _videoPreviewViewBounds.size.width  / _videoPreviewViewBounds.size.height;
    
    // shrink the destination rect so the image fits the drawable without
    // distortion, centered along the constrained axis
    CGRect drawRect = _videoPreviewViewBounds;
    if (sourceAspect > previewAspect)
    {
        // source is wider than the view: fit to full width, center the reduced height
        drawRect.origin.y += (drawRect.size.height - drawRect.size.width / sourceAspect) / 2.0;
        drawRect.size.height = drawRect.size.width / sourceAspect;
    }
    else
    {
        // source is taller than the view: fit to full height, center the reduced width
        drawRect.origin.x += (drawRect.size.width - drawRect.size.height * sourceAspect) / 2.0;
        drawRect.size.width = drawRect.size.height * sourceAspect;
    }
    
    
    // clear the drawable to opaque red — this is the color of any uncovered
    // letterbox/pillarbox bars around drawRect
    glClearColor(1., 0., 0., 1.0);
    glClear(GL_COLOR_BUFFER_BIT);
    
    // blending disabled while CI draws (the "source over" setup below is
    // commented out); the camera frame fully overwrites drawRect
    glDisable(GL_BLEND);
    //glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA);
    
    if (filteredImage)
        [_ciContext drawImage:filteredImage inRect:drawRect fromRect:sourceExtent];
    
    // restore source-over blending for anything drawn after the frame
    glEnable(GL_BLEND);
    glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA);
    
    [_videoPreviewView display];
}

@end
