//
//  SLCameraHelper.m
//  SeewoLink
//
//  Created by guoqingwei on 16/5/13.
//  Copyright © 2016年 cvte. All rights reserved.
//

#import "SLCameraHelper.h"

/// Extended landscape test: returns YES for true landscape orientations AND
/// for the ambiguous flat/unknown states (FaceUp, FaceDown, Unknown), which
/// this helper resolves in favor of whatever family the caller is checking.
/// Note: the matching portrait variant below also returns YES for those
/// ambiguous states, so the two functions deliberately overlap.
static inline BOOL EX_UIDeviceOrientationIsLandscape(UIDeviceOrientation orientation) {
    switch (orientation) {
        case UIDeviceOrientationFaceUp:
        case UIDeviceOrientationFaceDown:
        case UIDeviceOrientationUnknown:
            return YES;
        default:
            return UIDeviceOrientationIsLandscape(orientation);
    }
}

/// Extended portrait test: returns YES for true portrait orientations AND
/// for the ambiguous flat/unknown states (FaceUp, FaceDown, Unknown).
/// Deliberately overlaps with the landscape variant for those states.
static inline BOOL EX_UIDeviceOrientationIsPortrait(UIDeviceOrientation orientation) {
    switch (orientation) {
        case UIDeviceOrientationFaceUp:
        case UIDeviceOrientationFaceDown:
        case UIDeviceOrientationUnknown:
            return YES;
        default:
            return UIDeviceOrientationIsPortrait(orientation);
    }
}

// Private class extension: capture-graph plumbing plus the video-data-output
// delegate conformance, kept out of the public header.
@interface SLCameraHelper () <AVCaptureVideoDataOutputSampleBufferDelegate>

// Capture graph, configured once in -setupAVComponents.
@property (strong, nonatomic) AVCaptureSession           *session;
@property (strong, nonatomic) AVCaptureDevice            *defaultDevice;      // system default (back) camera
@property (strong, nonatomic) AVCaptureDeviceInput       *defaultDeviceInput;
@property (strong, nonatomic) AVCaptureDevice            *frontDevice;        // front camera, if present (input created but never added to the session here)
@property (strong, nonatomic) AVCaptureDeviceInput       *frontDeviceInput;

@property (strong, nonatomic) AVCaptureVideoPreviewLayer *previewLayer;       // created lazily in -insertSublayerWithCaptureView:atRootView:
@property (nonatomic, strong) AVCaptureStillImageOutput  *stillImageOutput;   // JPEG stills
@property (nonatomic, strong) AVCaptureVideoDataOutput   *videoDataOutput;    // BGRA live frames

// View whose frame the preview layer tracks (see -updatePreviewLayer).
@property (nonatomic, strong) UIView *captureView;
// Most recent frame, written on the video queue by the sample-buffer delegate.
@property (nonatomic, strong) UIImage *videoFrame;

@end

@implementation SLCameraHelper

#pragma mark Life methods

/// Returns the process-wide helper, creating it exactly once in a
/// thread-safe way via dispatch_once.
+ (SLCameraHelper *)sharedInstance
{
    static SLCameraHelper *instance = nil;
    static dispatch_once_t onceToken = 0;
    dispatch_once(&onceToken, ^{
        instance = [[SLCameraHelper alloc] init];
    });
    return instance;
}

/// Designated initializer; builds the capture session and its inputs/outputs.
/// Returns `instancetype` (was `id`) per Cocoa convention — backward
/// compatible for all callers and improves type checking / Swift interop.
- (instancetype)init
{
    if (self = [super init]) {
        [self setupAVComponents];
    }
    return self;
}

- (void)dealloc
{
    // Stop the capture pipeline before its pieces are released.
    // Under ARC the nil assignments below are not needed for memory
    // management; they only make the teardown order explicit.
    [_session stopRunning];
    _previewLayer = nil;
    _session = nil;
    _stillImageOutput = nil;
}

#pragma mark - Public methods

// Creates a preview layer for the shared session and inserts it *behind*
// every existing sublayer of rootView's layer (index 0), sized/positioned by
// captureView's frame. captureView is retained so -updatePreviewLayer can
// re-sync the frame later.
- (void)insertSublayerWithCaptureView:(UIView *)captureView atRootView:(UIView *)rootView
{
    _previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:_session];
    _previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    // The raw values of UIInterfaceOrientation and AVCaptureVideoOrientation
    // coincide (Portrait=1 ... LandscapeLeft=4), which is what makes this
    // direct cast from statusBarOrientation valid.
    _previewLayer.connection.videoOrientation = (AVCaptureVideoOrientation)([UIApplication sharedApplication].statusBarOrientation);
    // frame (not bounds): assumes captureView and rootView share a coordinate
    // space, e.g. captureView is a direct subview of rootView — TODO confirm.
    _previewLayer.frame = captureView.frame;
    _captureView = captureView;
    CALayer *rootLayer = [rootView layer];
    rootLayer.masksToBounds = NO;
    [rootLayer insertSublayer:_previewLayer atIndex:0];
}

/// Detaches the preview layer from whatever layer hosts it and drops our
/// reference; a new layer is created on the next insert call.
- (void)removePreviewLayer
{
    [self.previewLayer removeFromSuperlayer];
    self.previewLayer = nil;
}

/// Re-syncs the preview layer's geometry with the capture view, e.g. after
/// a layout pass or rotation.
- (void)updatePreviewLayer
{
    self.previewLayer.frame = self.captureView.frame;
}

/// Starts the capture session if it is not already running.
/// Note: -[AVCaptureSession startRunning] blocks the calling thread.
- (void)startRunning
{
    if (self.session.isRunning) {
        return;
    }
    [self.session startRunning];
}

/// Stops the capture session if it is currently running.
- (void)stopRunning
{
    if (!self.session.isRunning) {
        return;
    }
    [self.session stopRunning];
}

/// Captures a full-resolution still, oriented to match `videoOrientation`,
/// then renders it through -cropImage:withCropSize: and delivers the result
/// to photodelegate. `cropSize` is currently unused for stills (the photo's
/// own size is used instead — see comment below); the parameter is kept for
/// interface compatibility with -captureImageFrameWithOrientation:cropSize:.
- (void)takePhotoWithOrientation:(UIDeviceOrientation)videoOrientation cropSize:(CGSize)cropSize
{
    AVCaptureConnection *videoConnection = [_stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
    if ([videoConnection isVideoOrientationSupported]) {
        AVCaptureVideoOrientation orientation = [self videoOrientationForDeviceOrientation:videoOrientation];
        [videoConnection setVideoOrientation:orientation];
    }

    __weak __typeof(self)weakSelf = self;

    [_stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
        // BUGFIX: the original block referenced `_previewLayer` and `self`
        // directly, which captured self strongly and defeated weakSelf.
        // Route every access through the weak/strong dance instead.
        __strong __typeof(weakSelf)strongSelf = weakSelf;
        if (!strongSelf) {
            return;
        }
        if (!imageDataSampleBuffer) {
            // Surface the failure instead of dropping it silently.
            NSLog(@"Take photo failed: %@", error);
            return;
        }

        NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
        UIImage *image = [UIImage imageWithData:imageData];
        NSLog(@"Take photo with size: %f, %f", [image size].width, [image size].height);

        // No necessary to crop when photo taken
        CGSize applySize = [image size]; // cropSize;

        // The landscape/portrait checks below work because the raw values of
        // AVCaptureVideoOrientation's landscape members (3, 4) are both
        // landscape members of UIDeviceOrientation too (and likewise 1, 2
        // for portrait), so the cross-enum cast preserves the family.
        AVCaptureVideoOrientation previewOrientation = strongSelf.previewLayer.connection.videoOrientation;
        if (UIDeviceOrientationIsLandscape((UIDeviceOrientation)previewOrientation)) {
            applySize = EX_UIDeviceOrientationIsLandscape(videoOrientation) ? applySize : CGSizeMake(applySize.height, applySize.width);
        } else if (UIDeviceOrientationIsPortrait((UIDeviceOrientation)previewOrientation)) {
            applySize = EX_UIDeviceOrientationIsPortrait(videoOrientation) ? applySize : CGSizeMake(applySize.height, applySize.width);
        }

        UIImage *croppedImage = [strongSelf cropImage:image withCropSize:applySize];

        if ([strongSelf.photodelegate respondsToSelector:@selector(cameraDidTakePhoto:)]) {
            [strongSelf.photodelegate cameraDidTakePhoto:croppedImage];
        }
    }];
}

// Crops the most recently cached live frame (self.videoFrame) to `cropSize`,
// swapping width/height when the requested orientation family differs from
// the preview's, and delivers the result to captureDelegate.
- (void)captureImageFrameWithOrientation:(UIDeviceOrientation)videoOrientation cropSize:(CGSize)cropSize
{
    AVCaptureConnection *videoConnection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
    
    // NOTE(review): unlike -takePhotoWithOrientation:cropSize: this uses the
    // live [UIDevice currentDevice].orientation instead of the
    // `videoOrientation` parameter — confirm whether that asymmetry is
    // intentional. Also, setting the connection orientation here only affects
    // *future* sample buffers, not the already-cached self.videoFrame below.
    if ([videoConnection isVideoOrientationSupported]) {
        AVCaptureVideoOrientation orientation = [self videoOrientationForDeviceOrientation:[UIDevice currentDevice].orientation];
        [videoConnection setVideoOrientation:orientation];
    }
    
    CGSize applySize = cropSize;
    
    // Cross-enum cast works because AVCaptureVideoOrientation's landscape raw
    // values (3, 4) are also landscape members of UIDeviceOrientation, and
    // likewise 1, 2 for portrait.
    if (UIDeviceOrientationIsLandscape((UIDeviceOrientation)_previewLayer.connection.videoOrientation)) {
        applySize = EX_UIDeviceOrientationIsLandscape(videoOrientation) ? cropSize : CGSizeMake(cropSize.height, cropSize.width);
    } else if (UIDeviceOrientationIsPortrait((UIDeviceOrientation)_previewLayer.connection.videoOrientation)) {
        applySize = EX_UIDeviceOrientationIsPortrait(videoOrientation) ? cropSize : CGSizeMake(cropSize.height, cropSize.width);
    }
    
    // NOTE(review): videoFrame is written on the video queue by the
    // sample-buffer delegate and read here on the caller's thread — the
    // strong property read is safe, but the frame may be slightly stale.
    UIImage *croppedImage = [self cropImage:self.videoFrame withCropSize:applySize];
    
    if ([self.captureDelegate respondsToSelector:@selector(cameraDidCaptureFrame:)]) {
        [self.captureDelegate cameraDidCaptureFrame:croppedImage];
    }
}

#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate

/// Sample-buffer callback, invoked on the serial "com.cvte.video" queue.
/// Caches the latest frame as a UIImage so that
/// -captureImageFrameWithOrientation:cropSize: can crop it on demand.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    if (!sampleBuffer) {
        return;
    }
    self.videoFrame = [self imageFromSampleBuffer:sampleBuffer];
}

#pragma mark - Private methods

/// Builds the capture graph once at init time: session (1080p), back-camera
/// input, a JPEG still-image output, and a BGRA video-data output whose
/// frames are delivered on a private serial queue. Also locates the front
/// camera and pre-creates its input (not added to the session here).
- (void)setupAVComponents
{
    _session = [[AVCaptureSession alloc] init];
//    [_session setSessionPreset:AVCaptureSessionPresetPhoto];
    [_session setSessionPreset:AVCaptureSessionPreset1920x1080];
    
    self.defaultDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    
    if (self.defaultDevice) {

        NSError *configError = nil;
        if ([_defaultDevice lockForConfiguration:&configError]) {
            
            if (_defaultDevice.autoFocusRangeRestrictionSupported) {
                _defaultDevice.autoFocusRangeRestriction = AVCaptureAutoFocusRangeRestrictionNear;
            }
            
            if (_defaultDevice.smoothAutoFocusSupported) {
                _defaultDevice.smoothAutoFocusEnabled = YES;
            }
            // BUGFIX: query support for the mode that is actually set. The
            // original code checked AVCaptureFocusModeAutoFocus /
            // AVCaptureExposureModeAutoExpose but then set the *continuous*
            // variants, which may be unsupported on some devices.
            if ([_defaultDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
                [_defaultDevice setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
            }
            if ([_defaultDevice isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
                [_defaultDevice setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
            }
            
            [_defaultDevice unlockForConfiguration];
        } else {
            // Configuration is best-effort; log instead of discarding the error.
            NSLog(@"SLCameraHelper: lockForConfiguration failed: %@", configError);
        }
        
        // add device input to session
        NSError *inputError = nil;
        self.defaultDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:_defaultDevice error:&inputError];
        if ([_session canAddInput:_defaultDeviceInput]) {
            [_session addInput:_defaultDeviceInput];
        } else if (inputError) {
            NSLog(@"SLCameraHelper: could not create camera input: %@", inputError);
        }
        
        // add JPEG still-image output to session
        NSDictionary *outputSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
        _stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
        _stillImageOutput.outputSettings = outputSettings;
        if ([_session canAddOutput:_stillImageOutput]) {
            [_session addOutput:_stillImageOutput];
        }
        
        // add live-frame output; BGRA matches what -imageFromSampleBuffer: expects
        NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
        _videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
        if ([_session canAddOutput:_videoDataOutput]) {
            [_session addOutput:_videoDataOutput];
        }
        
        // Serial queue so frames arrive in order in the delegate callback.
        dispatch_queue_t videoQueue = dispatch_queue_create("com.cvte.video", DISPATCH_QUEUE_SERIAL);
        
        [_videoDataOutput setSampleBufferDelegate:self queue:videoQueue];
        _videoDataOutput.videoSettings = videoSettings;
        
        // Locate the front camera, if any, and pre-build its input for later use.
        for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
            if (device.position == AVCaptureDevicePositionFront) {
                self.frontDevice = device;
            }
        }
        if (_frontDevice) {
            self.frontDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:_frontDevice error:nil];
        }
    }
}

/// Scales `image` so it completely fills `cropSize` (aspect-fill), centers
/// the overflow, and returns the result rendered opaquely at the device's
/// screen scale. If the sizes already match, the image is simply redrawn.
- (UIImage *)cropImage:(UIImage *)image withCropSize:(CGSize)cropSize
{
    CGSize sourceSize = image.size;
    // Default: draw edge-to-edge (covers the equal-size case).
    CGRect drawRect = CGRectMake(0, 0, cropSize.width, cropSize.height);

    if (!CGSizeEqualToSize(sourceSize, cropSize)) {
        CGFloat horizontalRatio = cropSize.width / sourceSize.width;
        CGFloat verticalRatio = cropSize.height / sourceSize.height;
        // Aspect-fill: scale by the larger ratio so both axes are covered.
        CGFloat fillRatio = (horizontalRatio > verticalRatio) ? horizontalRatio : verticalRatio;

        drawRect.size.width = sourceSize.width * fillRatio;
        drawRect.size.height = sourceSize.height * fillRatio;

        // Center the axis that overflows the target size.
        if (horizontalRatio > verticalRatio) {
            drawRect.origin.y = (cropSize.height - drawRect.size.height) * .5f;
        } else if (horizontalRatio < verticalRatio) {
            drawRect.origin.x = (cropSize.width - drawRect.size.width) * .5f;
        }
    }

    UIGraphicsBeginImageContextWithOptions(cropSize, YES, 0);
    [image drawInRect:drawRect];
    UIImage *result = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();

    return result;
}

/// Maps a device orientation to the capture-connection orientation.
/// Landscape is mirrored (device LandscapeLeft -> video LandscapeRight and
/// vice versa) because the device and the camera sensor rotate in opposite
/// senses. Flat/unknown orientations fall back to whichever family the
/// preview layer is currently showing.
- (AVCaptureVideoOrientation)videoOrientationForDeviceOrientation:(UIDeviceOrientation)deviceOrientation
{
    switch (deviceOrientation) {
        case UIDeviceOrientationFaceUp:
        case UIDeviceOrientationFaceDown:
        case UIDeviceOrientationUnknown:
            // Ambiguous: keep the family the preview is already using.
            // (Cross-enum cast is safe for the landscape/portrait family
            // check — both enums put landscape at raw values 3 and 4.)
            return UIDeviceOrientationIsLandscape((UIDeviceOrientation)_previewLayer.connection.videoOrientation)
                       ? AVCaptureVideoOrientationLandscapeRight
                       : AVCaptureVideoOrientationPortrait;

        case UIDeviceOrientationLandscapeLeft:
            return AVCaptureVideoOrientationLandscapeRight;

        case UIDeviceOrientationLandscapeRight:
            return AVCaptureVideoOrientationLandscapeLeft;

        default:
            // Portrait (1) and PortraitUpsideDown (2) share raw values with
            // AVCaptureVideoOrientation, so the direct cast is correct here.
            return (AVCaptureVideoOrientation)deviceOrientation;
    }
}

/// Converts a BGRA pixel buffer (the format requested in -setupAVComponents)
/// into a UIImage via a CoreGraphics bitmap context.
/// Returns nil if the sample buffer has no image buffer or the bitmap
/// context cannot be created — the original passed a potentially NULL
/// context straight to CGBitmapContextCreateImage.
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (imageBuffer == NULL) {
        return nil;
    }

    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // BGRA layout == little-endian 32-bit with premultiplied alpha first.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                                 bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);

    UIImage *image = nil;
    if (context != NULL) {
        CGImageRef quartzImage = CGBitmapContextCreateImage(context);
        if (quartzImage != NULL) {
            image = [UIImage imageWithCGImage:quartzImage];
            CGImageRelease(quartzImage);
        }
        CGContextRelease(context);
    }
    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    return image;
}

@end
