//
//  VideoSource.m
//  MinimumOpenCVLiveCamera
//
//  Created by Akira Iwaya on 2015/11/05.
//  Copyright © 2015年 akira108. All rights reserved.
//

#import "VideoSource.h"
#import <AVFoundation/AVFoundation.h>
#import <Accelerate/Accelerate.h>

using namespace cv;
using namespace std;
// NOTE(review): category method on UIImage without a prefix (e.g. xyz_scaleToSize:)
// risks a silent collision if another library adds the same selector — consider
// prefixing; name kept here because callers depend on it.
@implementation UIImage (scale)
// Returns a copy of the receiver redrawn at the given point size.
// Uses UIGraphicsBeginImageContext (scale 1.0), so the result is not
// retina-aware — intentional or not, preserved as-is.
-(UIImage*)scaleToSize:(CGSize)size
{
    // Create a bitmap context and make it the current context.
    UIGraphicsBeginImageContext(size);
    // Draw the image scaled to the requested size.
    [self drawInRect:CGRectMake(0, 0, size.width, size.height)];
    // Capture the resized image from the current context.
    UIImage* scaledImage = UIGraphicsGetImageFromCurrentImageContext();
    // Pop the context off the stack.
    UIGraphicsEndImageContext();
    // Return the resized image.
    return scaledImage;
}
@end


@interface VideoSource () <AVCaptureVideoDataOutputSampleBufferDelegate,UIGestureRecognizerDelegate>
@property (strong, nonatomic) CALayer *previewLayer;
@property (strong, nonatomic) AVCaptureSession *captureSession;
@property (assign, nonatomic) BOOL isFrontCamera;
@property (strong, nonatomic) AVCaptureDeviceInput *captureDeviceInput;
@end

@implementation VideoSource
// Attaches (or re-attaches) the preview layer to the given view and rotates
// it to match the current interface orientation. Camera frames arrive in
// landscape-right, so every other orientation needs a compensating rotation.
- (void)setTargetView:(UIView *)targetView {
    if (self.previewLayer == nil) {
        _previewLayer = [CALayer layer];
    }
    _targetView = targetView;
    [targetView.layer addSublayer:self.previewLayer];
    self.previewLayer.contentsGravity = kCAGravityResizeAspectFill;
    self.previewLayer.frame = targetView.bounds;

    // CGAffineTransformMakeRotation replaces the original identity-scale +
    // rotate pairs; the resulting transforms are mathematically identical.
    switch (UIApplication.sharedApplication.statusBarOrientation) {
        case UIInterfaceOrientationPortrait:
            self.previewLayer.affineTransform = CGAffineTransformMakeRotation(M_PI / 2);
            break;
        case UIInterfaceOrientationPortraitUpsideDown:
            self.previewLayer.affineTransform = CGAffineTransformMakeRotation(-M_PI / 2);
            break;
        case UIInterfaceOrientationLandscapeLeft:
            self.previewLayer.affineTransform = CGAffineTransformMakeRotation(M_PI);
            break;
        case UIInterfaceOrientationLandscapeRight:
        default:
            // Frames already match landscape-right; no rotation needed.
            break;
    }
}


// Designated initializer: builds the capture session and a BGRA video data
// output whose frames are delivered to this object on a private serial queue.
// The camera input itself is attached later via -setupCameraPostion:.
- (instancetype)init
{
    self = [super init];
    if (self) {
        _captureSession = [[AVCaptureSession alloc] init];
        _captureSession.sessionPreset = AVCaptureSessionPresetHigh;

        AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
        // 32BGRA so each frame can be wrapped in a CV_8UC4 cv::Mat without conversion.
        output.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
        output.alwaysDiscardsLateVideoFrames = YES;
        // Fix: guard with canAddOutput: — adding an incompatible output would
        // raise an exception instead of failing gracefully.
        if ([_captureSession canAddOutput:output]) {
            [_captureSession addOutput:output];
        }

        // Dedicated serial queue so frame processing never blocks the main thread.
        dispatch_queue_t queue = dispatch_queue_create("VideoQueue", DISPATCH_QUEUE_SERIAL);
        [output setSampleBufferDelegate:self queue:queue];
    }

    return self;
}



// Switches between the front and back camera by swapping the session's input.
// NOTE(review): the misspelled selector ("Postion") is preserved because
// existing callers depend on it.
- (void)setupCameraPostion:(BOOL)isFrontCamera{
    _isFrontCamera = isFrontCamera;
    [self stop];
    [_captureSession beginConfiguration];
    if (_captureDeviceInput) {
        [_captureSession removeInput:_captureDeviceInput];
    }
    AVCaptureDevice *device = [self currentCaptureDevice];
    if(device){
        NSError *error = nil;
        AVCaptureDeviceInput *input = [[AVCaptureDeviceInput alloc] initWithDevice:device error:&error];
        // Fix: the original ignored `error` and added the input unconditionally;
        // if creation failed (input == nil), addInput: raises
        // NSInvalidArgumentException. Check success and session compatibility.
        if (input && [_captureSession canAddInput:input]) {
            _captureDeviceInput = input;
            [_captureSession addInput:_captureDeviceInput];
        } else {
            NSLog(@"VideoSource: could not attach camera input: %@", error);
        }
    }
    [_captureSession commitConfiguration];
    [self start];
}


// Returns the first video-capable capture device whose position matches the
// currently selected camera (front/back), or nil if none exists.
-(AVCaptureDevice *)currentCaptureDevice{
    AVCaptureDevicePosition wantedPosition =
        _isFrontCamera ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack;
    for (AVCaptureDevice *candidate in [AVCaptureDevice devices]) {
        if (![candidate hasMediaType:AVMediaTypeVideo]) {
            continue;
        }
        if ([candidate position] == wantedPosition) {
            return candidate;
        }
    }
    return nil;
}

// AVCaptureVideoDataOutputSampleBufferDelegate (called on the "VideoQueue"
// serial queue): wraps each BGRA frame in a cv::Mat, warps/un-mirrors the
// front-camera image, hands the frame to the delegate, then shows it in the
// preview layer.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {

    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    uint8_t *base = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    int width = (int)CVPixelBufferGetWidth(imageBuffer);
    int height = (int)CVPixelBufferGetHeight(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);

    // Fix: pass bytesPerRow as the Mat step. The original computed it but never
    // used it, assuming tightly packed rows; CVPixelBuffer rows can be padded,
    // which would shear/skew the wrapped image.
    Mat mat = Mat(height, width, CV_8UC4, base, bytesPerRow);

    Mat frame;
    if (_isFrontCamera) {
        // Perspective-warp the front camera frame; the dst quad is the mirrored
        // full frame, so this also flips the image mirror-style.
        cv::Point2f srcPoint[4], dstPoint[4];
        srcPoint[0] = cv::Point2f(width * 0.4f, 0);
        srcPoint[1] = cv::Point2f(width, height * 1 / 6);
        srcPoint[2] = cv::Point2f(width, height * 5 / 6);
        srcPoint[3] = cv::Point2f(width * 0.4f, height);

        dstPoint[0] = cv::Point2f(0, height);
        dstPoint[1] = cv::Point2f(width, height);
        dstPoint[2] = cv::Point2f(width, 0);
        dstPoint[3] = cv::Point2f(0, 0);

        Mat matMap = getPerspectiveTransform(srcPoint, dstPoint);
        frame = Mat::zeros(height, width, CV_8UC4);
        warpPerspective(mat, frame, matMap, cv::Size(width, height));
    } else {
        frame = mat;
    }

    [self.delegate processFrame:frame];

    CGImageRef imageRef = [self CGImageFromCVMat:frame];
    // dispatch_sync (not async) keeps imageRef alive until the layer has
    // consumed it; the CGImageRelease below must not run before the block.
    dispatch_sync(dispatch_get_main_queue(), ^{
        self.previewLayer.contents = (__bridge id)imageRef;
    });
    CGImageRelease(imageRef);

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
}

// Begins streaming frames from the capture session.
- (void)start {
    [[self captureSession] startRunning];
}

// Stops streaming frames from the capture session.
- (void)stop {
    [[self captureSession] stopRunning];
}

// Converts a cv::Mat (8-bit grayscale or 4-channel BGRA) to a CGImage.
// Ownership: returns a +1 CGImageRef; the caller must CGImageRelease it.
- (CGImageRef)CGImageFromCVMat:(Mat)cvMat {

    if (cvMat.elemSize() == 4) {
        // Fix: convert BGRA -> RGBA into a local Mat instead of in place, so
        // the caller's pixel data (often a locked CVPixelBuffer) is not
        // mutated as a side effect. The fresh Mat is also continuous, so the
        // NSData copy below (elemSize * total) is always valid for it.
        Mat rgba;
        cv::cvtColor(cvMat, rgba, COLOR_BGRA2RGBA);
        cvMat = rgba;
    }
    // NOTE(review): for a non-continuous grayscale Mat this length would not
    // cover the row padding implied by step[0] — assumes continuous input.
    NSData *data = [NSData dataWithBytes:cvMat.data length:cvMat.elemSize()*cvMat.total()];

    CGColorSpaceRef colorSpace;
    CGBitmapInfo bitmapInfo;
    if (cvMat.elemSize() == 1) {
        colorSpace = CGColorSpaceCreateDeviceGray();
        bitmapInfo = kCGImageAlphaNone | kCGBitmapByteOrderDefault;
    } else {
        colorSpace = CGColorSpaceCreateDeviceRGB();
        // Fix: the original OR-ed two CGImageAlphaInfo values
        // (kCGImageAlphaPremultipliedLast | kCGImageAlphaNone). Alpha info is
        // an enum field of CGBitmapInfo, not a set of flags — select exactly one.
        bitmapInfo = kCGImageAlphaPremultipliedLast | kCGBitmapByteOrderDefault;
    }
    CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);

    // Creating CGImage from cv::Mat
    CGImageRef imageRef = CGImageCreate(cvMat.cols,                 // width
                                        cvMat.rows,                 // height
                                        8,                          // bits per component
                                        8 * cvMat.elemSize(),       // bits per pixel
                                        cvMat.step[0],              // bytes per row
                                        colorSpace,                 // colorspace
                                        bitmapInfo,                 // bitmap info
                                        provider,                   // CGDataProviderRef
                                        NULL,                       // decode array
                                        false,                      // should interpolate
                                        kCGRenderingIntentDefault   // intent
                                        );

    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);

    return imageRef;
}

// Renders the preview layer's current contents into a UIImage snapshot at
// screen scale.
- (UIImage*)generateImage{
    CGRect snapshotRect = self.previewLayer.frame;
    UIGraphicsBeginImageContextWithOptions(snapshotRect.size, NO, [UIScreen mainScreen].scale);
    CGContextRef context = UIGraphicsGetCurrentContext();
    // Shift the origin so the layer draws at (0,0) in the snapshot context.
    CGContextTranslateCTM(context, -CGRectGetMinX(snapshotRect), -CGRectGetMinY(snapshotRect));
    [self.previewLayer renderInContext:context];
    UIImage *snapshot = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return snapshot;
}


@end
