//
//  ICFaceDetectorViewController.m
//  ICome
//
//  Created by zhangrongwu on 2017/7/18.
//  Copyright © 2017年 iCom. All rights reserved.
//

#import "ICFaceDetectorViewController.h"
#import "QRView.h"
#import "QRUtil.h"
#import "UIImage+Extension.h"

// Clamp an integer expression into the 0...255 byte range.
// Fix: the macro argument is now parenthesized so expression arguments
// (e.g. `clamp(x - y)`) cannot be torn apart by operator precedence.
#define clamp(a) ((a) > 255 ? 255 : ((a) < 0 ? 0 : (a)))

@interface ICFaceDetectorViewController ()<AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureMetadataOutputObjectsDelegate,UINavigationControllerDelegate,UIImagePickerControllerDelegate>

// Capture pipeline.
// NOTE(review): `device` and `input` are never assigned in the visible
// implementation — confirm they are used elsewhere before removing.
@property (strong, nonatomic) AVCaptureDevice * device;
@property (strong, nonatomic) AVCaptureDeviceInput * input;
// Session and its preview layer; both are created in -init.
@property (strong, nonatomic) AVCaptureSession * session;
@property (strong, nonatomic) AVCaptureVideoPreviewLayer * preview;

// Cancel button (top-left) and the on-screen hint label.
@property (nonatomic, strong) UIButton *backBtn;
@property (nonatomic, strong)UILabel *alertLabel;

// Full-screen scan overlay with a transparent capture window.
@property (nonatomic, strong) QRView *qrView;


// YES while detection is armed; cleared once a face has been captured
// so the callback fires at most once per presentation.
@property (nonatomic, assign)BOOL detector;

// Serial queues: one for sample-buffer delivery, one for session setup.
@property (nonatomic) dispatch_queue_t videoDataOutputQueue;
@property (nonatomic) AVCaptureVideoDataOutput *videoDataOutput;
@property (nonatomic) AVCaptureDeviceInput *videoDeviceInput;
@property (nonatomic) dispatch_queue_t sessionQueue; // Communicate with the session and other session objects on this queue.

// Marker layers drawn over the preview at detected feature positions.
@property (nonatomic, strong)CALayer *leftEye;
@property (nonatomic, strong)CALayer *rightEye;
@property (nonatomic, strong)CALayer *mouth;
@property (nonatomic, strong)CALayer *face;
@property (nonatomic) UIBackgroundTaskIdentifier backgroundRecordingID;

@end

@implementation ICFaceDetectorViewController
// Lazily-created marker layer for the detected left-eye position,
// attached to the preview layer on first access. (`size` is presumably
// added to CALayer by a project category — CALayer itself has no such
// property; confirm against the category headers.)
- (CALayer *)leftEye {
    if (_leftEye) {
        return _leftEye;
    }
    CALayer *marker = [[CALayer alloc] init];
    marker.size = CGSizeMake(10, 10);
    marker.borderColor = [UIColor redColor].CGColor;
    marker.borderWidth = 1;
    [self.preview addSublayer:marker];
    _leftEye = marker;
    return _leftEye;
}

// Lazily-created marker layer for the detected right-eye position,
// attached to the preview layer on first access.
- (CALayer *)rightEye {
    if (_rightEye) {
        return _rightEye;
    }
    CALayer *marker = [[CALayer alloc] init];
    marker.size = CGSizeMake(10, 10);
    marker.borderColor = [UIColor redColor].CGColor;
    marker.borderWidth = 1;
    [self.preview addSublayer:marker];
    _rightEye = marker;
    return _rightEye;
}

// Lazily-created marker layer for the detected mouth position,
// attached to the preview layer on first access.
- (CALayer *)mouth {
    if (_mouth) {
        return _mouth;
    }
    CALayer *marker = [[CALayer alloc] init];
    marker.size = CGSizeMake(20, 10);
    marker.borderColor = [UIColor redColor].CGColor;
    marker.borderWidth = 1;
    [self.preview addSublayer:marker];
    _mouth = marker;
    return _mouth;
}


// Lazily-created marker layer outlining the whole detected face,
// attached to the preview layer on first access. (Its bounds are set
// per-frame from CIFaceFeature.bounds; the initial 20x10 size is a
// placeholder, matching the original implementation.)
- (CALayer *)face {
    if (_face) {
        return _face;
    }
    CALayer *marker = [[CALayer alloc] init];
    marker.size = CGSizeMake(20, 10);
    marker.borderColor = [UIColor redColor].CGColor;
    marker.borderWidth = 1;
    [self.preview addSublayer:marker];
    _face = marker;
    return _face;
}




// Designated initializer: builds the capture session and its preview
// layer eagerly so the lazy marker-layer getters always have a preview
// layer to attach sublayers to.
//
// Fix: returns `instancetype` instead of `id` (Cocoa convention; keeps
// type information for callers and Swift interop). Ivars are accessed
// directly in -init per convention.
- (instancetype)init
{
    self = [super init];
    if (self) {
        _session = [[AVCaptureSession alloc] init];
        _preview = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_session];
    }
    return self;
}

// Arms detection, builds the static UI, then starts the capture session.
- (void)viewDidLoad {
    [super viewDidLoad];
    self.detector = YES;
    [self configUI];
    [self startRunning];
}

// Final geometry is only reliable once the view is on screen, so the
// overlay is re-centered here rather than in -viewDidLoad.
-(void)viewDidAppear:(BOOL)animated {
    [super viewDidAppear:animated];
    [self updateLayout];
}

// Builds the static UI: scan overlay, cancel button and hint label
// (added in that order to keep the z-order), then pins the hint label
// 200 pt below the vertical center via Masonry.
- (void)configUI {
    UIView *root = self.view;

    [root addSubview:self.qrView];
    [root addSubview:self.backBtn];
    [root addSubview:self.alertLabel];

    [self.alertLabel mas_remakeConstraints:^(MASConstraintMaker *make) {
        make.centerX.equalTo(root.mas_centerX);
        make.top.equalTo(root.mas_centerY).offset(200);
    }];
}

// Re-centers the scan overlay on the screen after layout settles.
// The disabled code below used to restrict the scan rect to the
// overlay's transparent window; it is kept for reference.
- (void)updateLayout {
    CGSize screenSize = [QRUtil screenBounds].size;
    _qrView.center = CGPointMake(screenSize.width / 2, screenSize.height / 2);

    //    // Correct the scan area (disabled):
    //    CGFloat screenHeight = self.view.frame.size.height;
    //    CGFloat screenWidth = self.view.frame.size.width;
    //    CGRect cropRect = CGRectMake((screenWidth - self.qrView.transparentArea.width) / 2,
    //                                 (screenHeight - self.qrView.transparentArea.height) / 2,
    //                                 self.qrView.transparentArea.width,
    //                                 self.qrView.transparentArea.height);
    //    [self.videoDataOutput setRectOfInterest:CGRectMake(cropRect.origin.y / screenHeight,
    //                                              cropRect.origin.x / screenWidth,
    //                                              cropRect.size.height / screenHeight,
    //                                              cropRect.size.width / screenWidth)];
}

// Cancel action: tears down the capture pipeline, then dismisses.
// `button` is nil when invoked programmatically after a successful match.
- (void)backAction:(UIButton *)button {
    [self stopRunning];
    [self dismissViewControllerAnimated:YES completion:nil];
}

#pragma mark - Public Method
// Returns the capture device of `mediaType` at `position` (falling back
// to the first available device), configured for automatic flash, white
// balance, focus and exposure where supported.
//
// Fix: the original locked and configured `[devices firstObject]` and
// only afterwards searched for the device at the requested position, so
// the device actually returned (e.g. the front camera) could be left
// unconfigured. The position match now happens first, and the lock error
// is surfaced instead of being discarded.
- (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position
{
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
    AVCaptureDevice *captureDevice = [devices firstObject];

    // Prefer the device at the requested position.
    for (AVCaptureDevice *device in devices) {
        if ([device position] == position) {
            captureDevice = device;
            break;
        }
    }

    NSError *lockError = nil;
    if ([captureDevice lockForConfiguration:&lockError]) {
        if ([captureDevice isFlashModeSupported:AVCaptureFlashModeAuto]) {
            [captureDevice setFlashMode:AVCaptureFlashModeAuto];
        }
        // Automatic white balance.
        if ([captureDevice isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]) {
            [captureDevice setWhiteBalanceMode:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance];
        }
        // Continuous autofocus.
        if ([captureDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
            [captureDevice setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
        }
        // Continuous auto-exposure.
        if ([captureDevice isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
            [captureDevice setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
        }
        [captureDevice unlockForConfiguration];
    } else {
        NSLog(@"%@", lockError);
    }

    return captureDevice;
}


// Installs the preview layer and configures the capture session (front
// camera, 640x480, bi-planar 4:2:0 frames) on a private serial queue,
// then starts the session once the configuration is committed.
//
// Fixes vs. the original:
//  * -startRunning was called on the main thread while configuration was
//    still pending on the session queue (a race); it now runs on that
//    queue, after -commitConfiguration.
//  * preferredVideoStabilizationMode is a plain enum, not a bitmask;
//    OR-ing Standard|Cinematic|Auto produced an undefined value. Auto is
//    used instead.
//  * The still-image output is only added when the session accepts it.
- (void)startRunning {

    _preview.videoGravity = AVLayerVideoGravityResizeAspectFill;
    _preview.frame = [QRUtil screenBounds];

    [self.view.layer insertSublayer:_preview atIndex:0];

    dispatch_queue_t sessionQueue = dispatch_queue_create("session queue", DISPATCH_QUEUE_SERIAL);
    [self setSessionQueue:sessionQueue];

    dispatch_async(sessionQueue, ^{
        [self setBackgroundRecordingID:UIBackgroundTaskInvalid];
        [self.session beginConfiguration];

        if ([self.session canSetSessionPreset:AVCaptureSessionPreset640x480]) {
            [self.session setSessionPreset:AVCaptureSessionPreset640x480];
        }

        // JPEG still-image output. NOTE(review): nothing in this file
        // captures stills from it — confirm it is needed.
        AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
        [stillImageOutput setOutputSettings:@{AVVideoCodecKey : AVVideoCodecJPEG}];
        if ([self.session canAddOutput:stillImageOutput]) {
            [self.session addOutput:stillImageOutput];
        }

        NSError *error = nil;
        AVCaptureDevice *videoDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionFront];

        // Input device (front camera).
        AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
        if (!videoDeviceInput) {
            NSLog(@"%@", error);
        } else if ([self.session canAddInput:videoDeviceInput]) {
            [self.session addInput:videoDeviceInput];
            [self setVideoDeviceInput:videoDeviceInput];
        }

        AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
        if ([self.session canAddOutput:videoDataOutput]) {
            [self.session addOutput:videoDataOutput];
            AVCaptureConnection *connection = [videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
            if ([connection isVideoStabilizationSupported]) {
                // Let the system pick the best stabilization mode.
                connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
            }

            if ([connection isVideoOrientationSupported]) {
                // NOTE(review): LandscapeLeft was the original setting —
                // confirm this matches the intended UI orientation.
                connection.videoOrientation = AVCaptureVideoOrientationLandscapeLeft;
            }

            // Deliver frames on a dedicated serial queue.
            self.videoDataOutputQueue = dispatch_queue_create("videoDataOutput", NULL);
            [videoDataOutput setSampleBufferDelegate:self queue:self.videoDataOutputQueue];

            // Bi-planar 4:2:0 so the luma plane can be read directly
            // (see -imageFromSampleBuffer:).
            videoDataOutput.videoSettings = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)};
            [self setVideoDataOutput:videoDataOutput];
        }

        [self.session commitConfiguration];
        [self.session startRunning];
    });
}

// Detaches the preview layer and halts capture. Messaging nil is a
// no-op, so this is safe even if the session was never started.
- (void)stopRunning {
    [self.preview removeFromSuperlayer];
    [self.session stopRunning];
}

#pragma mark AVCaptureMetadataOutputObjectsDelegate


// AVCaptureVideoDataOutputSampleBufferDelegate — called on
// videoDataOutputQueue for every captured frame. Converts the frame to
// a UIImage, mirrors it (front camera), bakes the orientation into the
// pixels, and hands it to face detection.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
    UIImage *frame = [self imageFromSampleBuffer:sampleBuffer];
    UIImage *mirrored = [UIImage imageWithCGImage:frame.CGImage scale:1 orientation:UIImageOrientationLeftMirrored];
    // -fixOrientation (project category) re-renders so the orientation
    // flag is applied to the pixel data itself.
    [self detectForFacesInUIImage:[mirrored fixOrientation]];
}



/////人脸识别
-(void)detectForFacesInUIImage:(UIImage *)facePicture
{
    CIImage* image = [CIImage imageWithCGImage:facePicture.CGImage];
    
    CIDetector* detector = [CIDetector detectorOfType:CIDetectorTypeFace
                                              context:nil
                                              options:[NSDictionary dictionaryWithObject:CIDetectorAccuracyHigh
                                                                                  forKey:CIDetectorAccuracy]];
    
    NSArray* features = [detector featuresInImage:image];
  
    for(CIFaceFeature* faceObject in features)
    {
        NSLog(@"found face");
        if (self.detector) {

            if (faceObject.hasLeftEyePosition) {
                self.leftEye.center = CGPointMake(faceObject.leftEyePosition.x, faceObject.leftEyePosition.y);
            }
            
            if (faceObject.hasRightEyePosition) {
                self.rightEye.center = CGPointMake(faceObject.rightEyePosition.x, faceObject.rightEyePosition.y);
            }
            
            if (faceObject.hasMouthPosition) {
                self.mouth.center = CGPointMake(faceObject.mouthPosition.x, faceObject.mouthPosition.y);
            }
            
            if (faceObject.hasLeftEyePosition && faceObject.hasRightEyePosition && faceObject.hasMouthPosition && faceObject.hasFaceAngle) {
                NSLog(@"Left eye %g %g", faceObject.leftEyePosition.x, faceObject.leftEyePosition.y);
                NSLog(@"Right eye %g %g", faceObject.rightEyePosition.x, faceObject.rightEyePosition.y);
                NSLog(@"Mouth %g %g", faceObject.mouthPosition.x, faceObject.mouthPosition.y);
                
                self.face.bounds = faceObject.bounds;

                CGFloat eyeWidth = faceObject.rightEyePosition.x - faceObject.leftEyePosition.x;
                if (eyeWidth >95 && eyeWidth < 120) {
                    dispatch_async(dispatch_get_main_queue(), ^{
                        if (self.faceDetector) {
                            NSData *imageData = UIImageJPEGRepresentation([facePicture imageByResizeToSize:CGSizeMake(512, 512)], 0.3);
                            self.faceDetector(imageData);
                        }
                        self.detector = NO;
                        [self backAction:nil];
                    });
                    
                }
                
            }
            return;
        }
    }
}


// Converts a bi-planar 4:2:0 YCbCr sample buffer (the format requested
// in -startRunning) into a 32-bit UIImage via a per-pixel YCbCr->RGB
// conversion. Returns nil when the buffer carries no pixel data.
//
// Fixes vs. the original:
//  * Guards against a nil image buffer (e.g. a sample with no pixels),
//    which would previously have been dereferenced.
//  * The inner `int16_t y` shadowed the row index `int y`; renamed to
//    `luma` for clarity (behavior unchanged — the shadowing happened to
//    be harmless).
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (!imageBuffer) {
        return nil;
    }
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    uint8_t *yBuffer = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
    size_t yPitch = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);
    uint8_t *cbCrBuffer = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 1);
    size_t cbCrPitch = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 1);

    int bytesPerPixel = 4;
    uint8_t *rgbBuffer = malloc(width * height * bytesPerPixel);

    for (int row = 0; row < height; row++) {
        uint8_t *rgbBufferLine = &rgbBuffer[row * width * bytesPerPixel];
        uint8_t *yBufferLine = &yBuffer[row * yPitch];
        // Chroma is subsampled 2x vertically: one CbCr row serves two Y rows.
        uint8_t *cbCrBufferLine = &cbCrBuffer[(row >> 1) * cbCrPitch];

        for (int x = 0; x < width; x++) {
            int16_t luma = yBufferLine[x];
            int16_t cb = cbCrBufferLine[x & ~1] - 128;  // Cb lives at even offsets
            int16_t cr = cbCrBufferLine[x | 1] - 128;   // Cr lives at odd offsets

            uint8_t *rgbOutput = &rgbBufferLine[x * bytesPerPixel];

            int16_t r = (int16_t)roundf(luma + cr * 1.4);
            int16_t g = (int16_t)roundf(luma + cb * -0.343 + cr * -0.711);
            int16_t b = (int16_t)roundf(luma + cb * 1.765);

            // Byte layout matches kCGBitmapByteOrder32Little | NoneSkipLast below.
            rgbOutput[0] = 0xff;
            rgbOutput[1] = clamp(b);
            rgbOutput[2] = clamp(g);
            rgbOutput[3] = clamp(r);
        }
    }

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(rgbBuffer, width, height, 8, width * bytesPerPixel, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipLast);
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    UIImage *image = [UIImage imageWithCGImage:quartzImage];

    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    CGImageRelease(quartzImage);
    free(rgbBuffer);

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    return image;
}

#pragma mark - Getter and Setter
// Lazily-created cancel button pinned at the top-left corner.
- (UIButton *)backBtn {
    if (_backBtn) {
        return _backBtn;
    }
    UIButton *cancelButton = [UIButton buttonWithType:UIButtonTypeCustom];
    cancelButton.frame = CGRectMake(15, 20, 50, 50);
    [cancelButton setTitle:@"取消" forState:UIControlStateNormal];  // "Cancel"
    [cancelButton addTarget:self action:@selector(backAction:) forControlEvents:UIControlEventTouchUpInside];
    _backBtn = cancelButton;
    return _backBtn;
}

// Lazily-created hint label.
// Text: "Please scan your face in a well-lit place".
- (UILabel *)alertLabel {
    if (_alertLabel) {
        return _alertLabel;
    }
    UILabel *hint = [[UILabel alloc] init];
    hint.text = @"请在光线明亮处扫脸识别";
    hint.textColor = [UIColor whiteColor];
    _alertLabel = hint;
    return _alertLabel;
}

// Lazily-created full-screen scan overlay with a square transparent
// window sized at 300/375 of the design width (App_Frame_Width).
- (QRView *)qrView {
    if (_qrView) {
        return _qrView;
    }
    _qrView = [[QRView alloc] initWithFrame:[QRUtil screenBounds]];
    float side = 300.0 / 375.0 * App_Frame_Width;
    _qrView.transparentArea = CGSizeMake(side, side);
    _qrView.backgroundColor = [UIColor clearColor];
    return _qrView;
}

@end
