//
//  STSilentLivenessCamera.m
//  TestSTSilentLivenessController
//
//  Created by huoqiuliang on 2019/1/14.
//  Copyright © 2019年 sensetime. All rights reserved.
//

#import "STSilentLivenessCamera.h"
#import "STSilentLivenessCommon.h"
@interface STSilentLivenessCamera () <AVCaptureVideoDataOutputSampleBufferDelegate>

// Preview layer readwrite internally (presumably declared readonly in the public header — confirm).
@property (nonatomic, strong, readwrite) AVCaptureVideoPreviewLayer *captureVideoPreviewLayer;
// Front-camera input attached to the session in -setupCaptureSession.
@property (nonatomic, strong) AVCaptureDeviceInput *deviceInput;
// 32BGRA video-data output; sample buffers are delivered on a private serial queue.
@property (nonatomic, strong) AVCaptureVideoDataOutput *dataOutput;
@property (nonatomic, strong) AVCaptureSession *session;
// The front-facing camera device, if one was found during setup.
@property (nonatomic, strong) AVCaptureDevice *deviceFront;
// Most recent connection observed in the sample-buffer callback.
@property (nonatomic, strong) AVCaptureConnection *connection;

// Face orientation derived from the connection's videoOrientation; written per frame.
@property (nonatomic, assign) STIDSilentLivenessFaceOrientaion faceOrientation;
// Frame dimensions in pixels, updated from the capture queue on every frame.
@property (nonatomic, assign) CGFloat videoHeight;
// NOTE(review): "videoWeight" appears to mean video *width*; name kept as-is because siblings use it.
@property (nonatomic, assign) CGFloat videoWeight;

// Screen-coordinate frames supplied at init; consumed by the rect-conversion methods.
@property (nonatomic, assign) CGRect prepareframe;
@property (nonatomic, assign) CGRect previewframe;
@end

@implementation STSilentLivenessCamera

/// Designated initializer. Stores the prepare/preview frames (screen
/// coordinates) and immediately builds the capture pipeline.
- (instancetype)initWithPrepareframe:(CGRect)prepareframe previewframe:(CGRect)previewframe {
    if ((self = [super init])) {
        // Direct ivar access inside init, per convention.
        _prepareframe = prepareframe;
        _previewframe = previewframe;
        [self setupCaptureSession];
    }
    return self;
}

// Tears down the capture pipeline. Removes the input/output inside a
// begin/commitConfiguration pair, then stops the session if it is still
// running. Direct ivar access is used, as required in dealloc.
- (void)dealloc {
    if (_session) {
        [_session beginConfiguration];
        [_session removeOutput:_dataOutput];
        [_session removeInput:_deviceInput];
        [_session commitConfiguration];

        // NOTE(review): -stopRunning blocks the calling thread, and dealloc
        // may run on the main thread — confirm this is acceptable for callers.
        if ([_session isRunning]) {
            [_session stopRunning];
        }
        _session = nil; //! OCLINT
    }
}

/// Builds the capture pipeline: a 640x480 session, the front camera as input,
/// and a 32BGRA video-data output delivering frames to self on a private
/// serial queue. Compiles to a no-op on the simulator.
- (void)setupCaptureSession {
#if !TARGET_IPHONE_SIMULATOR
    self.session = [[AVCaptureSession alloc] init];
    // iPhone 4S and newer support this preset.
    self.session.sessionPreset = AVCaptureSessionPreset640x480;

    AVCaptureVideoPreviewLayer *captureVideoPreviewLayer =
        [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
    captureVideoPreviewLayer.frame = self.previewframe;
    [captureVideoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspect];
    self.captureVideoPreviewLayer = captureVideoPreviewLayer;

    // Locate the front-facing camera; stop at the first match.
    for (AVCaptureDevice *device in [AVCaptureDevice devices]) {
        if ([device hasMediaType:AVMediaTypeVideo] && [device position] == AVCaptureDevicePositionFront) {
            self.deviceFront = device;
            break;
        }
    }

    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:self.deviceFront error:&error];
    self.deviceInput = input;

    self.dataOutput = [[AVCaptureVideoDataOutput alloc] init];
    [self.dataOutput setAlwaysDiscardsLateVideoFrames:YES];

    // The video format must be kCVPixelFormatType_32BGRA.
    [self.dataOutput setVideoSettings:@{(id) kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)}];

    // Deliver sample buffers on a dedicated serial queue, off the main thread.
    dispatch_queue_t queueBuffer = dispatch_queue_create("SILENT_LIVENESS_BUFFER_QUEUE", NULL);
    [self.dataOutput setSampleBufferDelegate:self queue:queueBuffer];

    [self.session beginConfiguration];

    if ([self.session canAddOutput:self.dataOutput]) {
        [self.session addOutput:self.dataOutput];
    }
    // input is nil when no front camera exists or creating the device input
    // failed — guard so nil is never passed to the session.
    if (input && [self.session canAddInput:input]) {
        [self.session addInput:input];
    }

    // Change the video orientation (intentionally disabled):
    // AVCaptureConnection *connection = [self.dataOutput connectionWithMediaType:AVMediaTypeVideo];
    // connection.videoOrientation = AVCaptureVideoOrientationLandscapeLeft;

    [self.session commitConfiguration];
#endif
}

/// Starts the capture session after checking (and, if needed, requesting)
/// camera permission. Notifies the delegate of success or failure.
/// NOTE(review): the requestAccess completion handler runs on an arbitrary
/// queue, so delegate callbacks may arrive off the main thread — callers
/// should dispatch any UI work to main.
- (void)startRunning {
    AVAuthorizationStatus authStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];

    switch (authStatus) {
        case AVAuthorizationStatusNotDetermined: {
            [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
                                     completionHandler:^(BOOL granted) {
                                         if (granted) {
                                             [self st_startSessionAndNotifySuccess];
                                         } else {
                                             [self st_notifyAuthorizationFailed];
                                         }
                                     }];
            break;
        }
        case AVAuthorizationStatusAuthorized: {
            [self st_startSessionAndNotifySuccess];
            break;
        }
        case AVAuthorizationStatusDenied:
        case AVAuthorizationStatusRestricted: {
            [self st_notifyAuthorizationFailed];
            break;
        }
    }
}

// Starts the session if it is configured and not already running, then tells
// the delegate that authorization succeeded. (Extracted from the previously
// duplicated authorized/granted branches.)
- (void)st_startSessionAndNotifySuccess {
    if (self.session && self.dataOutput && ![self.session isRunning]) {
        [self.session startRunning];
        // respondsToSelector: on a nil delegate returns NO, so no extra nil check is needed.
        if ([self.delegate respondsToSelector:@selector(cameraAuthorizationSuccessfully)]) {
            [self.delegate cameraAuthorizationSuccessfully];
        }
    }
}

// Tells the delegate that camera permission was denied or restricted.
- (void)st_notifyAuthorizationFailed {
    if ([self.delegate respondsToSelector:@selector(cameraAuthorizationFailed)]) {
        [self.delegate cameraAuthorizationFailed];
    }
}
/// Stops the capture session if it is currently running.
/// BUG FIX: the previous condition was inverted (`![self.session isRunning]`),
/// so a running session was never actually stopped.
- (void)stopRunning {
    if (self.session && self.dataOutput && [self.session isRunning]) {
        [self.session stopRunning];
    }
}

#pragma mark -
#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

// AVCaptureVideoDataOutputSampleBufferDelegate callback, invoked on the
// private "SILENT_LIVENESS_BUFFER_QUEUE" serial queue for every frame.
// Derives the frame's pixel dimensions and face orientation, caches them on
// self, then forwards the sample buffer plus the prepare/preview rects
// (converted to image coordinates) to the camera delegate.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection
    __attribute__((annotate("oclint:suppress[unused method parameter]"))) {
    CVPixelBufferRef pixelBuffer = (CVPixelBufferRef) CMSampleBufferGetImageBuffer(sampleBuffer);
    // The buffer stays locked for the whole callback, including the delegate
    // call, so consumers can safely read the base address.
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    int stride = (int) CVPixelBufferGetBytesPerRow(pixelBuffer);
    int height = (int) CVPixelBufferGetHeight(pixelBuffer);
    // stride / 4 assumes 4 bytes per pixel (the output is configured for
    // 32BGRA); this gives the row width including any row padding.
    int width = stride / 4;

    // Account for the buffer's extended (padding) pixels on each edge.
    size_t top, bottom, left, right;
    CVPixelBufferGetExtendedPixels(pixelBuffer, &left, &right, &top, &bottom);

    width = width + (int) left + (int) right;
    height = height + (int) top + (int) bottom;

    // Map the connection's video orientation onto the SDK's face orientation.
    STIDSilentLivenessFaceOrientaion faceOrientation = STIDSilentLiveness_FACE_UP;

    switch (connection.videoOrientation) {
        case AVCaptureVideoOrientationPortrait:
            faceOrientation = STIDSilentLiveness_FACE_UP;

            break;
        case AVCaptureVideoOrientationPortraitUpsideDown:

            faceOrientation = STIDSilentLiveness_FACE_DOWN;

            break;
        case AVCaptureVideoOrientationLandscapeRight:

            faceOrientation = STIDSilentLiveness_FACE_RIGHT;

            break;
        case AVCaptureVideoOrientationLandscapeLeft:

            faceOrientation = STIDSilentLiveness_FACE_LEFT;

            break;
    }

    // Cache the per-frame state consumed by the rect-conversion methods.
    // NOTE(review): these properties are written here on the capture queue and
    // may be read from other threads; confirm callers tolerate this.
    self.connection = connection;
    self.faceOrientation = faceOrientation;
    self.videoHeight = height;
    self.videoWeight = width;

    NSDictionary *imageDic = [self convertImageByScreen];
    CGRect imagePrepareRect = CGRectFromString(imageDic[@"imagePrepareRect"]);
    CGRect imagePreviewRect = CGRectFromString(imageDic[@"imagePreviewRect"]);

    // The misspelled selector parts ("Orientaion", "Preparew") presumably
    // match the delegate protocol declaration in the header — do not "fix"
    // them here without changing the protocol too.
    if (self.delegate &&
        [self.delegate respondsToSelector:@selector(didOutputSampleBuffer:
                                                           faceOrientaion:imagePreparewRect:imagePreviewRect:)]) {
        [self.delegate didOutputSampleBuffer:sampleBuffer
                              faceOrientaion:faceOrientation
                           imagePreparewRect:imagePrepareRect
                            imagePreviewRect:imagePreviewRect];
    }

    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
}

// Converts the prepare and preview frames (screen/UI coordinates) into the
// captured image's pixel coordinate space, accounting for the current face
// orientation and aspect-fit letterboxing. Returns the two rects encoded as
// strings under the keys "imagePrepareRect" and "imagePreviewRect".
// NOTE(review): videoHeight/videoWeight are swapped into imageWidth/imageHeight
// here — presumably because the sensor delivers landscape frames while the UI
// is portrait; confirm against the capture configuration.
- (NSDictionary *)convertImageByScreen {
    CGFloat imageWidth = self.videoHeight;
    CGFloat imageHeight = self.videoWeight;
    // Edges of the preview frame in screen coordinates.
    CGFloat screenPreviewLeft = self.previewframe.origin.x;
    CGFloat screenPreviewRight = self.previewframe.origin.x + self.previewframe.size.width;
    CGFloat screenPreviewTop = self.previewframe.origin.y;
    CGFloat screenPreviewBottom = self.previewframe.origin.y + self.previewframe.size.height;

    CGFloat imagePreviewLeft = 0.0;
    CGFloat imagePreviewTop = 0.0;
    CGFloat imagePreviewRight = 0.0;
    CGFloat imagePreviewBottom = 0.0;

    // Edges of the prepare frame in screen coordinates.
    CGFloat screenPrepareLeft = self.prepareframe.origin.x;
    CGFloat screenPrepareRight = self.prepareframe.origin.x + self.prepareframe.size.width;
    CGFloat screenPrepareTop = self.prepareframe.origin.y;
    CGFloat screenPrepareBottom = self.prepareframe.origin.y + self.prepareframe.size.height;

    CGFloat imagePrepareLeft = 0.0;
    CGFloat imagePrepareTop = 0.0;
    CGFloat imagePrepareRight = 0.0;
    CGFloat imagePrepareBottom = 0.0;

    // Letterbox offsets and the screen-to-image scale factor; every enum case
    // below assigns screenOnImageScale before it is used.
    CGFloat leftSpace = 0.0;
    CGFloat topSpace = 0.0;
    CGFloat screenOnImageScale = 0.0;

    switch (self.faceOrientation) {
        case STIDSilentLiveness_FACE_UP:

            // Mirror horizontally (front camera), keep vertical edges.
            // NOTE(review): mirroring uses previewframe.size.width even for the
            // prepare rect — assumed intentional; verify against callers.
            imagePreviewRight = self.previewframe.size.width - screenPreviewLeft;
            imagePreviewLeft = self.previewframe.size.width - screenPreviewRight;
            imagePreviewTop = screenPreviewTop;
            imagePreviewBottom = screenPreviewBottom;

            imagePrepareRight = self.previewframe.size.width - screenPrepareLeft;
            imagePrepareLeft = self.previewframe.size.width - screenPrepareRight;
            imagePrepareTop = screenPrepareTop;
            imagePrepareBottom = screenPrepareBottom;

            screenOnImageScale = MIN(self.previewframe.size.height / imageWidth, //! OCLINT
                                     self.previewframe.size.width / imageHeight); //! OCLINT

            leftSpace = (imageHeight * screenOnImageScale - self.previewframe.size.width) / 2.0;
            topSpace = (imageWidth * screenOnImageScale - self.previewframe.size.height) / 2.0;

            break;

        case STIDSilentLiveness_FACE_DOWN:

            // Keep horizontal edges, flip vertically.
            imagePreviewRight = screenPreviewRight;
            imagePreviewLeft = screenPreviewLeft;
            imagePreviewTop = self.previewframe.size.height - screenPreviewBottom;
            imagePreviewBottom = self.previewframe.size.height - screenPreviewTop;

            imagePrepareRight = screenPrepareRight;
            imagePrepareLeft = screenPrepareLeft;
            imagePrepareTop = self.previewframe.size.height - screenPrepareBottom;
            imagePrepareBottom = self.previewframe.size.height - screenPrepareTop;

            screenOnImageScale =
                MIN(self.previewframe.size.height / imageWidth, self.previewframe.size.width / imageHeight); //! OCLINT

            leftSpace = (imageHeight * screenOnImageScale - self.previewframe.size.width) / 2.0;
            topSpace = (imageWidth * screenOnImageScale - self.previewframe.size.height) / 2.0;

            break;

        case STIDSilentLiveness_FACE_RIGHT:

            // Rotate 90°: screen vertical axis becomes image horizontal axis.
            imagePreviewLeft = self.previewframe.size.height - screenPreviewBottom;
            imagePreviewRight = self.previewframe.size.height - screenPreviewTop;
            imagePreviewTop = screenPreviewLeft;
            imagePreviewBottom = screenPreviewRight;

            imagePrepareLeft = self.previewframe.size.height - screenPrepareBottom;
            imagePrepareRight = self.previewframe.size.height - screenPrepareTop;
            imagePrepareTop = screenPrepareLeft;
            imagePrepareBottom = screenPrepareRight;

            screenOnImageScale = MIN(self.previewframe.size.height / imageHeight, //! OCLINT
                                     self.previewframe.size.width / imageWidth); //! OCLINT

            topSpace = (imageWidth * screenOnImageScale - self.previewframe.size.width) / 2.0;
            leftSpace = (imageHeight * screenOnImageScale - self.previewframe.size.height) / 2.0;

            break;

        case STIDSilentLiveness_FACE_LEFT:

            // Rotate 90° the other way: axes swap without mirroring.
            imagePreviewLeft = screenPreviewTop;
            imagePreviewRight = screenPreviewBottom;
            imagePreviewTop = screenPreviewLeft;
            imagePreviewBottom = screenPreviewRight;

            imagePrepareLeft = screenPrepareTop;
            imagePrepareRight = screenPrepareBottom;
            imagePrepareTop = screenPrepareLeft;
            imagePrepareBottom = screenPrepareRight;

            screenOnImageScale = MIN(self.previewframe.size.height / imageHeight, //! OCLINT
                                     self.previewframe.size.width / imageWidth); //! OCLINT
            topSpace = (imageWidth * screenOnImageScale - self.previewframe.size.width) / 2.0;
            leftSpace = (imageHeight * screenOnImageScale - self.previewframe.size.height) / 2.0;

            break;
    }

    // Shift by the letterbox offsets, then divide by the scale to land in
    // image-pixel coordinates.
    CGRect imagePrepareRect = CGRectMake((imagePrepareLeft + leftSpace) / screenOnImageScale,
                                         (imagePrepareTop + topSpace) / screenOnImageScale,
                                         (imagePrepareRight - imagePrepareLeft) / screenOnImageScale,
                                         (imagePrepareBottom - imagePrepareTop) / screenOnImageScale);
    CGRect imagePreviewRect = CGRectMake((imagePreviewLeft + leftSpace) / screenOnImageScale,
                                         (imagePreviewTop + topSpace) / screenOnImageScale,
                                         (imagePreviewRight - imagePreviewLeft) / screenOnImageScale,
                                         (imagePreviewBottom - imagePreviewTop) / screenOnImageScale);

    // CGRects are string-encoded so they can travel in an NSDictionary.
    return @{
        @"imagePrepareRect": NSStringFromCGRect(imagePrepareRect),
        @"imagePreviewRect": NSStringFromCGRect(imagePreviewRect)
    };
}
// Inverse mapping of -convertImageByScreen for a single rect: converts a rect
// given in image (pixel) coordinates back into preview-layer/screen
// coordinates, using the cached face orientation and video dimensions.
// NOTE(review): in the FACE_DOWN branch left/right come out swapped and
// top/bottom inverted (bottom = imageWidth - imagePreviewBottom < top), which
// would produce a negative-size rect — looks like a bug; confirm intent
// before relying on that orientation.
- (CGRect)convertScreenRectByImageRect:(CGRect)imagePreviewRect {
    // Assigned in every enum case below before use.
    CGFloat imageOnPreviewScale;
    CGRect screenPreviewRect;
    // Width/height swapped from the stored video dimensions, mirroring the
    // convention used in -convertImageByScreen.
    CGFloat imageWidth = self.videoHeight;
    CGFloat imageHeight = self.videoWeight;

    // Edges of the input rect in image coordinates.
    CGFloat imagePreviewLeft = imagePreviewRect.origin.x;
    CGFloat imagePreviewTop = imagePreviewRect.origin.y;
    CGFloat imagePreviewRight = imagePreviewRect.size.width + imagePreviewRect.origin.x;
    CGFloat imagePreviewBottom = imagePreviewRect.size.height + imagePreviewRect.origin.y;

    CGFloat screenPreviewLeft = 0.0;
    CGFloat screenPreviewTop = 0.0;
    CGFloat screenPreviewRight = 0.0;
    CGFloat screenPreviewBottom = 0.0;

    // Letterbox offsets subtracted at the end.
    CGFloat leftSpace = 0.0;
    CGFloat topSpace = 0.0;

    switch (self.faceOrientation) {
        case STIDSilentLiveness_FACE_UP:

            // Mirror horizontally (front camera), keep vertical edges.
            screenPreviewLeft = imageHeight - imagePreviewRight;
            screenPreviewRight = imageHeight - imagePreviewLeft;
            screenPreviewTop = imagePreviewTop;
            screenPreviewBottom = imagePreviewBottom;

            imageOnPreviewScale = MIN(self.previewframe.size.height / imageWidth, //! OCLINT
                                      self.previewframe.size.width / imageHeight); //! OCLINT

            leftSpace = (imageHeight * imageOnPreviewScale - self.previewframe.size.width) / 2.0;
            topSpace = (imageWidth * imageOnPreviewScale - self.previewframe.size.height) / 2.0;
            break;

        case STIDSilentLiveness_FACE_DOWN:

            // NOTE(review): see method comment — this branch appears to swap
            // left/right and invert top/bottom.
            screenPreviewLeft = imagePreviewRight;
            screenPreviewRight = imagePreviewLeft;
            screenPreviewTop = imageWidth - imagePreviewTop;
            screenPreviewBottom = imageWidth - imagePreviewBottom;

            imageOnPreviewScale =
                MIN(self.previewframe.size.height / imageWidth, self.previewframe.size.width / imageHeight); //! OCLINT

            leftSpace = (imageHeight * imageOnPreviewScale - self.previewframe.size.width) / 2.0;
            topSpace = (imageWidth * imageOnPreviewScale - self.previewframe.size.height) / 2.0;
            break;

        case STIDSilentLiveness_FACE_LEFT:

            // Rotate 90°: image axes swap onto screen axes without mirroring.
            screenPreviewTop = imagePreviewLeft;
            screenPreviewBottom = imagePreviewRight;
            screenPreviewLeft = imagePreviewTop;
            screenPreviewRight = imagePreviewBottom;

            imageOnPreviewScale = MIN(self.previewframe.size.height / imageHeight, //! OCLINT
                                      self.previewframe.size.width / imageWidth); //! OCLINT

            leftSpace = (imageWidth * imageOnPreviewScale - self.previewframe.size.width) / 2.0;
            topSpace = (imageHeight * imageOnPreviewScale - self.previewframe.size.height) / 2.0;

            break;
        case STIDSilentLiveness_FACE_RIGHT:

            // Rotate 90° the other way, flipping both axes.
            screenPreviewTop = imageHeight - imagePreviewLeft;
            screenPreviewBottom = imageHeight - imagePreviewRight;
            screenPreviewLeft = imageWidth - imagePreviewTop;
            screenPreviewRight = imageWidth - imagePreviewBottom;

            imageOnPreviewScale = MIN(self.previewframe.size.height / imageHeight, //! OCLINT
                                      self.previewframe.size.width / imageWidth); //! OCLINT

            leftSpace = (imageWidth * imageOnPreviewScale - self.previewframe.size.width) / 2.0;
            topSpace = (imageHeight * imageOnPreviewScale - self.previewframe.size.height) / 2.0;

            break;
    }

    // Scale up into screen units, then remove the letterbox offsets.
    screenPreviewRect = CGRectMake(imageOnPreviewScale * screenPreviewLeft - leftSpace,
                                   imageOnPreviewScale * screenPreviewTop - topSpace,
                                   imageOnPreviewScale * (screenPreviewRight - screenPreviewLeft),
                                   imageOnPreviewScale * (screenPreviewBottom - screenPreviewTop));

    return screenPreviewRect;
}

@end
