//
//  SMLiveDetectionViewController.m
//  Runner
//
//  Created by tock on 2023/9/23.
//

#import "SMLiveDetectionViewController.h"
#import <AVFoundation/AVFoundation.h>
#import <CoreVideo/CoreVideo.h>
#import "UIUtilities.h"
#import <GoogleMLKit/MLKit.h>

static NSString *const smsessionQueueLabel = @"com.smurfsai.visiondetector.SessionQueue";
static NSString *const smvideoDataOutputQueueLabel =
    @"com.smurfsai.visiondetector.VideoDataOutputQueue";

static float const MLKImageLabelConfidenceThreshold = 0.75;
static const CGFloat MLKSmallDotRadius = 4.0;
static const CGFloat MLKconstantScale = 1.0;
static const CGFloat MLKImageLabelResultFrameX = 0.4;
static const CGFloat MLKImageLabelResultFrameY = 0.1;
static const CGFloat MLKImageLabelResultFrameWidth = 0.5;
static const CGFloat MLKImageLabelResultFrameHeight = 0.8;
static const CGFloat MLKSegmentationMaskAlpha = 0.5;
static const CGFloat SMBorderWidth = 4.0;
static const CGFloat SMCircleSpace = 5.0;

/// Completion handler for liveness detection: `succed` reports overall success,
/// `actionStatus` carries a base64 data-URL of the first captured face frame
/// under the key @"pic", and `faces` is an array of captured face images.
typedef void(^resultCallback)(BOOL succed,NSDictionary *actionStatus,NSArray *faces);

@interface SMLiveDetectionViewController ()<AVCaptureVideoDataOutputSampleBufferDelegate>{
    // Guards the color-flash sequence in -doFaceDetectionAction so it only
    // runs once at a time.
    BOOL doDetectionFaceAction;
    // Set once a left / right head turn has been observed during MSActionTurnHead.
    BOOL turnHeadLeftFinish;
    BOOL turnHeadRightFinish;
}

// Detector kinds carried over from the MLKit sample app; only
// DetectorOnDeviceFace is used by this controller.
typedef NS_ENUM(NSInteger, Detector) {
  DetectorOnDeviceBarcode,
  DetectorOnDeviceFace,
  DetectorOnDeviceText,
  DetectorOnDeviceTextChinese,
  DetectorOnDeviceTextDevanagari,
  DetectorOnDeviceTextJapanese,
  DetectorOnDeviceTextKorean,
  DetectorOnDeviceImageLabels,
  DetectorOnDeviceImageLabelsCustom,
  DetectorOnDeviceObjectProminentNoClassifier,
  DetectorOnDeviceObjectProminentWithClassifier,
  DetectorOnDeviceObjectMultipleNoClassifier,
  DetectorOnDeviceObjectMultipleWithClassifier,
  DetectorOnDeviceObjectCustomProminentNoClassifier,
  DetectorOnDeviceObjectCustomProminentWithClassifier,
  DetectorOnDeviceObjectCustomMultipleNoClassifier,
  DetectorOnDeviceObjectCustomMultipleWithClassifier,
  DetectorPose,
  DetectorPoseAccurate,
  DetectorSegmentationSelfie,
};

// States of the liveness-check state machine, advanced by
// -addContoursForFace:width:height: and -detectFacesOnDeviceInImage:width:height:.
typedef NS_ENUM(NSInteger, MSActionType){
    MSActionNone = 0,
    MSActionCloseTheCamera,
    MSActionFaceAvable,
    MSActionDetectionFace,
    MSActionTurnHead,
    MSActionCloseEye,
    MSActionOpenMouth,
    MSActionFinished,
};

// UI elements.
@property(nonatomic, strong) UILabel *messageLabel;
@property(nonatomic, strong) UILabel *timeOutLabel;
@property(nonatomic) NSArray *detectors;
@property(nonatomic) Detector currentDetector;
@property(nonatomic) bool isUsingFrontCamera;
@property(nonatomic, nonnull) AVCaptureVideoPreviewLayer *previewLayer;
@property(nonatomic) AVCaptureSession *captureSession;
// Serial queue for all capture-session configuration and start/stop calls.
@property(nonatomic) dispatch_queue_t sessionQueue;
@property(nonatomic) UIView *annotationOverlayView;
@property(nonatomic) UIImageView *previewOverlayView;
@property(nonatomic) UIView *cameraView;
// Most recent sample buffer from the capture callback.
// NOTE(review): stored without CFRetain in -captureOutput:... -- the buffer
// may be recycled by AVFoundation before it is read; confirm lifetime.
@property(nonatomic) CMSampleBufferRef lastFrame;

// Layout metrics computed in -viewDidLoad.
@property(nonatomic) CGFloat containWidth;
@property(nonatomic) CGFloat containHeight;
// Region (in preview coordinates) a face must fall inside to count as valid.
@property(nonatomic) CGRect validRect;

// Baseline measurements captured while in MSActionDetectionFace, used to
// detect head turns, eye closing and mouth opening relative to the start pose.
@property(nonatomic) CGFloat begin_nose_x;
@property(nonatomic) CGFloat begin_eye_height;
@property(nonatomic) CGFloat begin_lips_height;
@property(nonatomic) MSActionType actionType;

@property(strong) NSArray * actionList;
// Remaining countdown in seconds; 0 triggers the failure callback.
@property(assign) NSInteger timeOut;
// Up to three face frames captured during MSActionDetectionFace.
@property(strong) UIImage * faceImg1;
@property(strong) UIImage * faceImg2;
@property(strong) UIImage * faceImg3;
@property(copy) resultCallback callback;
// Repeating 1s countdown timer, created in -viewDidAppear:.
@property(strong) dispatch_source_t timer;

@property(nonatomic) Detector lastDetector;
@end



@implementation SMLiveDetectionViewController


/// Creates a liveness-detection controller configured with the given action
/// list, timeout (seconds) and result callback. The caller is responsible
/// for presenting the returned controller.
+ (SMLiveDetectionViewController *)doFaceDetectionWithActionList:(NSArray *)actionList
                                                         timeOut:(NSInteger)timeOut
                                                        callback:(void (^)(BOOL succed, NSDictionary *actionStatus, NSArray *faces))callback {
    SMLiveDetectionViewController *controller = [[SMLiveDetectionViewController alloc] init];
    controller.actionList = actionList;
    controller.timeOut = timeOut;
    controller.callback = callback;
    return controller;
}

/// Returns the app's current key window.
/// On iOS 15+ this searches the connected scenes for a foreground window
/// scene; on earlier systems it falls back to the first application window.
+ (UIWindow *)currentWindow {
    if (@available(iOS 15, *)) {
        // Must be initialized to nil: reading an uninitialized __block object
        // pointer is undefined behavior when no scene matches below.
        __block UIScene *foregroundScene = nil;
        [[[UIApplication sharedApplication] connectedScenes]
            enumerateObjectsUsingBlock:^(UIScene * _Nonnull obj, BOOL * _Nonnull stop) {
                if (obj.activationState == UISceneActivationStateForegroundActive ||
                    obj.activationState == UISceneActivationStateForegroundInactive) {
                    foregroundScene = obj;
                    *stop = YES;
                }
            }];
        UIWindowScene *windowScene = (UIWindowScene *)foregroundScene;
        // keyWindow on a nil scene safely returns nil.
        return windowScene.keyWindow;
    } else {
        // firstObject is nil-safe; objectAtIndex:0 throws on an empty array.
        return [[[UIApplication sharedApplication] windows] firstObject];
    }
}


/// Tears down the detection UI and clears the stored callback so it can no
/// longer fire. View work is always performed on the main queue.
- (void)stopDetection {
    dispatch_async(dispatch_get_main_queue(), ^{
        [self.view removeFromSuperview];
        if (self.callback != nil) {
            self.callback = nil;
        }
    });
}






- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view.
    
    // Reset the liveness state machine and the per-session baselines.
    self.actionType = MSActionNone;
    turnHeadLeftFinish = FALSE;
    turnHeadRightFinish = FALSE;
    _begin_nose_x = -1;
    // The circular camera container is the screen width minus 100pt; the
    // full container adds 100pt below it for the message label.
    self.containWidth = [UIScreen mainScreen].bounds.size.width - 100;
    self.containHeight = _containWidth + 100;
    
    self.currentDetector = DetectorOnDeviceFace;
    _isUsingFrontCamera = YES;
    _captureSession = [[AVCaptureSession alloc] init];
    _sessionQueue = dispatch_queue_create(smsessionQueueLabel.UTF8String, nil);
    _previewOverlayView = [[UIImageView alloc] initWithFrame:CGRectZero];
    _previewOverlayView.contentMode = UIViewContentModeScaleAspectFill;
    _previewOverlayView.clipsToBounds = YES;
    _annotationOverlayView = [[UIView alloc] initWithFrame:CGRectZero];

    doDetectionFaceAction = NO;
    self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:_captureSession];
    // Order matters: the container views must exist before the overlays and
    // the animated rings are attached to them; the session output/input setup
    // runs asynchronously on the session queue.
    [self initDetectionView];
    [self setUpPreviewOverlayView];
    [self setUpAnnotationOverlayView];
    [self setUpCaptureSessionOutput];
    [self setUpCaptureSessionInput];
    [self setupVideoView:_cameraView];
    [self setupAnimatedLayer:_cameraView];
}

/// Starts the capture session and a 1-second countdown timer. When the
/// countdown reaches zero, the failure callback is invoked once with any
/// face images captured so far, and the controller is popped.
- (void)viewDidAppear:(BOOL)animated {
  [super viewDidAppear:animated];
  [self startSession];
  self.actionType = MSActionNone;
  // __weak, not __unsafe_unretained: the timer block can outlive this
  // controller, and an unsafe reference would then dangle and crash.
  __weak typeof(self) weakSelf = self;
  if (self.timeOut == 0) {
    self.timeOut = 30;  // Default countdown of 30 seconds.
  }
  dispatch_source_t timer =
      dispatch_source_create(DISPATCH_SOURCE_TYPE_TIMER, 0, 0, dispatch_get_main_queue());
  dispatch_source_set_timer(timer, DISPATCH_TIME_NOW, 1 * NSEC_PER_SEC, 0 * NSEC_PER_SEC);
  dispatch_source_set_event_handler(timer, ^{
    __strong typeof(weakSelf) strongSelf = weakSelf;
    if (strongSelf == nil) {
      return;
    }
    if (strongSelf.timeOut > 0) {
      strongSelf.timeOut -= 1;
      strongSelf.timeOutLabel.text =
          [NSString stringWithFormat:@"%ld", (long)strongSelf.timeOut];
    }
    if (strongSelf.timeOut == 0) {
      // Countdown expired: cancel the timer FIRST so the failure callback is
      // not re-invoked on every subsequent tick (the original kept firing).
      if (strongSelf.timer != nil) {
        dispatch_source_cancel(strongSelf.timer);
      }
      NSString *faceImgStr = [strongSelf image2DataURL:strongSelf.faceImg1];
      if (!faceImgStr) {
        faceImgStr = @"";
      }
      NSMutableArray *faces = [NSMutableArray arrayWithCapacity:0];
      if (strongSelf.faceImg1) {
        [faces addObject:strongSelf.faceImg1];
      }
      if (strongSelf.faceImg2) {
        [faces addObject:strongSelf.faceImg2];
      }
      if (strongSelf.faceImg3) {
        [faces addObject:strongSelf.faceImg3];
      }
      if (strongSelf.callback) {
        // Pass the collected frames; the original built `faces` and then
        // discarded it, so callers always received an empty array.
        strongSelf.callback(NO, @{@"pic" : faceImgStr}, faces);
      }
      [strongSelf.navigationController popViewControllerAnimated:YES];
    }
  });
  dispatch_resume(timer);
  self.timer = timer;
}

/// Stops the countdown timer and the capture session when the view goes away.
- (void)viewDidDisappear:(BOOL)animated {
  [super viewDidDisappear:animated];
  // Guard against a nil source: dispatch_source_cancel(NULL) crashes, and the
  // timer is only created in -viewDidAppear:. The original @available(iOS 8.0, *)
  // check was meaningless -- GCD source cancellation long predates iOS 8.
  if (self.timer != nil) {
    dispatch_source_cancel(self.timer);
    self.timer = nil;
  }
  [self stopSession];
}

/// Keeps the capture preview layer aligned with the camera container view.
- (void)viewDidLayoutSubviews {
  [super viewDidLayoutSubviews];
  self.previewLayer.frame = self.cameraView.bounds;
}

// Central state-machine setter: stores the new action and shows the matching
// instruction text on the main queue. The messages are user-facing Chinese UI
// strings and are therefore left untranslated; English glosses are given in
// trailing comments.
-(void) setActionType:(MSActionType)actionType{
    _actionType = actionType;

    NSString *msg = @"未监测到人脸";  // default: "no face detected"
    switch (_actionType) {
        case MSActionNone:
            msg =@"未监测到人脸";  // "no face detected"
            break;
        case MSActionDetectionFace:
            msg = @"监测到人脸请保持";  // "face detected, hold still"
            // If a reference frame was already captured, detection can
            // complete immediately.
            if(self.faceImg1){
                [self callBackFaceImg];
            }
            break;
        case MSActionTurnHead:
            msg = @"请左右转头";  // "turn your head left and right"
    
            break;
        case MSActionCloseEye:
            msg = @"请闭合眼睛";  // "close your eyes"
            break;
        case MSActionOpenMouth:
            msg = @"请张开嘴巴";  // "open your mouth"
            break;
      case MSActionFinished:{
          msg  = @"检测通过";  // "check passed"
          // Terminal state: deliver the success callback.
          [self callBackFaceImg];
      }
            break;
        case MSActionCloseTheCamera:
            msg = @"请靠近一些";  // "please come closer"
            break;
        case MSActionFaceAvable:
            msg =@"监测到人脸";  // "face detected"
            break;
        default:
            break;
    }
    // This setter may be called from the video-output queue; label updates
    // must happen on the main queue.
    dispatch_async(dispatch_get_main_queue(), ^{
        self.messageLabel.text = msg;
    });
    
}


/// Reports a successful detection through the stored callback (after a 1s
/// delay so the final UI state is visible) and pops the controller.
- (void)callBackFaceImg {
    // __weak, not __unsafe_unretained: the delayed block may fire after the
    // controller has been deallocated, and an unsafe reference would dangle.
    __weak typeof(self) weakSelf = self;
    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(1 * NSEC_PER_SEC)),
                   dispatch_get_main_queue(), ^{
        __strong typeof(weakSelf) strongSelf = weakSelf;
        if (strongSelf == nil || strongSelf.callback == nil) {
            return;
        }
        NSString *faceImgStr = [strongSelf image2DataURL:strongSelf.faceImg1];
        if (!faceImgStr) {
            faceImgStr = @"";
        }
        NSMutableArray *faces = [NSMutableArray arrayWithCapacity:0];
        if (strongSelf.faceImg1) {
            [faces addObject:strongSelf.faceImg1];
        }
        if (strongSelf.faceImg2) {
            [faces addObject:strongSelf.faceImg2];
        }
        if (strongSelf.faceImg3) {
            [faces addObject:strongSelf.faceImg3];
        }
        // Pass the collected frames; the original built `faces` but then
        // passed an empty array, so callers never received the images.
        strongSelf.callback(YES, @{@"pic" : faceImgStr}, faces);
        [strongSelf.navigationController popViewControllerAnimated:YES];
    });
}

// Builds the static UI: a large countdown label at the top, a circular
// camera view inside a centered container, and a message label beneath it.
-(void) initDetectionView{
    
  // Countdown label spans the full width of the 150pt-tall top band.
  UILabel *timeOutLabel = [[UILabel alloc] initWithFrame:CGRectMake(0, 0, [UIScreen mainScreen].bounds.size.width, 150)];
  timeOutLabel.font = [UIFont boldSystemFontOfSize:44];
  timeOutLabel.textAlignment = NSTextAlignmentCenter;
  timeOutLabel.textColor = [UIColor blueColor];
  [self.view addSubview:timeOutLabel];
  self.timeOutLabel = timeOutLabel;
    // Horizontally centered container directly below the countdown label.
    CGFloat contain_x = ([UIScreen mainScreen].bounds.size.width - _containWidth)/2;
    CGFloat contain_y = 150;
    UIView *containView = [[UIView alloc] initWithFrame:CGRectMake(contain_x, contain_y, _containWidth, _containHeight)];
    containView.backgroundColor = UIColor.clearColor;
    
    // Square camera area (later masked to a circle by the gradient rings).
    CGRect videoRect = CGRectMake(0, 0, _containWidth, _containWidth);
    UIView *videoView = [[UIView alloc] initWithFrame:videoRect];
    videoView.backgroundColor = [UIColor clearColor];
    [containView addSubview:videoView];
    
    
    // Instruction label fills the remaining space below the camera area.
    UILabel *messageLabel = [[UILabel alloc] initWithFrame:CGRectMake(0, _containWidth + SMCircleSpace, _containWidth, (_containHeight - _containWidth - SMCircleSpace))];
    messageLabel.textColor = UIColor.redColor;
    messageLabel.textAlignment = NSTextAlignmentCenter;
    messageLabel.font = [UIFont systemFontOfSize:24];
    messageLabel.backgroundColor = [UIColor clearColor];
    messageLabel.numberOfLines = 2;
    [containView addSubview:messageLabel];
    
    [self.view addSubview:containView];
    self.view.backgroundColor = [UIColor whiteColor];
    self.messageLabel = messageLabel;
    self.cameraView = videoView;
    
    
}

// Adds the outer rotating gradient ring around the camera view. The gradient
// layer is masked to a circular stroke, then spun counter-clockwise (2π -> 0)
// once every 3 seconds, indefinitely.
-(void) setupVideoView:(UIView *)view{
    CAGradientLayer *gradientLayer = [[CAGradientLayer alloc]init];
    gradientLayer.frame = view.bounds;
    gradientLayer.colors = @[(id)[UIColor colorWithRed:197/255.0 green:189/255.0 blue:246/255.0 alpha:1].CGColor,(id)[UIColor colorWithRed:244/255.0 green:243/255.0 blue:248/255.0 alpha:1].CGColor];
    
    // Circular stroke mask: only the ring of width SMBorderWidth shows the
    // gradient; the interior stays clear so the camera preview is visible.
    CAShapeLayer *maskLayer = [[CAShapeLayer alloc]init];
    maskLayer.lineWidth = SMBorderWidth;
    maskLayer.path = [UIBezierPath bezierPathWithArcCenter:CGPointMake(view.bounds.size.width/2, view.bounds.size.height/2) radius:view.bounds.size.width/2.0 - SMBorderWidth startAngle:-0.5*M_PI endAngle:1.5*M_PI clockwise:YES].CGPath;
    maskLayer.fillColor = UIColor.clearColor.CGColor;
    maskLayer.strokeColor = UIColor.blackColor.CGColor;
    maskLayer.cornerRadius = view.bounds.size.width/2.0;
    maskLayer.fillRule = kCAFillRuleEvenOdd;
    gradientLayer.mask = maskLayer;
    
    // Endless rotation animation (counter-clockwise, 3s per revolution).
    CABasicAnimation *pathAnima = [CABasicAnimation animationWithKeyPath:@"transform.rotation"];
    pathAnima.duration = 3.0f;
    pathAnima.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionLinear];
    pathAnima.fromValue = @(M_PI * 2);
    pathAnima.toValue = @0;
    pathAnima.repeatCount = MAXFLOAT;
    pathAnima.fillMode = kCAFillModeForwards;
    pathAnima.removedOnCompletion = YES;
    [gradientLayer addAnimation:pathAnima forKey:@"strokeEndAnimation"];
    
    [view.layer addSublayer:gradientLayer];
}

/// Adds the inner rotating gradient ring (clockwise, one revolution every 3s),
/// inset from the outer ring by the border width plus the circle spacing.
- (void)setupAnimatedLayer:(UIView *)view {
    // Symmetric inset on both axes is equivalent to shifting the origin and
    // shrinking width/height by twice the inset.
    CGFloat inset = SMBorderWidth + SMCircleSpace;
    CGRect ringFrame = CGRectInset(view.bounds, inset, inset);

    CAGradientLayer *ringLayer = [[CAGradientLayer alloc] init];
    ringLayer.frame = ringFrame;
    ringLayer.colors = @[
        (id)[UIColor colorWithRed:161/255.0 green:163/255.0 blue:216/255.0 alpha:1].CGColor,
        (id)[UIColor colorWithRed:242/255.0 green:240/255.0 blue:249/255.0 alpha:1].CGColor,
        (id)[UIColor colorWithRed:161/255.0 green:163/255.0 blue:216/255.0 alpha:1].CGColor,
        (id)[UIColor colorWithRed:242/255.0 green:240/255.0 blue:249/255.0 alpha:1].CGColor
    ];

    // A circular stroke mask turns the gradient into a thin ring.
    CAShapeLayer *circleMask = [[CAShapeLayer alloc] init];
    circleMask.lineWidth = SMBorderWidth / 2.0;
    CGPoint ringCenter = CGPointMake(ringFrame.size.width / 2, ringFrame.size.height / 2);
    circleMask.path = [UIBezierPath bezierPathWithArcCenter:ringCenter
                                                     radius:ringFrame.size.width / 2.0 - SMBorderWidth / 2.0
                                                 startAngle:-0.5 * M_PI
                                                   endAngle:1.5 * M_PI
                                                  clockwise:YES].CGPath;
    circleMask.fillColor = UIColor.clearColor.CGColor;
    circleMask.strokeColor = UIColor.blackColor.CGColor;
    circleMask.cornerRadius = ringFrame.size.width / 2.0;
    circleMask.fillRule = kCAFillRuleEvenOdd;
    ringLayer.mask = circleMask;

    // Endless clockwise rotation (0 -> 2π over 3 seconds).
    CABasicAnimation *spin = [CABasicAnimation animationWithKeyPath:@"transform.rotation"];
    spin.duration = 3.0f;
    spin.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionLinear];
    spin.fromValue = @0;
    spin.toValue = @(M_PI * 2);
    spin.repeatCount = MAXFLOAT;
    spin.fillMode = kCAFillModeForwards;
    spin.removedOnCompletion = YES;
    [ringLayer addAnimation:spin forKey:@"strokeEndAnimation"];

    [view.layer addSublayer:ringLayer];
}

#pragma mark - Private

/// Configures the session's BGRA video data output on the session queue.
- (void)setUpCaptureSessionOutput {
  __weak typeof(self) weakSelf = self;
  dispatch_async(_sessionQueue, ^{
    __strong typeof(weakSelf) strongSelf = weakSelf;
    if (strongSelf == nil) {
      NSLog(@"Failed to setUpCaptureSessionOutput because self was deallocated");
      return;
    }
    [strongSelf.captureSession beginConfiguration];
    // When performing latency tests to determine ideal capture settings,
    // run the app in 'release' mode to get accurate performance metrics.
    strongSelf.captureSession.sessionPreset = AVCaptureSessionPresetMedium;

    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
    output.videoSettings = @{
      (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)
    };
    output.alwaysDiscardsLateVideoFrames = YES;
    dispatch_queue_t outputQueue =
        dispatch_queue_create(smvideoDataOutputQueueLabel.UTF8String, nil);
    // Use strongSelf, not a raw `self` capture, so the block does not defeat
    // the weak/strong dance above (the original captured `self` here).
    [output setSampleBufferDelegate:strongSelf queue:outputQueue];
    if ([strongSelf.captureSession canAddOutput:output]) {
      [strongSelf.captureSession addOutput:output];
    } else {
      NSLog(@"%@", @"Failed to add capture session output.");
    }
    // Always balance beginConfiguration: the original skipped the commit on
    // the failure branch, leaving the session stuck mid-configuration.
    [strongSelf.captureSession commitConfiguration];
  });
}

/// Installs the camera input matching the current camera position on the
/// session queue, replacing any existing inputs.
- (void)setUpCaptureSessionInput {
  __weak typeof(self) weakSelf = self;
  dispatch_async(_sessionQueue, ^{
    __strong typeof(weakSelf) strongSelf = weakSelf;
    if (strongSelf == nil) {
      NSLog(@"Failed to setUpCaptureSessionInput because self was deallocated");
      return;
    }
    AVCaptureDevicePosition cameraPosition =
        strongSelf.isUsingFrontCamera ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack;
    AVCaptureDevice *device = [strongSelf captureDeviceForPosition:cameraPosition];
    if (device == nil) {
      NSLog(@"Failed to get capture device for camera position: %ld", (long)cameraPosition);
      return;
    }
    [strongSelf.captureSession beginConfiguration];
    // Snapshot the inputs before removing them.
    NSArray<AVCaptureInput *> *currentInputs = strongSelf.captureSession.inputs;
    for (AVCaptureInput *existingInput in currentInputs) {
      [strongSelf.captureSession removeInput:existingInput];
    }
    NSError *error;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device
                                                                        error:&error];
    // Check the return value rather than `error` (Cocoa convention).
    if (input == nil) {
      NSLog(@"Failed to create capture device input: %@", error.localizedDescription);
    } else if ([strongSelf.captureSession canAddInput:input]) {
      [strongSelf.captureSession addInput:input];
    } else {
      NSLog(@"%@", @"Failed to add capture session input.");
    }
    // Always balance beginConfiguration: the original returned early on the
    // input-creation error path without committing, leaving the session
    // stuck mid-configuration.
    [strongSelf.captureSession commitConfiguration];
  });
}

/// Starts the capture session on the session queue if it is not already
/// running.
- (void)startSession {
  if ([self.captureSession isRunning]) {
    return;
  }
  __weak typeof(self) weakSelf = self;
  dispatch_async(_sessionQueue, ^{
    [weakSelf.captureSession startRunning];
  });
}

/// Stops the capture session on the session queue if it is running.
- (void)stopSession {
  __weak typeof(self) weakSelf = self;
  dispatch_async(_sessionQueue, ^{
    if (![weakSelf.captureSession isRunning]) {
      return;
    }
    [weakSelf.captureSession stopRunning];
  });
}


/// Positions the circular preview image view inside the camera container and
/// derives the slightly smaller rect used to validate detected face positions.
- (void)setUpPreviewOverlayView {
    [_cameraView addSubview:_previewOverlayView];

    // Inset past the outer ring, the spacing gap, and half the inner stroke;
    // the symmetric inset on both edges matches the original origin/size math.
    CGFloat inset = SMBorderWidth + SMCircleSpace + SMBorderWidth / 2;
    CGRect previewFrame = CGRectInset(_cameraView.bounds, inset, inset);

    _previewOverlayView.frame = previewFrame;
    _previewOverlayView.layer.cornerRadius = previewFrame.size.width / 2.0;

    // A face must land inside a rect shrunk by a further 10pt on each edge.
    self.validRect = CGRectInset(previewFrame, 10, 10);
}
/// Makes the annotation overlay cover the entire camera view.
- (void)setUpAnnotationOverlayView {
  _annotationOverlayView.frame = _cameraView.bounds;
  [_cameraView addSubview:_annotationOverlayView];
}

/// Finds the built-in wide-angle camera at the requested position.
/// Returns nil if no match exists (or on pre-iOS-10 systems, where the
/// discovery-session API is unavailable).
- (AVCaptureDevice *)captureDeviceForPosition:(AVCaptureDevicePosition)position {
  if (@available(iOS 10, *)) {
    AVCaptureDeviceDiscoverySession *discovery = [AVCaptureDeviceDiscoverySession
        discoverySessionWithDeviceTypes:@[ AVCaptureDeviceTypeBuiltInWideAngleCamera ]
                              mediaType:AVMediaTypeVideo
                               position:AVCaptureDevicePositionUnspecified];
    for (AVCaptureDevice *candidate in discovery.devices) {
      if (candidate.position != position) {
        continue;
      }
      return candidate;
    }
  }
  return nil;
}

/// Clears every annotation (contour dots, etc.) from the overlay view.
- (void)removeDetectionAnnotations {
  // -subviews returns a snapshot array, so removal while iterating is safe.
  [_annotationOverlayView.subviews makeObjectsPerformSelector:@selector(removeFromSuperview)];
}

// Renders whatever frame the capture callback last stored into the circular
// preview overlay. Called on the main queue from the detection completion.
- (void)updatePreviewOverlayViewWithLastFrame {
  CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(_lastFrame);
  [self updatePreviewOverlayViewWithImageBuffer:imageBuffer];
}

/// Converts a pixel buffer to a UIImage (mirrored for the front camera) and
/// displays it in the preview overlay. A NULL buffer is a no-op.
- (void)updatePreviewOverlayViewWithImageBuffer:(CVImageBufferRef)imageBuffer {
  if (!imageBuffer) {
    return;
  }
  UIImageOrientation orientation = UIImageOrientationRight;
  if (_isUsingFrontCamera) {
    orientation = UIImageOrientationLeftMirrored;
  }
  _previewOverlayView.image = [UIUtilities UIImageFromImageBuffer:imageBuffer
                                                      orientation:orientation];
}

// Flashes the view background white -> red -> yellow -> blue -> white at
// 0.5s intervals (presumably a screen-light liveness cue -- confirm intent),
// then advances the state machine to MSActionTurnHead. The
// doDetectionFaceAction flag prevents overlapping sequences; the nested
// dispatch_after chain keeps each step relative to the previous one.
-(void) doFaceDetectionAction{
    if(doDetectionFaceAction){
        return;
    }
    doDetectionFaceAction = YES;
    self.view.backgroundColor = UIColor.whiteColor;
    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.5 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
        self.view.backgroundColor = UIColor.redColor;
        dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.5 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
            self.view.backgroundColor = UIColor.yellowColor;
            dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.5 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
                self.view.backgroundColor = UIColor.blueColor;
                dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.5 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
                    self.view.backgroundColor = UIColor.whiteColor;
                    // Sequence complete: move on to the head-turn check and
                    // allow the flash to run again.
                    self.actionType = MSActionTurnHead;
                    self->doDetectionFaceAction = NO;
                });
            });
        });
    });
}

/// Maps detector-space points (pixel coordinates in an image of the given
/// size) into preview-layer coordinates.
- (NSArray<NSValue *> *)convertedPointsFromPoints:(NSArray<NSValue *> *)points
                                            width:(CGFloat)width
                                           height:(CGFloat)height {
  NSMutableArray *converted = [NSMutableArray arrayWithCapacity:points.count];
  for (NSValue *value in points) {
    CGPoint imagePoint = value.CGPointValue;
    // Normalize to [0, 1] before asking the preview layer to map the point.
    CGPoint normalized = CGPointMake(imagePoint.x / width, imagePoint.y / height);
    CGPoint layerPoint = [_previewLayer pointForCaptureDevicePointOfInterest:normalized];
    [converted addObject:[NSValue valueWithCGPoint:layerPoint]];
  }
  return converted;
}

/// Maps a single MLKit vision point (pixel coordinates in an image of the
/// given size) into preview-layer coordinates.
- (CGPoint)normalizedPointFromVisionPoint:(MLKVisionPoint *)point
                                    width:(CGFloat)width
                                   height:(CGFloat)height {
  CGPoint normalized = CGPointMake(point.x / width, point.y / height);
  return [_previewLayer pointForCaptureDevicePointOfInterest:normalized];
}

/// Draws contour dots for `face` onto the annotation overlay and advances the
/// liveness state machine using geometric measurements of the contours:
///   - any face-outline point while in MSActionFaceAvable starts the
///     face-detected flash sequence;
///   - eye-contour height vs. its running maximum detects eye closing;
///   - lip-gap height vs. its baseline detects mouth opening;
///   - nose X extent vs. its baseline detects left/right head turns.
/// `width`/`height` are the source image's pixel dimensions used to normalize
/// points before mapping them into preview-layer coordinates.
- (void)addContoursForFace:(MLKFace *)face width:(CGFloat)width height:(CGFloat)height {
    // NOTE(review): nextAction is assigned but never read.
    MSActionType nextAction = MSActionNone;
  // Face
  MLKFaceContour *faceContour = [face contourOfType:MLKFaceContourTypeFace];
  for (MLKVisionPoint *point in faceContour.points) {
    CGPoint cgPoint = [self normalizedPointFromVisionPoint:point width:width height:height];
    [UIUtilities addCircleAtPoint:cgPoint
                           toView:self->_annotationOverlayView
                            color:UIColor.blueColor
                           radius:MLKSmallDotRadius];
      // Seeing any face-outline point in the "face available" state kicks off
      // the detection flash sequence (state checked every iteration, but the
      // transition only fires once because the state changes immediately).
      if(_actionType == MSActionFaceAvable){
          self.actionType = MSActionDetectionFace;
          [self doFaceDetectionAction];
      }
  }
    

  // Eyes: track min/max Y across both eye contours to estimate the current
  // eye-opening height in layer coordinates. -1 marks "not yet initialized".
    CGFloat eye_min_y = -1;
    CGFloat eye_max_y = -1;
    CGFloat eye_close_height = -1;
  MLKFaceContour *leftEyeContour = [face contourOfType:MLKFaceContourTypeLeftEye];
  for (MLKVisionPoint *point in leftEyeContour.points) {
      
    CGPoint cgPoint = [self normalizedPointFromVisionPoint:point width:width height:height];
      if(eye_min_y == -1){
          eye_min_y = cgPoint.y;
      }
      if(eye_max_y == -1){
          eye_max_y = cgPoint.y;
      }
      if(eye_min_y > cgPoint.y){
          eye_min_y = cgPoint.y;
      }
      if(eye_max_y < cgPoint.y){
          eye_max_y = cgPoint.y;
      }
  }
    
  MLKFaceContour *rightEyeContour = [face contourOfType:MLKFaceContourTypeRightEye];
  for (MLKVisionPoint *point in rightEyeContour.points) {
    CGPoint cgPoint = [self normalizedPointFromVisionPoint:point width:width height:height];
      
      if(eye_min_y == -1){
          eye_min_y = cgPoint.y;
      }
      if(eye_max_y == -1){
          eye_max_y = cgPoint.y;
      }
      if(eye_min_y > cgPoint.y){
          eye_min_y = cgPoint.y;
      }
      if(eye_max_y < cgPoint.y){
          eye_max_y = cgPoint.y;
      }
  }
    // Keep the largest eye height ever seen as the "open" baseline.
    if(self.begin_eye_height <= fabs(eye_max_y - eye_min_y)){
        self.begin_eye_height = fabs(eye_max_y - eye_min_y);
    }
    eye_close_height = fabs(eye_max_y - eye_min_y);
    // Eyes closed: current height dropped to half the open baseline or less.
    if(_actionType == MSActionCloseEye){
        if(eye_close_height <= self.begin_eye_height/2.0){
            self.actionType = MSActionFinished;
        }
    }
    

    CGFloat lips_min_y = -1;
    CGFloat lips_max_y = -1;
    CGFloat lips_height = -1;
  // Lips: the gap between the upper lip's bottom edge and the lower lip's
  // top edge measures how far the mouth is open.
  MLKFaceContour *upperLipBottomContour = [face contourOfType:MLKFaceContourTypeUpperLipBottom];
  for (MLKVisionPoint *point in upperLipBottomContour.points) {
    CGPoint cgPoint = [self normalizedPointFromVisionPoint:point width:width height:height];
      if(lips_min_y == -1){
          lips_min_y = cgPoint.y;
      }
      if(lips_min_y > cgPoint.y){
          lips_min_y = cgPoint.y;
      }
      
  }
  MLKFaceContour *lowerLipTopContour = [face contourOfType:MLKFaceContourTypeLowerLipTop];
  for (MLKVisionPoint *point in lowerLipTopContour.points) {
    CGPoint cgPoint = [self normalizedPointFromVisionPoint:point width:width height:height];
      if(lips_max_y == -1){
          lips_max_y = cgPoint.y;
      }
      if(lips_max_y < cgPoint.y){
          lips_max_y = cgPoint.y;
      }
      [UIUtilities addCircleAtPoint:cgPoint
                             toView:self->_annotationOverlayView
                              color:UIColor.redColor
                             radius:MLKSmallDotRadius];
  }
    
    
    // Baseline lip gap is (re)captured while the face is first being held.
    if(_actionType == MSActionDetectionFace){
        self.begin_lips_height = fabs(lips_max_y - lips_min_y);
    }
    lips_height = fabs(lips_max_y - lips_min_y);
    // Mouth open: gap changed by at least 5 layer points from the baseline
    // (threshold chosen empirically -- TODO confirm for all screen sizes).
    if(_actionType == MSActionOpenMouth){
        float value = fabs(lips_height - _begin_lips_height);
        if(value >= 5){
            self.actionType = MSActionFinished;
        }
    }
    
    // Nose: track the min (begin_nose_x) and max (begin_nose_x_max) X across
    // the bridge and bottom contours; horizontal drift of the min relative to
    // its baseline indicates a head turn.
    CGFloat begin_nose_x = -1;
    CGFloat begin_nose_x_max = -1;
    MLKFaceContour *noseBridgeContour = [face contourOfType:MLKFaceContourTypeNoseBridge];
    for (MLKVisionPoint *point in noseBridgeContour.points) {
      CGPoint cgPoint = [self normalizedPointFromVisionPoint:point width:width height:height];
      [UIUtilities addCircleAtPoint:cgPoint
                             toView:self->_annotationOverlayView
                              color:UIColor.yellowColor
                             radius:MLKSmallDotRadius];
        if(begin_nose_x == -1){
            begin_nose_x = cgPoint.x;
        }
        if(begin_nose_x > cgPoint.x){
            begin_nose_x = cgPoint.x;
        }
        
        if(begin_nose_x_max == -1){
            begin_nose_x_max = cgPoint.x;
        }
        if(begin_nose_x_max < cgPoint.x){
            begin_nose_x_max = cgPoint.x;
        }
    }
    MLKFaceContour *noseBottomContour = [face contourOfType:MLKFaceContourTypeNoseBottom];
    for (MLKVisionPoint *point in noseBottomContour.points) {
      CGPoint cgPoint = [self normalizedPointFromVisionPoint:point width:width height:height];
      [UIUtilities addCircleAtPoint:cgPoint
                             toView:self->_annotationOverlayView
                              color:UIColor.yellowColor
                             radius:MLKSmallDotRadius];
        if(begin_nose_x == -1){
            begin_nose_x = cgPoint.x;
        }
        if(begin_nose_x > cgPoint.x){
            begin_nose_x = cgPoint.x;
        }
        
        if(begin_nose_x_max == -1){
            begin_nose_x_max = cgPoint.x;
        }
        if(begin_nose_x_max < cgPoint.x){
            begin_nose_x_max = cgPoint.x;
        }
    }
    // Baseline nose position is (re)captured while the face is being held.
    if(_actionType == MSActionDetectionFace){
        self.begin_nose_x = begin_nose_x;
    }
    // Head turn: the nose's leftmost X moved by at least half the nose width
    // in either direction; both directions must be seen to pass.
    if(_actionType == MSActionTurnHead){
        CGFloat value = begin_nose_x - _begin_nose_x;
        if(value < 0 && fabs(value) >= fabs(begin_nose_x_max - begin_nose_x)/2.0){
            turnHeadLeftFinish = YES;
        }else if (value > 0 && fabs(value) >= fabs(begin_nose_x_max - begin_nose_x)/2.0){
            turnHeadRightFinish = YES;
        }
        if(turnHeadLeftFinish && turnHeadRightFinish){
            self.actionType = MSActionOpenMouth;
        }
    }
    // Debug log (Chinese): "nose start X / initial max eye height / initial
    // max lip height".
    NSLog(@"鼻子起始坐标：%f\n眼睛起始最大高度:%f\n嘴唇起始最大高度:%f",_begin_nose_x,_begin_eye_height,_begin_lips_height);
}

/// Rotates `view` to compensate for the given image orientation.
- (void)rotateView:(UIView *)view orientation:(UIImageOrientation)orientation {
  CGFloat degree = 0.0;
  switch (orientation) {
    case UIImageOrientationUp:
    case UIImageOrientationUpMirrored:
      degree = 90.0;
      break;
    case UIImageOrientationRightMirrored:
    case UIImageOrientationLeft:
      degree = 180.0;
      break;
    case UIImageOrientationDown:
    case UIImageOrientationDownMirrored:
      degree = 270.0;
      break;
    case UIImageOrientationLeftMirrored:
    case UIImageOrientationRight:
      degree = 0.0;
      break;
  }
  // Use M_PI rather than the original's hand-typed approximation of pi.
  view.transform = CGAffineTransformMakeRotation(degree * M_PI / 180);
}


/// Converts a captured sample buffer into a portrait-oriented UIImage.
/// Returns nil when the buffer carries no image data or rendering fails.
- (UIImage *)changeBufferToImage:(CMSampleBufferRef)sampleBufferRef {
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBufferRef);
    if (pixelBuffer == NULL) {
        return nil;
    }
    CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
    
    CIContext *context = [CIContext contextWithOptions:nil];  // CPU rendering
    CGRect rect = CGRectMake(0, 0, CVPixelBufferGetWidth(pixelBuffer),
                             CVPixelBufferGetHeight(pixelBuffer));
    
    CGImageRef cgImage = [context createCGImage:ciImage fromRect:rect];
    if (cgImage == NULL) {
        return nil;
    }
    // Scale must be a positive factor; the original passed 0, which is not a
    // documented value for imageWithCGImage:scale:orientation:.
    UIImage *image = [UIImage imageWithCGImage:cgImage
                                         scale:1.0
                                   orientation:UIImageOrientationRight];
    CGImageRelease(cgImage);
    // To inspect the conversion, temporarily save to the photo album:
    // UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil);
    return image;
}

/// Returns YES when the image's backing CGImage carries an alpha channel.
- (BOOL)imageHasAlpha:(UIImage *)image {
    switch (CGImageGetAlphaInfo(image.CGImage)) {
        case kCGImageAlphaFirst:
        case kCGImageAlphaLast:
        case kCGImageAlphaPremultipliedFirst:
        case kCGImageAlphaPremultipliedLast:
            return YES;
        default:
            return NO;
    }
}
/// Encodes an image as a base64 `data:` URL.
/// Images over 512KB at full quality are re-encoded at a reduced JPEG
/// quality roughly targeting ~100KB. Returns nil when encoding fails.
- (NSString *)image2DataURL:(UIImage *)image {
    NSData *data = UIImageJPEGRepresentation(image, 1);
    if (data.length >= 512 * 1024) {
        // JPEG compression quality in (0, 1], proportional to the overshoot.
        CGFloat rate = 100.0 * 1024.0 / data.length;
        data = UIImageJPEGRepresentation(image, rate);
    }
    if (data == nil) {
        // The original would format the literal string "(null)" into the URL
        // here; callers already substitute @"" for a nil return.
        return nil;
    }
    // The payload is JPEG, so advertise image/jpeg -- the original mislabeled
    // it image/png. (Also dropped: an unused intermediate base64 NSData.)
    return [NSString stringWithFormat:@"data:%@;base64,%@", @"image/jpeg",
            [data base64EncodedStringWithOptions:0]];
}

#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate

/// Per-frame capture callback (runs on the video-output queue): stores the
/// frame for preview rendering, runs face detection, and captures up to three
/// reference face images while in the face-detected state.
- (void)captureOutput:(AVCaptureOutput *)output
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
  if (imageBuffer == NULL) {
    NSLog(@"%@", @"Failed to get image buffer from sample buffer.");
    return;
  }

  [self resetManagedLifecycleDetectorsForActiveDetector:self.currentDetector];

  // Retain the stored frame: AVFoundation recycles sample buffers after this
  // callback returns, so keeping the raw pointer (as the original did) left
  // _lastFrame dangling when read later on the main queue.
  if (_lastFrame != NULL) {
    CFRelease(_lastFrame);
  }
  _lastFrame = (CMSampleBufferRef)CFRetain(sampleBuffer);

  MLKVisionImage *visionImage = [[MLKVisionImage alloc] initWithBuffer:sampleBuffer];
  UIImageOrientation orientation = [UIUtilities
      imageOrientationFromDevicePosition:_isUsingFrontCamera ? AVCaptureDevicePositionFront
                                                             : AVCaptureDevicePositionBack];
  visionImage.orientation = orientation;
  // (Removed: an unused GMLImage that was built here and never consumed.)

  CGFloat imageWidth = CVPixelBufferGetWidth(imageBuffer);
  CGFloat imageHeight = CVPixelBufferGetHeight(imageBuffer);

  [self detectFacesOnDeviceInImage:visionImage width:imageWidth height:imageHeight];

  // Snapshot up to three frames while the face is being held steady.
  if (self.actionType == MSActionDetectionFace) {
    if (!self.faceImg1) {
      self.faceImg1 = [self changeBufferToImage:sampleBuffer];
    } else if (!self.faceImg2) {
      self.faceImg2 = [self changeBufferToImage:sampleBuffer];
    } else if (!self.faceImg3) {
      self.faceImg3 = [self changeBufferToImage:sampleBuffer];
    }
  }
}

#pragma mark - On-Device Detections

/// Runs MLKit face detection on `image` (whose pixel dimensions are
/// width x height) and then, synchronously on the main queue, refreshes the
/// preview, redraws contours and advances the liveness state machine.
- (void)detectFacesOnDeviceInImage:(MLKVisionImage *)image
                             width:(CGFloat)width
                            height:(CGFloat)height {
  if (self.actionType == MSActionFinished) {
    return;
  }
  // When performing latency tests to determine ideal detection settings, run
  // the app in 'release' mode to get accurate performance metrics.
  // Reuse a single detector instead of allocating one per frame, as the
  // options never change and MLKit recommends reusing detector instances.
  static MLKFaceDetector *faceDetector = nil;
  static dispatch_once_t onceToken;
  dispatch_once(&onceToken, ^{
    MLKFaceDetectorOptions *options = [[MLKFaceDetectorOptions alloc] init];
    options.performanceMode = MLKFaceDetectorPerformanceModeFast;
    options.contourMode = MLKFaceDetectorContourModeAll;
    options.landmarkMode = MLKFaceDetectorLandmarkModeAll;
    options.classificationMode = MLKFaceDetectorClassificationModeAll;
    options.minFaceSize = (CGFloat)0.2f;
    faceDetector = [MLKFaceDetector faceDetectorWithOptions:options];
  });

  NSError *error;
  NSArray<MLKFace *> *faces = [faceDetector resultsInImage:image error:&error];
  __weak typeof(self) weakSelf = self;
  // Synchronous hop: this method runs on the video-output queue, so blocking
  // here naturally throttles detection to the main queue's drawing pace.
  dispatch_sync(dispatch_get_main_queue(), ^{
    __strong typeof(weakSelf) strongSelf = weakSelf;
    if (strongSelf == nil) {
      return;
    }
    [strongSelf updatePreviewOverlayViewWithLastFrame];
    [strongSelf removeDetectionAnnotations];
    if (error != nil) {
      NSLog(@"Failed to detect faces with error: %@", error.localizedDescription);
      strongSelf.actionType = MSActionNone;
      return;
    }
    if (faces.count == 0) {
      strongSelf.actionType = MSActionNone;
      return;
    }

    BOOL faceInTheValidRect = NO;
    BOOL tooLong = NO;
    for (MLKFace *face in faces) {
      // Map the face frame from image pixels into preview-layer coordinates.
      CGRect normalizedRect =
          CGRectMake(face.frame.origin.x / width, face.frame.origin.y / height,
                     face.frame.size.width / width, face.frame.size.height / height);
      CGRect standardizedRect = CGRectStandardize(
          [strongSelf.previewLayer rectForMetadataOutputRectOfInterest:normalizedRect]);
      if (!CGRectContainsRect(strongSelf.validRect, standardizedRect)) {
        continue;
      }
      faceInTheValidRect = YES;
      // A face much smaller than the valid rect means the user is too far away.
      if (standardizedRect.size.height < strongSelf.validRect.size.height * 0.6) {
        tooLong = YES;
      }
      if (!tooLong) {
        [strongSelf addContoursForFace:face width:width height:height];
        if (strongSelf.actionType == MSActionCloseTheCamera ||
            strongSelf.actionType == MSActionNone) {
          strongSelf.actionType = MSActionFaceAvable;
        }
      } else if (strongSelf.actionType == MSActionNone) {
        strongSelf.actionType = MSActionCloseTheCamera;
      }
    }
    if (!faceInTheValidRect) {
      strongSelf.actionType = MSActionNone;
      return;
    }
  });
}

#pragma mark - Private


// Scaffolding retained from the MLKit sample app: tears down the previous
// detector and sets up the new one when the active detector changes. Every
// case body is currently empty or commented out, so this only tracks
// lastDetector; it is kept for parity with the sample and future detectors.
- (void)resetManagedLifecycleDetectorsForActiveDetector:(Detector)activeDetector {
  if (activeDetector == self.lastDetector) {
    // Same row as before, no need to reset any detectors.
    return;
  }
  // Clear the old detector, if applicable.
  switch (self.lastDetector) {
    case DetectorPose:
    case DetectorPoseAccurate:
//      self.poseDetector = nil;
      break;
    case DetectorSegmentationSelfie:
//      self.segmenter = nil;
    // NOTE(review): intentional(?) fall-through into default -- both are
    // empty today, but add a break here before giving this case a body.
    default:
      break;
  }
  // Initialize the new detector, if applicable.
  switch (activeDetector) {
    case DetectorPose:
    case DetectorPoseAccurate: {
      break;
    }
    case DetectorSegmentationSelfie: {
      break;
    }
    default:
      break;
  }
  self.lastDetector = activeDetector;
}

@end
