//
//  FBFaceInputViewController.m
//  流量魔盒
//
//  Created by zhoumeineng on 3/31/18.
//  Copyright © 2018 zhoumeineng. All rights reserved.
//


#import "FBFaceInputViewController.h"
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>
// Circular "scanner" backdrop: clips itself to a circle and strokes a green
// half-arc just inside its edge. The owning controller spins this view to
// produce the scanning animation.
@interface BackView : UIView

@end

@implementation BackView

- (instancetype)initWithFrame:(CGRect)frame
{
    self = [super initWithFrame:frame];
    if (self) {
        // Round the view into a circle (assumes a square frame).
        self.layer.cornerRadius = frame.size.height / 2.0;
        self.layer.masksToBounds = YES;
    }
    return self;
}

- (void)drawRect:(CGRect)rect {
    CGContextRef context = UIGraphicsGetCurrentContext();
    if (context == NULL) {
        return;
    }
    // Derive the arc geometry from the actual bounds instead of the former
    // hard-coded values (center 105,105 / radius 103, valid only for a
    // 210x210 frame). For a 210x210 frame the result is identical.
    CGFloat lineWidth = 2;
    CGFloat centerX = CGRectGetMidX(rect);
    CGFloat centerY = CGRectGetMidY(rect);
    CGFloat radius = MIN(centerX, centerY) - lineWidth;
    CGContextBeginPath(context);
    // Half circle from angle 0 to π, clockwise — same sweep as before.
    CGContextAddArc(context, centerX, centerY, radius, 0, M_PI, 1);
    CGContextSetRGBStrokeColor(context, 64/255.0, 163/255.0, 63/255.0, 1);
    CGContextSetLineWidth(context, lineWidth);
    CGContextDrawPath(context, kCGPathStroke);
    // Note: the original called CGContextClosePath AFTER CGContextDrawPath,
    // which is a no-op (the path was already consumed); it has been removed.
}

@end

// Private state for the face-capture screen: the AVFoundation capture
// pipeline (device -> input -> session -> photo output -> preview layer)
// plus the scanner UI (animated circular backdrop and status label).
@interface FBFaceInputViewController ()<AVCapturePhotoCaptureDelegate,AVCaptureVideoDataOutputSampleBufferDelegate>
@property(nonatomic,strong)AVCaptureDevice * CaptureDevice;//capture device: front camera, rear camera, or microphone

@property(nonatomic,strong)AVCaptureDeviceInput * CaptureInput;//device input wrapping the camera

@property(nonatomic,strong)AVCapturePhotoOutput * CaptureStillImageOutput;//still-image (photo) output

//@property(nonatomic,strong)AVCaptureVideoDataOutput * CaptureVideoDataOutput;//video-frame output (currently disabled)

//session: ties the input and output together and drives the capture device (camera)
@property (nonatomic, strong) AVCaptureSession *session;

//preview layer that displays the captured camera feed in real time
@property (nonatomic ,strong) AVCaptureVideoPreviewLayer *previewLayer;

@property(nonatomic,strong)BackView * backView;//animated circular backdrop behind the preview
@property(nonatomic,strong)UILabel * TitleLble;//status text shown below the scan circle
@end

@implementation FBFaceInputViewController

// Builds the UI (circular backdrop, status label, live camera preview),
// starts the scan animation and the capture session, then schedules the
// first photo capture. Order matters: each bare getter call below lazily
// creates and installs its view/layer on first access.
- (void)viewDidLoad {
    [super viewDidLoad];
    self.view.backgroundColor = [UIColor whiteColor];
    [self backView];
    [self TitleLble];
    [self previewLayer];
    [self addAnimal];
    // NOTE(review): -startRunning blocks until the session starts; Apple
    // recommends calling it off the main thread — confirm acceptable here.
    [self.session startRunning];
    [self TakePhoto];
}
/**
 * Schedule a capture attempt 2 seconds from now, giving the user time to
 * position their face. Fce:data: re-invokes this on detection failure.
 */
-(void)TakePhoto{
    // Capture self weakly: the original block retained the controller for
    // the full 2-second delay even if it had been dismissed. Messaging a
    // nil weakSelf is a harmless no-op.
    __weak typeof(self) weakSelf = self;
    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(2 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
        [weakSelf Tacketaction];
    });
}
// Standard memory-warning hook; nothing beyond the default behavior to do.
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    
    // Dispose of any resources that can be recreated.
}

/// Lazily resolved front-facing camera used for the face scan.
- (AVCaptureDevice *)CaptureDevice{
    if (_CaptureDevice == nil) {
        _CaptureDevice = [self cameraWithPosition:AVCaptureDevicePositionFront];
    }
    return _CaptureDevice;
}
/**
 * Locate a camera at the requested position.
 *
 * @param position Desired camera position (front, for face capture).
 * @return The matching AVCaptureDevice, or nil if none is available.
 */
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position
{
    if (@available(iOS 11.1, *)) {
        // TrueDepth is only queryable from iOS 11.1, hence the split.
        AVCaptureDeviceDiscoverySession *discovery = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInDualCamera, AVCaptureDeviceTypeBuiltInTrueDepthCamera, AVCaptureDeviceTypeBuiltInTelephotoCamera, AVCaptureDeviceTypeBuiltInWideAngleCamera] mediaType:AVMediaTypeVideo position:position];
        for (AVCaptureDevice *device in [discovery devices]) {
            if (device.position == position) {
                return device;
            }
        }
    } else if (@available(iOS 10.0, *)) {
        // Fix: the fallback previously returned nil, leaving the camera
        // unusable on iOS 10.0–11.0. The wide-angle default device lookup
        // is available from iOS 10.0.
        return [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera mediaType:AVMediaTypeVideo position:position];
    }
    return nil;
}

/// Lazily created device input wrapping the front camera.
- (AVCaptureDeviceInput *)CaptureInput{
    if (!_CaptureInput) {
        // Fix: the error was previously discarded (error:nil); surface a
        // failure to open the camera so it is at least visible in the log.
        NSError *error = nil;
        _CaptureInput = [[AVCaptureDeviceInput alloc] initWithDevice:self.CaptureDevice error:&error];
        if (!_CaptureInput) {
            NSLog(@"Failed to create camera input: %@", error);
        }
    }
    return _CaptureInput;
}

/// Lazily created photo output; the session attaches it on first build.
- (AVCapturePhotoOutput *)CaptureStillImageOutput{
    if (_CaptureStillImageOutput == nil) {
        _CaptureStillImageOutput = [[AVCapturePhotoOutput alloc] init];
    }
    return _CaptureStillImageOutput;
}

/// Lazily built capture session wiring the camera input to the photo output.
- (AVCaptureSession *)session{
    if (_session == nil) {
        AVCaptureSession *session = [[AVCaptureSession alloc] init];
        session.sessionPreset = AVCaptureSessionPresetPhoto;
        // Attach the device input and the still-image output, guarding with
        // canAdd* as AVFoundation requires.
        if ([session canAddInput:self.CaptureInput]) {
            [session addInput:self.CaptureInput];
        }
        if ([session canAddOutput:self.CaptureStillImageOutput]) {
            [session addOutput:self.CaptureStillImageOutput];
        }
        _session = session;
    }
    return _session;
}


/// Lazily created circular live-preview layer, centered inside backView
/// and added to the view hierarchy on first access.
- (AVCaptureVideoPreviewLayer *)previewLayer{
    if (_previewLayer == nil) {
        AVCaptureVideoPreviewLayer *layer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
        CGPoint anchor = self.backView.center;
        // 200x200 circle (corner radius 100) sitting inside the 210x210 ring.
        layer.frame = CGRectMake(anchor.x - 100, anchor.y - 100, 200, 200);
        layer.videoGravity = AVLayerVideoGravityResizeAspectFill;
        layer.cornerRadius = 100;
        layer.backgroundColor = [UIColor grayColor].CGColor;
        [self.view.layer addSublayer:layer];
        _previewLayer = layer;
    }
    return _previewLayer;
}


/// Lazily created circular backdrop, horizontally centered in the view
/// and added to the hierarchy on first access.
- (BackView *)backView{
    if (_backView == nil) {
        BackView *ring = [[BackView alloc] initWithFrame:CGRectMake(160, 160, 210, 210)];
        ring.center = CGPointMake(self.view.center.x, ring.center.y);
        [self.view addSubview:ring];
        _backView = ring;
    }
    return _backView;
}
/// Lazily created status label positioned just below the scan circle.
- (UILabel *)TitleLble{
    if (_TitleLble == nil) {
        CGRect frame = CGRectMake(0, CGRectGetMaxY(self.backView.frame) + 20, self.view.frame.size.width, 20);
        UILabel *label = [[UILabel alloc] initWithFrame:frame];
        label.text = @"将脸部置于检测区域";
        label.font = [UIFont systemFontOfSize:14];
        label.textColor = [UIColor grayColor];
        label.textAlignment = NSTextAlignmentCenter;
        [self.view addSubview:label];
        _TitleLble = label;
    }
    return _TitleLble;
}

/// Spins the circular backdrop continuously, one revolution every 2 seconds.
-(void)addAnimal{
    CABasicAnimation *spin = [CABasicAnimation animationWithKeyPath:@"transform.rotation.z"];
    spin.fromValue = @0.0f;
    spin.toValue = @((float)(M_PI * 2));
    spin.duration = 2;
    spin.repeatCount = MAXFLOAT;
    [self.backView.layer addAnimation:spin forKey:nil];
}

/**
 * Trigger a still-photo capture, but only if the photo output currently
 * has a live video connection.
 */
-(void)Tacketaction{
    AVCaptureConnection *connection = [self.CaptureStillImageOutput connectionWithMediaType:AVMediaTypeVideo];
    if (connection != nil) {
        AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
        [self.CaptureStillImageOutput capturePhotoWithSettings:settings delegate:self];
    }
}


/**
 * AVCaptureVideoDataOutputSampleBufferDelegate hook. No video data output
 * is currently added to the session (the property is commented out), so
 * this is never invoked; the previously fetched image buffer was an unused
 * local and has been removed. Kept so the declared protocol conformance
 * still compiles.
 */
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
}
/**
 * AVCapturePhotoCaptureDelegate: a still photo finished processing.
 * Converts the photo to a UIImage, corrects its orientation, and runs the
 * face-detection step on the result.
 */
- (void)captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingPhoto:(AVCapturePhoto *)photo error:(NSError *)error{
    // Fix: the capture error was previously ignored.
    if (error) {
        NSLog(@"Photo capture failed: %@", error);
        return;
    }
    // Fix: CGImageRepresentation can return NULL; guard before wrapping it.
    CGImageRef cg = [photo CGImageRepresentation];
    if (cg == NULL) {
        NSLog(@"Photo capture produced no CGImage");
        return;
    }
    UIImage *image = [UIImage imageWithCGImage:cg];
    // The raw frame arrives rotated; straighten it before detection.
    image = [self image:image rotation:UIImageOrientationRight];
    NSData *data = UIImageJPEGRepresentation(image, 0.1);
    [self Fce:image data:data];
}

/**
 * Run Core Image face detection on the captured photo and update the
 * status label; retries the capture when no face is found.
 *
 * @param imageInput Orientation-corrected capture to analyze.
 * @param data JPEG data of the capture (kept for interface compatibility;
 *             not used by the detection itself).
 */
-(void)Fce:(UIImage*)imageInput data:(NSData*)data{
    // Guard: imageWithCGImage on a NULL CGImage would make detection moot.
    if (imageInput.CGImage == NULL) {
        [self TakePhoto];
        return;
    }
    CIContext *context = [CIContext contextWithOptions:nil];
    CIImage *image = [CIImage imageWithCGImage:imageInput.CGImage];
    // High accuracy: slower, but appropriate for a one-shot verification.
    NSDictionary *param = @{CIDetectorAccuracy : CIDetectorAccuracyHigh};
    CIDetector *faceDetector = [CIDetector detectorOfType:CIDetectorTypeFace
                                                  context:context options:param];
    NSArray *detectResult = [faceDetector featuresInImage:image];
    // Fix: AVCapturePhoto delegate callbacks are not guaranteed to arrive
    // on the main queue; hop to main before touching UIKit. (The unused
    // arrM scratch array was also removed.)
    dispatch_async(dispatch_get_main_queue(), ^{
        if (detectResult.count == 0) {
            NSLog(@"头像验证失败");
            self.TitleLble.text = @"头像验证失败";
            [self TakePhoto];
        } else {
            NSLog(@"头像验证成功");
            self.TitleLble.text = @"头像验证成功";
        }
    });
    for (CIFaceFeature *faceFeature in detectResult) {
        NSLog(@"-------------------%@", faceFeature);
    }
}


/**
 * Redraw `image` rotated according to `orientation`.
 *
 * Note: in the Up/Down cases the image is drawn at the initial 0.7x scale
 * (scaleX/scaleY defaults) — preserved from the original behavior; only
 * the Left/Right cases overwrite the scale with an aspect correction.
 *
 * @param image The source image.
 * @param orientation The rotation to apply.
 * @return A newly rendered, rotated image.
 */
-(UIImage *)image:(UIImage *)image rotation:(UIImageOrientation)orientation
{
    long double rotate = 0.0;
    CGRect rect;
    float translateX = 0;
    float translateY = 0;
    float scaleX = 0.7;
    float scaleY = 0.7;
    
    switch (orientation) {
        case UIImageOrientationLeft:
            rotate =M_PI_2;
            rect =CGRectMake(0,0,image.size.height, image.size.width);
            translateX=0;
            translateY= -rect.size.width;
            scaleY =rect.size.width/rect.size.height;
            scaleX =rect.size.height/rect.size.width;
            break;
        case UIImageOrientationRight:
            rotate =3 *M_PI_2;
            rect =CGRectMake(0,0,image.size.height, image.size.width);
            translateX= -rect.size.height;
            translateY=0;
            scaleY =rect.size.width/rect.size.height;
            scaleX =rect.size.height/rect.size.width;
            break;
        case UIImageOrientationDown:
            rotate =M_PI;
            rect =CGRectMake(0,0,image.size.width, image.size.height);
            translateX= -rect.size.width;
            translateY= -rect.size.height;
            break;
        default:
            rotate =0.0;
            rect =CGRectMake(0,0,image.size.width, image.size.height);
            translateX=0;
            translateY=0;
            break;
    }
    
    UIGraphicsBeginImageContext(rect.size);
    CGContextRef context =UIGraphicsGetCurrentContext();
    // CTM transforms: flip the coordinate system, then rotate/translate/
    // scale the image into place before drawing.
    CGContextTranslateCTM(context, 0.0, rect.size.height);
    CGContextScaleCTM(context, 1.0, -1.0);
    CGContextRotateCTM(context, rotate);
    CGContextTranslateCTM(context, translateX,translateY);
    
    CGContextScaleCTM(context, scaleX,scaleY);
    CGContextDrawImage(context, CGRectMake(0,0,rect.size.width, rect.size.height), image.CGImage);
    
    UIImage *newPic =UIGraphicsGetImageFromCurrentImageContext();
    // Fix: the original never balanced UIGraphicsBeginImageContext, leaking
    // an image context on every capture.
    UIGraphicsEndImageContext();
    return newPic;
}

@end

