//
//  FaceCertifyViewController.m
//  WZEfengAndEtong
//
//  Created by 李旭 on 16/8/3.
//  Copyright © 2016年 wanzhao. All rights reserved.
//

/*

#import "FaceCertifyViewController.h"
#import <AVFoundation/AVFoundation.h>
#import "FaceppLocalDetector.h"
#import "CContactMgr.h"
#import "CContactModel.h"
#import "FaceppDetection+LocalResultUploader.h"
#import "FaceppAPI.h"
#import "FaceppRecognition.h"
#import "FaceppResult.h"
#import "UIUtil.h"
@interface FaceCertifyViewController ()<AVCaptureVideoDataOutputSampleBufferDelegate>
@property(nonatomic,strong) AVCaptureSession *captureSession;
@property(nonatomic,strong) AVCaptureDevice *captureDevice;
@property(nonatomic,strong) AVCaptureVideoPreviewLayer *previewLayer;
@property(nonatomic,strong) FaceppLocalDetector *detector;
@property(nonatomic,strong) NSString *companyName;
@property(nonatomic,strong) CContactModel *currentUser;
@property (nonatomic,assign) NSTimeInterval lasttime;
@end

@implementation FaceCertifyViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    [self configCaptureSession];
    [self initdetector];
    [self.navigationItem setNewTitleCenter:@"人脸识别打卡"];

    // Derive the Face++ group name ("company") from the user's email:
    // the segment between '@' and the first '.' — "user@acme.com" -> "acme".
    // Guard both searches against NSNotFound: the original code would throw
    // an out-of-range exception on an email without '@' or '.'.
    _currentUser = [[[CContactMgr alloc] init] loadCurContact];
    NSRange atRange = [_currentUser.Email rangeOfString:@"@"];
    if (atRange.location != NSNotFound) {
        self.companyName = [_currentUser.Email substringFromIndex:atRange.location + 1];
        NSRange dotRange = [self.companyName rangeOfString:@"."];
        if (dotRange.location != NSNotFound) {
            self.companyName = [self.companyName substringToIndex:dotRange.location];
        }
    }

    // On-screen hint asking the user to center their head in the preview circle.
    UILabel *label = [[UILabel alloc] init];
    label.text = @"请将头部放入框内";
    label.textAlignment = NSTextAlignmentCenter;
    label.textColor = [UIColor redColor];
    [self.view addSubview:label];
    [label mas_makeConstraints:^(MASConstraintMaker *make) {
        make.top.mas_equalTo(10);
        make.width.mas_equalTo(150);
        make.height.mas_equalTo(40);
        // Horizontally centered: left inset = half the view width minus half the label width.
        make.left.mas_equalTo(self.view.bounds.size.width / 2.0 - 75);
    }];

    // Timestamp (ms) of the last recognition attempt; throttles frame processing.
    self.lasttime = [[UIUtil getCurrentTimeStamp] doubleValue];
}
/// Dismisses this face-verification screen without completing verification.
/// (Target of a currently-disabled cancel button; see viewDidLoad.)
- (void)cancelBtnclick {
    [self dismissViewControllerAnimated:YES completion:nil];
}

/// Creates the on-device Face++ detector used to pre-screen camera frames
/// (exactly one face required) before any network round trip is made.
- (void)initdetector {
    // Tracking off: each frame is analysed independently.
    // Minimum face size 20px and high accuracy match the original tuning.
    NSDictionary *options = @{
        FaceppDetectorTracking    : @NO,
        FaceppDetectorMinFaceSize : @20,
        FaceppDetectorAccuracy    : FaceppDetectorAccuracyHigh,
    };
    _detector = [FaceppLocalDetector detectorOfOptions:options
                                             andAPIKey:@"ac4e5df665ee33efa53f8705454bc0d2"];
}
/// Builds the capture pipeline: front camera -> 32BGRA video frames delivered
/// to this controller on a private serial queue, plus a circular preview layer.
- (void)configCaptureSession {
    // Pick the front-facing camera and stop looking; the original looped on
    // after a match and allocated a throwaway NSArray it immediately replaced.
    for (AVCaptureDevice *device in [AVCaptureDevice devices]) {
        if ([device hasMediaType:AVMediaTypeVideo] &&
            device.position == AVCaptureDevicePositionFront) {
            _captureDevice = device;
            break;
        }
    }
    if (_captureDevice == nil) {
        return; // No front camera on this device; leave the session unset.
    }

    _captureSession = [[AVCaptureSession alloc] init];
    _captureSession.sessionPreset = AVCaptureSessionPresetMedium;

    // Surface input-creation failures instead of passing error:nil.
    NSError *inputError = nil;
    AVCaptureDeviceInput *captureDeviceInput =
        [[AVCaptureDeviceInput alloc] initWithDevice:_captureDevice error:&inputError];
    if (captureDeviceInput != nil && [_captureSession canAddInput:captureDeviceInput]) {
        [_captureSession addInput:captureDeviceInput];
    }

    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
    dispatch_queue_t cameraQueue = dispatch_queue_create("cameraQueue", DISPATCH_QUEUE_SERIAL);
    [output setSampleBufferDelegate:self queue:cameraQueue];
    // 32BGRA so each frame can be wrapped in a CIImage later without conversion.
    output.videoSettings = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
    if ([_captureSession canAddOutput:output]) {
        [_captureSession addOutput:output];
    }

    // Square preview cropped to a circle: 20pt side margins, 100pt from the top.
    _previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession];
    _previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    CGFloat side = self.view.bounds.size.width - 40;
    _previewLayer.frame = CGRectMake(20, 100, side, side);
    _previewLayer.cornerRadius = side / 2;
    _previewLayer.masksToBounds = YES;
    [self.view.layer addSublayer:_previewLayer];
}
/// Starts (or resumes) the camera session whenever the screen becomes visible.
- (void)viewWillAppear:(BOOL)animated {
    [super viewWillAppear:animated];
    [self.captureSession startRunning];
}
/// Converts one camera sample buffer into a mirrored UIImage for the face
/// detector. LeftMirrored compensates for the front camera sensor's native
/// landscape orientation (straightened later by fixOrientation:).
- (UIImage *)sampleBufferToImage:(CMSampleBufferRef)sampleBuffer {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CIImage *ciImage = [CIImage imageWithCVPixelBuffer:imageBuffer];

    // CIContext creation is expensive and this method runs once per frame;
    // cache a single context (CIContext is documented as thread-safe).
    static CIContext *sharedContext = nil;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        sharedContext = [CIContext contextWithOptions:nil];
    });

    CGRect frameRect = CGRectMake(0, 0,
                                  CVPixelBufferGetWidth(imageBuffer),
                                  CVPixelBufferGetHeight(imageBuffer));
    CGImageRef videoImage = [sharedContext createCGImage:ciImage fromRect:frameRect];
    UIImage *result = [[UIImage alloc] initWithCGImage:videoImage
                                                 scale:1.0
                                           orientation:UIImageOrientationLeftMirrored];
    CGImageRelease(videoImage); // createCGImage follows the Create rule.
    return result;
}



/// Re-renders `aImage` so its pixels are physically "up" and its
/// imageOrientation becomes the default. The camera path feeds in
/// LeftMirrored images (see sampleBufferToImage:), which the Face++
/// detector cannot interpret via orientation metadata alone.
/// @param aImage Source image, possibly carrying a non-Up orientation.
/// @return A new upright image, or `aImage` itself if already Up.
- (UIImage *)fixOrientation:(UIImage *)aImage {
    
    // No-op if the orientation is already correct
    if (aImage.imageOrientation == UIImageOrientationUp)
        return aImage;
    
    // We need to calculate the proper transformation to make the image upright.
    // We do it in 2 steps: Rotate if Left/Right/Down, and then flip if Mirrored.
    CGAffineTransform transform = CGAffineTransformIdentity;
    
    // Step 1: rotation. Translate first so the rotated image stays inside
    // the (0,0,w,h) drawing box.
    switch (aImage.imageOrientation) {
        case UIImageOrientationDown:
        case UIImageOrientationDownMirrored:
            transform = CGAffineTransformTranslate(transform, aImage.size.width, aImage.size.height);
            transform = CGAffineTransformRotate(transform, M_PI);
            break;
            
        case UIImageOrientationLeft:
        case UIImageOrientationLeftMirrored:
            transform = CGAffineTransformTranslate(transform, aImage.size.width, 0);
            transform = CGAffineTransformRotate(transform, M_PI_2);
            break;
            
        case UIImageOrientationRight:
        case UIImageOrientationRightMirrored:
            transform = CGAffineTransformTranslate(transform, 0, aImage.size.height);
            transform = CGAffineTransformRotate(transform, -M_PI_2);
            break;
        default:
            break;
    }
    
    // Step 2: horizontal flip for the four Mirrored variants.
    switch (aImage.imageOrientation) {
        case UIImageOrientationUpMirrored:
        case UIImageOrientationDownMirrored:
            transform = CGAffineTransformTranslate(transform, aImage.size.width, 0);
            transform = CGAffineTransformScale(transform, -1, 1);
            break;
            
        case UIImageOrientationLeftMirrored:
        case UIImageOrientationRightMirrored:
            // Height, not width: the 90-degree rotation above swapped axes.
            transform = CGAffineTransformTranslate(transform, aImage.size.height, 0);
            transform = CGAffineTransformScale(transform, -1, 1);
            break;
        default:
            break;
    }
    
    // Now we draw the underlying CGImage into a new context, applying the transform
    // calculated above.
    CGContextRef ctx = CGBitmapContextCreate(NULL, aImage.size.width, aImage.size.height,
                                             CGImageGetBitsPerComponent(aImage.CGImage), 0,
                                             CGImageGetColorSpace(aImage.CGImage),
                                             CGImageGetBitmapInfo(aImage.CGImage));
    CGContextConcatCTM(ctx, transform);
    switch (aImage.imageOrientation) {
        case UIImageOrientationLeft:
        case UIImageOrientationLeftMirrored:
        case UIImageOrientationRight:
        case UIImageOrientationRightMirrored:
            // Grr... width/height swapped because the source pixels are rotated 90 degrees.
            CGContextDrawImage(ctx, CGRectMake(0,0,aImage.size.height,aImage.size.width), aImage.CGImage);
            break;
            
        default:
            CGContextDrawImage(ctx, CGRectMake(0,0,aImage.size.width,aImage.size.height), aImage.CGImage);
            break;
    }
    
    // And now we just create a new UIImage from the drawing context
    CGImageRef cgimg = CGBitmapContextCreateImage(ctx);
    UIImage *img = [UIImage imageWithCGImage:cgimg];
    CGContextRelease(ctx);
    CGImageRelease(cgimg);
    return img;
}
/// AVCaptureVideoDataOutputSampleBufferDelegate — called on the camera queue
/// for every captured frame. Throttles to one attempt every 3 seconds,
/// pre-screens locally for exactly one face, then calls Face++ to detect and
/// identify the face against the company group.
///
/// Fix over the original: when cloud detect/identify failed or returned zero
/// faces, the session stayed stopped and the HUD stayed visible forever —
/// every failure path now hides the HUD and restarts capture.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    // Skip frames until 3 seconds have elapsed (timestamps are milliseconds).
    NSTimeInterval elapsedMs = [[UIUtil getCurrentTimeStamp] doubleValue] - self.lasttime;
    if (elapsedMs / 1000.0 <= 3) {
        return;
    }

    UIImage *image = [self sampleBufferToImage:sampleBuffer];
    UIImage *okImage = [self fixOrientation:image];
    FaceppLocalResult *localresult = [_detector detectWithImage:okImage];
    if (localresult.faces.count != 1) {
        return; // Need exactly one face in frame before hitting the network.
    }

    [self.captureSession stopRunning];
    NSData *data = UIImageJPEGRepresentation(okImage, 1);

    __weak typeof(self) weakSelf = self;
    dispatch_async(dispatch_get_main_queue(), ^{
        [weakSelf showanimation];
    });

    // Shared failure path: hide the HUD, rearm the throttle, optionally
    // announce the failure, and resume capturing.
    void (^retry)(NSString *) = ^(NSString *message) {
        dispatch_async(dispatch_get_main_queue(), ^{
            [MBProgressHUD hideHUDForView:weakSelf.view];
            weakSelf.lasttime = [[UIUtil getCurrentTimeStamp] doubleValue];
            if (message != nil) {
                [weakSelf speakName:message];
            }
            [weakSelf.captureSession startRunning];
        });
    };

    dispatch_async(dispatch_get_global_queue(0, 0), ^{
        // Stage 1: server-side detection on the captured JPEG.
        FaceppResult *detectResult =
            [[[FaceppDetection alloc] init] detectWithURL:nil
                                              orImageData:data
                                                     mode:FaceppDetectionModeOneFace];
        if (!detectResult.success || [detectResult.content[@"face"] count] == 0) {
            retry(nil);
            return;
        }

        // Stage 2: identify the face within the company's Face++ group.
        FaceppResult *identifyResult =
            [[[FaceppRecognition alloc] init] identifyWithGroupId:nil
                                                      orGroupName:weakSelf.companyName
                                                           andURL:nil
                                                      orImageData:data
                                                      orKeyFaceId:nil
                                                            async:NO];
        if (!identifyResult.success || [identifyResult.content[@"face"] count] == 0) {
            retry(nil);
            return;
        }

        NSString *name = [identifyResult.content[@"face"] firstObject][@"candidate"][0][@"person_name"];
        dispatch_async(dispatch_get_main_queue(), ^{
            if ([name isEqualToString:weakSelf.currentUser.UserName]) {
                // Match: announce, notify the delegate, and dismiss.
                [weakSelf speakName:weakSelf.currentUser.UserName];
                if ([weakSelf.delegate respondsToSelector:@selector(Verificationsuccessfulwithbutton:)]) {
                    [weakSelf.delegate Verificationsuccessfulwithbutton:weakSelf.button];
                }
                [MBProgressHUD hideHUDForView:weakSelf.view];
                [weakSelf dismissViewControllerAnimated:YES completion:nil];
            } else {
                // Best candidate is someone else: announce failure and retry.
                [MBProgressHUD hideHUDForView:weakSelf.view];
                weakSelf.lasttime = [[UIUtil getCurrentTimeStamp] doubleValue];
                [weakSelf speakName:@"认证失败请重试"];
                [weakSelf.captureSession startRunning];
            }
        });
    });
}
/// Resumes the camera session so another recognition attempt can be made.
- (void)restart {
    [self.captureSession startRunning];
}
/// Returns the portion of `image` inside `rect` (in pixel coordinates).
/// @param image Source image; its CGImage backing is cropped directly.
/// @param rect  Crop rectangle in the source image's pixel space.
/// @return A new image containing only the cropped region.
- (UIImage *)imageFromImage:(UIImage *)image inRect:(CGRect)rect {
    CGImageRef sourceImageRef = [image CGImage];
    CGImageRef newImageRef = CGImageCreateWithImageInRect(sourceImageRef, rect);
    UIImage *newImage = [UIImage imageWithCGImage:newImageRef];
    // CGImageCreateWithImageInRect follows the Create Rule: the caller owns
    // the returned CGImage, so release it here — the original leaked it.
    CGImageRelease(newImageRef);
    return newImage;
}
/// Speaks `str` aloud in Mandarin using the system text-to-speech engine.
/// @param str The text to speak (the verified user's name or a failure notice).
- (void)speakName:(NSString *)str {
    // Keep one long-lived synthesizer: the original's method-local instance
    // could be deallocated by ARC when the method returned, cutting speech short.
    static AVSpeechSynthesizer *synthesizer = nil;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        synthesizer = [[AVSpeechSynthesizer alloc] init];
    });

    AVSpeechUtterance *utterance = [AVSpeechUtterance speechUtteranceWithString:str];
    // Mandarin voice; moderate volume (0.0–1.0) and rate, neutral pitch.
    utterance.voice = [AVSpeechSynthesisVoice voiceWithLanguage:@"zh-CN"];
    utterance.volume = 0.5;
    utterance.rate = 0.4;
    utterance.pitchMultiplier = 1.0;
    [synthesizer speakUtterance:utterance];
}

/// Overlays a translucent "verifying…" progress HUD on top of the camera preview.
- (void)showanimation {
    MBProgressHUD *hud = [[MBProgressHUD alloc] initWithView:self.view];
    hud.alpha = 0.5;
    hud.removeFromSuperViewOnHide = YES;
    [hud setLabelText:@"正在认证..."];
    [self.view addSubview:hud];
    [self.view bringSubviewToFront:hud];
    [hud show:YES];
}
@end
*/
