//
//  YLFaceIdentifyViewController.m
//  TheTenthAnniversaryOfMobileUnion
//
//  Created by kuangbiao on 2019/1/25.
//  Copyright © 2019 kuangbiao. All rights reserved.
//

#import "YLFaceIdentifyViewController.h"
#import "YLSettingViewController.h"
#import "LFFaceTracker.h"
#import "LFFaceFeature.h"
#import "YLSignSuccessViewController.h"
#import "YLFaceUserInfoModel.h"
#import "YLSignFailedViewController.h"

@interface YLFaceIdentifyViewController ()<AVCaptureVideoDataOutputSampleBufferDelegate>{
     NSTimer *timer;
}

@property (nonatomic, strong) LFFaceTracker *tracker;                       // 人脸追踪对象
@property (nonatomic) AVCaptureSession *captureSession;
@property (nonatomic) AVCaptureVideoPreviewLayer *videoPreviewLayer;
@property (nonatomic, strong) AVCaptureVideoDataOutput *videoDataOutput;    //原始视频帧，用于获取实时图像以及视频录制
@property (nonatomic, strong) AVCaptureDevice *captureDevice;
@property (nonatomic, strong) LFFaceFeature *feature;                        // 追踪到的多人脸特征数组
@property (nonatomic, assign) CFAbsoluteTime countTime;
@property (nonatomic, strong) AVCaptureDeviceInput *input;                  // 输入源
@property (nonatomic, assign) NSInteger frameCount;                  // 输入源
@property (nonatomic, strong) NSMutableDictionary *rectDict;                // 绘制的人脸位置框字典
@property (nonatomic, assign) CFAbsoluteTime markTime;
@property (nonatomic, strong) UILabel *markTimeLabel;

@property (nonatomic, strong) UIButton *switchButton;
@property (nonatomic, weak) UILabel *fps;
@property (weak, nonatomic) IBOutlet UIView *preview;// 绘制的人脸位置框字典
@property (weak, nonatomic) IBOutlet UIImageView *gridlineImageView;
@property (nonatomic, strong) YLFaceUserInfoModel *userinfoModel;

@end

@implementation YLFaceIdentifyViewController

// One-time setup: configure the scan-line animation and start camera tracking.
// (-setupSubviews, which builds the FPS debug HUD, is intentionally not called.)
- (void)viewDidLoad {
    [super viewDidLoad];
    [self initView];
    [self startTracking];
}

// Hides the logo, starts the FPS reference clock, and schedules the repeating
// scan-line animation timer.
- (void)initView{
    self.hiddenLogo = YES;
    self.countTime = CFAbsoluteTimeGetCurrent();

    // BUG FIX: the timer block previously captured self strongly. A repeating
    // NSTimer is retained by the run loop, so the block kept this view
    // controller alive indefinitely (it is never invalidated). Capture weakly
    // so the controller can deallocate normally.
    __weak typeof(self) weakSelf = self;
    timer = [NSTimer scheduledTimerWithTimeInterval:3.1 repeats:YES block:^(NSTimer * _Nonnull timer) {
        __strong typeof(weakSelf) strongSelf = weakSelf;
        if (!strongSelf) return;
        // Replay the scan animation: park the grid image just above the
        // visible area, then slide it down over 3 seconds.
        strongSelf.gridlineImageView.frame = CGRectMake(0, -713, 634, 713);
        [UIView animateWithDuration:3 animations:^{
            strongSelf.gridlineImageView.frame = CGRectMake(0, 0, 634, 713);
        } completion:nil];
    }];

    // Fire immediately rather than waiting for the first 3.1 s interval.
    [timer setFireDate:[NSDate distantPast]];
}

// Entering the screen: hide the nav bar, resume the camera, unpause the timer.
- (void)viewWillAppear:(BOOL)animated{
    [super viewWillAppear:animated];
    [self.navigationController.navigationBar setHidden:YES];
    [self.captureSession startRunning];
    [timer setFireDate:[NSDate distantPast]];
}

// Leaving the screen: restore the nav bar, stop the camera, pause the timer
// by pushing its next fire date into the far future.
- (void)viewWillDisappear:(BOOL)animated{
    [super viewWillDisappear:animated];
    [self.navigationController.navigationBar setHidden:NO];
    [self.captureSession stopRunning];
    [timer setFireDate:[NSDate distantFuture]];
}

// Layout hook — intentionally empty beyond the required super call.
- (void)viewDidLayoutSubviews{
    [super viewDidLayoutSubviews];
}

// Builds the debug HUD: an FPS readout and a per-frame latency readout.
// (Currently unused — the call site in -viewDidLoad is commented out.)
- (void)setupSubviews
{
    // FPS readout (top right).
    self.fps = [self debugHUDLabelWithFrame:CGRectMake(kScreenWidth - 180, 25, 60, 30)];

    // Per-frame processing-time readout, below the FPS label.
    self.markTimeLabel = [self debugHUDLabelWithFrame:CGRectMake(kScreenWidth - 270, 60, 150, 30)];
    self.markTime = CFAbsoluteTimeGetCurrent();
}

// Creates a translucent, rounded, monospaced overlay label, adds it to the
// view hierarchy, and returns it.
- (UILabel *)debugHUDLabelWithFrame:(CGRect)frame
{
    UILabel *label = [[UILabel alloc] initWithFrame:frame];
    label.layer.cornerRadius = 5;
    label.clipsToBounds = YES;
    label.textAlignment = NSTextAlignmentCenter;
    label.backgroundColor = [UIColor colorWithWhite:0.000 alpha:0.700];
    label.textColor = [UIColor whiteColor];
    label.font = [UIFont fontWithName:@"Menlo" size:14];
    [self.view addSubview:label];
    return label;
}

#pragma mark - 设置按钮点击事件
// Pushes the settings screen.
- (IBAction)setBtnClickAction:(id)sender {
    YLSettingViewController *settingsController =
        [[YLSettingViewController alloc] initWithNibName:@"YLSettingViewController" bundle:nil];
    [self.navigationController pushViewController:settingsController animated:YES];
}

#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate

// Per-frame callback (delivered on the main queue — see -videoDataOutput).
// Updates the FPS/latency HUD, runs face detection on the raw buffer, and on
// a successful detection freezes the session and uploads the frame.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    // Refresh the FPS label roughly once per second.
    NSTimeInterval time = CFAbsoluteTimeGetCurrent();
    self.frameCount++;  // count every frame (the reset pass was previously skipped)
    if (time - self.countTime > 1) {
        self.fps.text = [NSString stringWithFormat:@"%.0f FPS", (CGFloat)self.frameCount / (time - self.countTime)];
        self.countTime = time;
        self.frameCount = 0;
    }

    self.markTime = time;
    // Force portrait so the tracker always receives upright frames.
    [connection setVideoOrientation:AVCaptureVideoOrientationPortrait];

    // Run face detection directly on plane 0 of the pixel buffer.
    CVPixelBufferRef pixelBuffer = (CVPixelBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    uint8_t *data = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
    int iWidth  = (int)CVPixelBufferGetWidth(pixelBuffer);
    int iHeight = (int)CVPixelBufferGetHeight(pixelBuffer);
#if PIXEL_FORMAT_BGRA
    LFFaceFeature *feature = [self.tracker detectFace:data iWidth:iWidth iHeight:iHeight CVPixelFormat:LFTrackerPixelFormat_FMT_BGRA8888 orientation:[self getCurrentOrientation] isMirror:(self.captureDevice.position == AVCaptureDevicePositionFront)];
#else
    LFFaceFeature *feature = [self.tracker detectFace:data iWidth:iWidth iHeight:iHeight CVPixelFormat:LFTrackerPixelFormat_FMT_NV12 orientation:[self getCurrentOrientation] isMirror:(self.captureDevice.position == AVCaptureDevicePositionFront)];
#endif
    // BUG FIX: the base address was locked above but never unlocked, which can
    // starve the capture pipeline of buffers. Balance the lock as soon as the
    // tracker is finished with the raw bytes.
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);

    self.markTimeLabel.text = [NSString stringWithFormat:@"帧时间: %@", @(CFAbsoluteTimeGetCurrent() - self.markTime)];
    if (feature.errorCode == LF_OK) {
        // Face found: snapshot the frame, stop capturing, and start the match
        // request. (The unused faceIDS collection code was removed.)
        UIImage *image = [self imageFromSampleBuffer:sampleBuffer];
        [self.captureSession stopRunning];
        [self requestWithfaceMatch:image];
    }
}
#if PIXEL_FORMAT_BGRA
// BGRA buff to UIImage
// Converts a 32BGRA sample buffer into a UIImage, mirrored horizontally to
// compensate for the front camera's flipped output. The returned image is
// what gets uploaded to the face-match service.
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    // Get the Core Video image buffer backing the sample buffer.
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // Lock the pixel buffer before touching raw memory.
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    // Base address of the pixel data.
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    // Row stride in bytes (may exceed width * 4 due to padding).
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    // Pixel dimensions of the buffer.
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    // Device-dependent RGB color space for the bitmap context.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    // Bitmap context wrapping the buffer; the little-endian + premultiplied-
    // first flags correspond to the BGRA byte layout delivered by the output.
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    // Snapshot the context's pixels into a Quartz image (copies the data).
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    // Safe to unlock now that the pixels have been copied.
    CVPixelBufferUnlockBaseAddress(imageBuffer,0);
    // Release the context and the color space.
    CGContextRelease(context); CGColorSpaceRelease(colorSpace);
    // UpMirrored orientation flips the image back to how the user saw it.
    UIImage *image = [UIImage imageWithCGImage:quartzImage scale:[UIScreen mainScreen].scale orientation:UIImageOrientationUpMirrored];
    // The UIImage retains the CGImage; release our reference.
    CGImageRelease(quartzImage);
    return image;
}
#else

#define clamp(a) (a>255?255:(a<0?0:a))

// Converts a biplanar NV12 (Y plane + interleaved CbCr plane) sample buffer
// into a UIImage by doing a CPU-side YCbCr -> RGB conversion.
// Returns nil if the intermediate RGB buffer cannot be allocated.
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer,0);

    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    uint8_t *yBuffer = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
    size_t yPitch = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);
    uint8_t *cbCrBuffer = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 1);
    size_t cbCrPitch = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 1);

    const int bytesPerPixel = 4;
    uint8_t *rgbBuffer = malloc(width * height * bytesPerPixel);
    if (rgbBuffer == NULL) {
        // Allocation failed: unlock and bail out instead of crashing below.
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        return nil;
    }

    for (int row = 0; row < height; row++) {
        uint8_t *rgbBufferLine = &rgbBuffer[row * width * bytesPerPixel];
        uint8_t *yBufferLine = &yBuffer[row * yPitch];
        // Chroma is subsampled 2x vertically: one CbCr row serves two Y rows.
        uint8_t *cbCrBufferLine = &cbCrBuffer[(row >> 1) * cbCrPitch];

        for (int col = 0; col < width; col++) {
            // BUG FIX (readability): the inner loop previously redeclared `y`,
            // shadowing the outer row index. Renamed to `luma`; behavior is
            // unchanged.
            int16_t luma = yBufferLine[col];
            int16_t cb = cbCrBufferLine[col & ~1] - 128;  // Cb lives at even offsets
            int16_t cr = cbCrBufferLine[col | 1] - 128;   // Cr lives at odd offsets

            uint8_t *rgbOutput = &rgbBufferLine[col * bytesPerPixel];

            // Approximate BT.601-style YCbCr -> RGB conversion.
            int16_t r = (int16_t)roundf( luma + cr *  1.4 );
            int16_t g = (int16_t)roundf( luma + cb * -0.343 + cr * -0.711 );
            int16_t b = (int16_t)roundf( luma + cb *  1.765);

            // Byte order matches the little-endian/skip-last flags used for
            // the bitmap context below.
            rgbOutput[0] = 0xff;
            rgbOutput[1] = clamp(b);
            rgbOutput[2] = clamp(g);
            rgbOutput[3] = clamp(r);
        }
    }

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(rgbBuffer, width, height, 8, width * bytesPerPixel, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipLast);
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    UIImage *image = [UIImage imageWithCGImage:quartzImage];

    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    CGImageRelease(quartzImage);
    free(rgbBuffer);

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    return image;
}

#endif

// 获取方向 (屏幕水平朝上和朝下是获取不到的默认向上)
// Maps the physical device orientation to the tracker's face orientation.
// Face-up, face-down, and unknown cannot be resolved, so they default to Up.
- (LFTrackerFaceOrientation)getCurrentOrientation
{
    switch ([UIDevice currentDevice].orientation) {
        case UIDeviceOrientationLandscapeLeft:
            return LFTrackerFaceOrientationRight;
        case UIDeviceOrientationLandscapeRight:
            return LFTrackerFaceOrientationLeft;
        case UIDeviceOrientationPortraitUpsideDown:
            return LFTrackerFaceOrientationDown;
        default:
            // Portrait, FaceUp, FaceDown, Unknown, and anything else.
            return LFTrackerFaceOrientationUp;
    }
}

extern NSString *kServerHttpBaseUrl;
// Uploads the captured face image to the match endpoint, then routes to the
// sign-success or sign-failed screen based on the response status. The capture
// session (stopped by the caller) is restarted on every non-success path so
// the user can retry.
- (void)requestWithfaceMatch:(UIImage *)image{
    NSString *url = [NSString stringWithFormat:@"%@%@",kServerHttpBaseUrl,kFaceMatch_url];
    NSDictionary *params = @{@"meetNo":[CMBasicData getMerInfo].meetingNo?:@""};
    UploadParam *cardPic = [[UploadParam alloc] init];
    cardPic.data = UIImageJPEGRepresentation(image, 1);
    cardPic.name = @"file";
    cardPic.filename = @"file.jpg";
    // NOTE(review): "image/jpg" is a nonstandard MIME type (the registered one
    // is "image/jpeg") — confirm what the server expects before changing.
    cardPic.mimeType = @"image/jpg";
    // Capture weakly: the request may outlive this screen.
    __weak typeof(self) weakSelf = self;
    [HttpRequest uploadWithURLString:url parameters:params uploadParam:cardPic success:^(id responseObject) {
        __strong typeof(weakSelf) strongSelf = weakSelf;
        if (!strongSelf) return;
        NSDictionary *dict = responseObject;
        YLHttpModel *httpModel = [YLHttpModel yy_modelWithDictionary:dict];
        if([httpModel.status isEqualToString:@"200"]){
            YLFaceUserInfoModel *userInfoModel = [YLFaceUserInfoModel yy_modelWithDictionary:[dict objectForKey:@"data"]];
            YLSignSuccessViewController *signSuccess = [[YLSignSuccessViewController alloc] initWithNibName:@"YLSignSuccessViewController" bundle:nil];
            signSuccess.userModel = userInfoModel;
            [strongSelf.navigationController pushViewController:signSuccess animated:YES];
        }else{
            YLSignFailedViewController *failedVC = [[YLSignFailedViewController alloc] initWithNibName:@"YLSignFailedViewController" bundle:nil];
            [strongSelf.navigationController pushViewController:failedVC animated:YES];
            [strongSelf.captureSession startRunning];
        }
    } failure:^(NSError *error) {
        [KDLoadFrame withState:KDLoadStateError];
        // BUG FIX: on a network failure the session previously stayed stopped,
        // leaving the camera frozen with no way to retry. Restart it here.
        [weakSelf.captureSession startRunning];
    }];
}

// Pops back to the previous screen.
- (IBAction)backBtnClickAction:(id)sender {
    [[self navigationController] popViewControllerAnimated:YES];
}

// Keep the status bar hidden for the full-screen camera view.
- (BOOL)prefersStatusBarHidden{
    return YES;
}


// 设置 session 并开始 track
// Configures the capture session (front camera preferred, 25 fps, 640x480),
// attaches the preview layer to self.preview, and starts the session.
// Returns NO when no camera is available or the device input cannot be built.
- (BOOL)startTracking
{
    NSError *error;

    // Prefer the front-facing camera; otherwise fall back to the first
    // available device, then to the system default.
    // (Previously captureDevice was pre-set to the default device, which made
    // the later `if (!captureDevice)` fallback dead code.)
    AVCaptureDevice *captureDevice = nil;
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices) {
        if ([device position] == AVCaptureDevicePositionFront) {
            captureDevice = device;
            break;
        }
    }
    if (!captureDevice) {
        captureDevice = devices.firstObject ?: [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    }
    if (!captureDevice) {
        return NO; // no camera at all (e.g. simulator)
    }

    // Pin both min and max frame duration to 25 fps so the tracking load is
    // predictable.
    if ([captureDevice lockForConfiguration:&error]) {
        int frameRate = 25;
        CMTime frameDuration = CMTimeMake(1, frameRate);
        captureDevice.activeVideoMaxFrameDuration = frameDuration;
        captureDevice.activeVideoMinFrameDuration = frameDuration;
        [captureDevice unlockForConfiguration];
    }
    self.captureDevice = captureDevice;

    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
    self.input = input;
    if (!input) {
        return NO;
    }
    self.captureSession = [[AVCaptureSession alloc] init];

    [self.captureSession beginConfiguration];
    if ([self.captureSession canAddInput:input]) {
        [self.captureSession addInput:input];
    }
    if ([self.captureSession canAddOutput:self.videoDataOutput]) {
        [self.captureSession addOutput:self.videoDataOutput];
    }
    if ([self.captureSession canSetSessionPreset:AVCaptureSessionPreset640x480]) {
        [self.captureSession setSessionPreset:AVCaptureSessionPreset640x480];
    }
    [self.captureSession commitConfiguration];

    // Live preview layer behind the grid overlay.
    self.videoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
    [self.videoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
    [self.videoPreviewLayer setFrame:self.preview.layer.bounds];
    [self.preview.layer addSublayer:self.videoPreviewLayer];
    [self.captureSession startRunning];

    return YES;
}


#pragma mark - Properties

// Lazily builds the raw-frame output. An explicit pixel format must be set,
// otherwise creating a CGContext from the sample buffer fails downstream.
// Frames are delivered on the main queue.
- (AVCaptureVideoDataOutput *)videoDataOutput
{
    if (!_videoDataOutput) {
        AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
        [output setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
#if PIXEL_FORMAT_BGRA
        output.videoSettings = @{(id)kCVPixelBufferPixelFormatTypeKey: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA]};
#else
        output.videoSettings = @{(id)kCVPixelBufferPixelFormatTypeKey: [NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange]};
#endif
        _videoDataOutput = output;
    }
    return _videoDataOutput;
}

// Lazily-created dictionary holding the drawn face-rect overlays.
- (NSMutableDictionary *)rectDict
{
    if (_rectDict == nil) {
        _rectDict = [NSMutableDictionary dictionary];
    }
    return _rectDict;
}

// Lazily creates the face tracker from the bundled LinkFace license file and
// applies the project-wide detection thresholds.
- (LFFaceTracker *)tracker
{
    if (_tracker == nil) {
        NSString *licensePath = [[NSBundle mainBundle] pathForResource:@"LinkFace_License" ofType:@"lic"];
        LFFaceTracker *faceTracker = [[LFFaceTracker alloc] initWithLicensePath:licensePath];
        faceTracker.facePercentageInImage = FACE_SIZE_IN_IMAGE;
        faceTracker.faceScore = FACE_SCORE;
        _tracker = faceTracker;
    }
    return _tracker;
}

@end
