//
//  ViewController.m
//  QHVisionDemo
//
//  Created by qihuichen on 2021/8/2.
//

#import "ViewController.h"

#import "simplest_eagl_rgb_render.h"
#import "simplest_eagl_ciimage.h"

#import "QHDanmuView.h"
#import "QHViewUtil.h"
#import "QHBaseUtil.h"

// Private class extension: outlets, internal state, and the danmu
// data-source/delegate conformances.
@interface ViewController () <QHDanmuViewDataSource, QHDanmuViewDelegate>

// Container view that the GL renderers and the danmu overlay are installed into.
@property (weak, nonatomic) IBOutlet UIView *mainV;
// NOTE(review): appears unused in this file — verify against the storyboard
// before removing.
@property (weak, nonatomic) IBOutlet UIImageView *showIV;

// OpenGL ES 2 context created in viewDidLoad. Currently not handed to the
// renderers (they are created with ctx:nil) — kept for the commented-out
// CIImage render path.
@property (nonatomic, strong) EAGLContext *eaglContext;

// Bullet-comment overlay; retained so sendDanmuAction: can insert messages.
@property (nonatomic, strong) QHDanmuView *danmuView;

@end

@implementation ViewController

- (void)viewDidLoad {
    [super viewDidLoad];

    _eaglContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];

    // Delay one second so the view hierarchy is laid out before the GL
    // renderers read self.mainV's geometry.
    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(1 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{

        UIImage *image = [UIImage imageNamed:@"a1.jpg"];
        // NOTE(review): rgba_buf is malloc'ed by getImagePixel: and never
        // freed here. It is passed to two renderers and it is unclear from
        // this file whether captureOutput: keeps the pointer, so freeing it
        // immediately could be unsafe — TODO: confirm ownership and free.
        unsigned char *rgba_buf = [self getImagePixel:image];

        // Background renderer: draws the whole image, not mirrored.
        simplest_eagl *m3 = [simplest_eagl_rgb_render createWith:self.mainV ctx:nil front:NO];
        [m3 captureOutput:rgba_buf];

        // Danmu (bullet-comment) overlay stacked on top of the rendered image.
        QHDanmuView *danmuView = [[QHDanmuView alloc] initWithFrame:CGRectZero style:QHDanmuViewStyleCustom];
        danmuView.dataSource = self;
        danmuView.delegate = self;
        danmuView.danmuPoolMaxCount = 10;
        danmuView.searchPathwayMode = QHDanmuViewSearchPathwayModeBreadthFirst;
        [self.mainV addSubview:danmuView];
        [QHViewUtil fullScreen:danmuView];
        [danmuView registerClass:[QHDanmuViewCell class] forCellReuseIdentifier:@"1"];
        self.danmuView = danmuView;

        // Foreground renderer: redraws only the detected face region by
        // feeding it a custom vertex mesh.
        simplest_eagl *m = [simplest_eagl_rgb_render createWith:self.mainV ctx:nil front:YES];

        CGRect faceRect = [self p_detectFaceWithImage:image];
        // Read the container size once instead of on every mesh entry.
        CGSize viewSize = self.mainV.frame.size;
        // Four corners of the face rect — (x, y) pairs for top-left,
        // top-right, bottom-left, bottom-right — normalized to [0, 1]
        // relative to mainV.
        NSMutableArray *mesh = [NSMutableArray new];
        [mesh addObject:@(CGRectGetMinX(faceRect) / viewSize.width)];
        [mesh addObject:@(CGRectGetMinY(faceRect) / viewSize.height)];
        [mesh addObject:@(CGRectGetMaxX(faceRect) / viewSize.width)];
        [mesh addObject:@(CGRectGetMinY(faceRect) / viewSize.height)];
        [mesh addObject:@(CGRectGetMinX(faceRect) / viewSize.width)];
        [mesh addObject:@(CGRectGetMaxY(faceRect) / viewSize.height)];
        [mesh addObject:@(CGRectGetMaxX(faceRect) / viewSize.width)];
        [mesh addObject:@(CGRectGetMaxY(faceRect) / viewSize.height)];

        ((simplest_eagl_rgb_render *)m).v_mesh_a = [mesh copy];
        [m captureOutput:rgba_buf];
    });
}

/*
 * [人脸识别技术 （一） —— 基于CoreImage实现对静止图片中人脸的识别 - 简书](https://www.jianshu.com/p/15fad9efe5ba)
 * [iOS利用CoreImage实现人脸识别详解_IOS_脚本之家](https://www.jb51.net/article/112893.htm)
 */

/// Runs CoreImage face detection on `image` and maps the detected face's
/// bounds into self.mainV's coordinate space, assuming the image is shown
/// aspect-fit inside mainV.
/// @param image The source image. Must be backed by a CGImage.
/// @return The face rect in mainV coordinates, or CGRectZero when no face
///         is found. When several faces are detected, only the LAST one is
///         returned (matches the original behavior).
- (CGRect)p_detectFaceWithImage:(UIImage *)image {
    // CIImage creation requires a real CGImage; bail out early otherwise.
    if (image.CGImage == NULL) {
        return CGRectZero;
    }
    CIImage *faceImage = [CIImage imageWithCGImage:image.CGImage];
    // CIDetectorAccuracyHigh: slower but more precise than CIDetectorAccuracyLow.
    NSDictionary *opts = @{CIDetectorAccuracy : CIDetectorAccuracyHigh};
    CIDetector *faceDetector = [CIDetector detectorOfType:CIDetectorTypeFace context:nil options:opts];
    NSArray *features = [faceDetector featuresInImage:faceImage];

    CGSize inputImageSize = [faceImage extent].size;
    // CoreImage uses a bottom-left origin; flip vertically and shift up so
    // results land in UIKit's top-left coordinate system.
    CGAffineTransform transform = CGAffineTransformScale(CGAffineTransformIdentity, 1, -1);
    transform = CGAffineTransformTranslate(transform, 0, -inputImageSize.height);

    // Aspect-fit scale and centering offsets from image pixels to mainV.
    // These are identical for every face, so compute them once, outside
    // the loop (the original recomputed them per feature).
    CGSize viewSize = self.mainV.bounds.size;
    CGFloat scale = MIN(viewSize.width / inputImageSize.width,
                        viewSize.height / inputImageSize.height);
    CGFloat offsetX = (viewSize.width - inputImageSize.width * scale) / 2;
    CGFloat offsetY = (viewSize.height - inputImageSize.height * scale) / 2;
    CGAffineTransform scaleTransform = CGAffineTransformMakeScale(scale, scale);

    CGRect r = CGRectZero;
    for (CIFaceFeature *faceFeature in features) {
        // Flip, scale, then translate the face bounds into view space.
        CGRect faceViewBounds = CGRectApplyAffineTransform(faceFeature.bounds, transform);
        faceViewBounds = CGRectApplyAffineTransform(faceViewBounds, scaleTransform);
        faceViewBounds.origin.x += offsetX;
        faceViewBounds.origin.y += offsetY;
        r = faceViewBounds;
    }
    return r;
}

// * [UIImage 取 RGBA , RBGA封装 UIImage - 简书](https://www.jianshu.com/p/ca7718cf26e3)
- (unsigned char *)getImagePixel:(UIImage *)image {
    CFDataRef pixelData = CGDataProviderCopyData(CGImageGetDataProvider(image.CGImage));
    const uint8_t* data = CFDataGetBytePtr(pixelData);
    
    int width = (int)image.size.width;
    int height = (int)image.size.height;
    
    size_t bitsPerPixel = CGImageGetBitsPerPixel(image.CGImage);
    printf("bitsPerPixel = %lu\n", bitsPerPixel);
    size_t gitsPerComponent = CGImageGetBitsPerComponent(image.CGImage);
    printf("gitsPerComponent = %lu\n", gitsPerComponent);
    
    uint8_t *imgData = (uint8_t *)malloc(width*height*4);
    memcpy(imgData, data, width*height*4);
    return imgData;
}

#pragma mark - QHDanmuViewDataSource

/// Number of danmu pathways (horizontal lanes) the view lays out.
- (NSInteger)numberOfPathwaysInDanmuView:(QHDanmuView *)danmuView {
    static const NSInteger kPathwayCount = 10;
    return kPathwayCount;
}

/// Height of a single danmu lane: the rendered height of one CJK glyph at
/// the danmu font size.
- (CGFloat)heightOfPathwayCellInDanmuView:(QHDanmuView *)danmuView {
    return [QHBaseUtil getSizeWithString:@"陈" fontSize:15].height;
}

/// Dequeues a danmu cell and fills it with "<name>：<content>", the name in
/// grey (#CCCCCC) and the content in white (#FFFFFF), rendered from inline
/// HTML by QHBaseUtil.
/// @param data Expected keys: @"n" (sender name), @"c" (message content).
- (QHDanmuViewCell *)danmuView:(QHDanmuView *)danmuView cellForPathwayWithData:(NSDictionary *)data {
    // Reuse identifier must match the one registered in viewDidLoad.
    QHDanmuViewCell *cell = [danmuView dequeueReusableCellWithIdentifier:@"1"];
    NSString *n = data[@"n"];
    NSString *c = data[@"c"];
    NSString *contentString = [NSString stringWithFormat:@"<font color='#CCCCCC'>%@：</font><font color='#FFFFFF'>%@</font>", n, c];
    // Assign the attributed string directly: the mutable copy + append the
    // original did added an allocation for nothing (and would have thrown
    // on a nil result from toHTML:).
    cell.textLabel.attributedText = [QHBaseUtil toHTML:contentString fontSize:15];
    return cell;
}

#pragma mark - QHDanmuViewDelegate

/// Width a danmu needs on screen: the rendered width of its HTML content
/// (key @"c") at the danmu font size.
- (CGFloat)danmuView:(QHDanmuView *)danmuView widthForPathwayWithData:(NSDictionary *)data {
    NSAttributedString *rendered = [QHBaseUtil toHTML:data[@"c"] fontSize:15];
    CGSize renderedSize = rendered.size;
    return renderedSize.width;
}

#pragma mark - Action

// Monotonically increasing serial stamped into each test danmu message.
static int pwId = 0;

/// Inserts one hard-coded test danmu ("小白: (<serial>)-…") into the overlay.
- (IBAction)sendDanmuAction:(id)sender {
    pwId += 1;
    NSString *content = [NSString stringWithFormat:@"(%i)-讲得挺好，一听就明白。", pwId];
    NSDictionary *danmu = @{@"n": @"小白", @"c": content};
    [_danmuView insertData:@[danmu]];
}

@end
