//
//  ITVisionViewController.m
//  InduceTool
//
//  Created by 魏延龙 on 2020/9/29.
//  Copyright © 2020 魏延龙. All rights reserved.
//

#import "ITVisionViewController.h"
#import <Vision/Vision.h>

@interface ITVisionViewController ()

/// Displays the sample image; Vision bounding boxes are added as subviews of this view.
@property (nonatomic, strong) UIImageView *imageView;

/// Ratio of the on-screen image-view width to the source image's pixel width.
/// Used to map Vision's normalized bounding boxes into image-view coordinates.
/// Declared nonatomic to match the file's other properties (atomic buys nothing here).
@property (nonatomic, assign) CGFloat imageScale;

/// Shows the concatenated OCR result from the text-recognition request.
@property (nonatomic, strong) UITextView *textView;
@end

@implementation ITVisionViewController

- (void)viewDidLoad {
    [super viewDidLoad];

    // White backdrop so the red detection boxes stand out.
    [self.view setBackgroundColor:[UIColor whiteColor]];

    // Build the UI first, then kick off Vision text recognition.
    [self initImageView];
    [self initTextView];
//    [self initVN];
    [self initTextVN];
}

- (void)initImageView {
    // Width the image is displayed at on screen; height preserves aspect ratio.
    static const CGFloat kDisplayWidth = 200.0;

    UIImage *tempImage = [UIImage imageNamed:@"WX20200929-112750"];
    CGFloat width = tempImage.size.width;
    CGFloat height = tempImage.size.height;

    // Guard against a missing asset: imageNamed: returns nil, width would be 0
    // and the division below would produce an inf/NaN frame and scale.
    if (tempImage == nil || width <= 0) {
        NSLog(@"initImageView: image asset missing or has zero width, skipping setup");
        return;
    }

    // imageScale maps image-pixel coordinates to image-view points; the Vision
    // completion handlers rely on it to position bounding boxes.
    self.imageScale = kDisplayWidth / width;

    self.imageView = [[UIImageView alloc] initWithFrame:CGRectMake(10, 100, kDisplayWidth, height * self.imageScale)];
    self.imageView.image = tempImage;
    [self.view addSubview:self.imageView];

    NSLog(@"图片尺寸 width:%f, height:%f, scale:%f", width, height, self.imageScale);
}

// Creates the text view that displays the OCR output, placed to the
// right of the image view.
- (void)initTextView {
    UITextView *textView = [[UITextView alloc] initWithFrame:CGRectMake(210, 100, 100, 200)];
    self.textView = textView;
    [self.view addSubview:textView];
}

/// Runs Vision face-rectangle detection on a bundled sample image and draws a
/// red box over each detected face inside the image view.
/// NOTE(review): self.imageScale is computed from a *different* image in
/// initImageView; the box geometry is only correct if both images share the
/// same pixel width — confirm before re-enabling this path in viewDidLoad.
- (void)initVN {
    UIImage *tempImage = [UIImage imageNamed:@"apic27938.jpg"];
    if (tempImage == nil) {
        NSLog(@"initVN: image asset missing, skipping face detection");
        return;
    }

    __weak typeof(self) weakSelf = self;

    // Vision consumes CIImage (among others); convert the UIImage first.
    CIImage *convertImage = [[CIImage alloc] initWithImage:tempImage];

    // The handler carries the image to be processed.
    VNImageRequestHandler *detectRequestHandler = [[VNImageRequestHandler alloc] initWithCIImage:convertImage options:@{}];

    // Completion block: invoked synchronously by performRequests:error: below.
    VNRequestCompletionHandler completionHandler = ^(VNRequest *request, NSError *error) {
        if (error != nil) {
            NSLog(@"initVN: face detection failed: %@", error);
            return;
        }
        // Re-establish a strong reference for the duration of the block so we
        // don't mix strong `self` and `weakSelf` accesses inconsistently.
        __strong typeof(weakSelf) strongSelf = weakSelf;
        if (strongSelf == nil) {
            return;
        }
        NSArray *observations = request.results;
        // For every detected face, convert the normalized (bottom-left origin)
        // bounding box into image-view coordinates and overlay a red box.
        for (VNFaceObservation *observation in observations) {
            CGFloat w = observation.boundingBox.size.width * tempImage.size.width * strongSelf.imageScale;
            CGFloat h = observation.boundingBox.size.height * tempImage.size.height * strongSelf.imageScale;
            CGFloat x = observation.boundingBox.origin.x * tempImage.size.width * strongSelf.imageScale;
            // Vision's Y axis points up from the bottom; flip it to UIKit's
            // top-left origin before scaling.
            CGFloat y = (tempImage.size.height - (observation.boundingBox.origin.y * tempImage.size.height) - observation.boundingBox.size.height * tempImage.size.height) * strongSelf.imageScale;
            CGRect facePointRect = CGRectMake(x, y, w, h);
            NSLog(@"检测到人脸尺寸 %@", NSStringFromCGRect(facePointRect));
            UIView *boxView = [[UIView alloc] initWithFrame:facePointRect];
            boxView.backgroundColor = [UIColor clearColor];
            boxView.layer.borderColor = [UIColor redColor].CGColor;
            boxView.layer.borderWidth = 2;
            [strongSelf.imageView addSubview:boxView];
        }
    };

    // Face-rectangle detection request (decides *how* the handler's image is processed).
    VNDetectFaceRectanglesRequest *detectRequest = [[VNDetectFaceRectanglesRequest alloc] initWithCompletionHandler:completionHandler];

    // Execute synchronously; check the BOOL return instead of ignoring errors.
    NSError *performError = nil;
    if (![detectRequestHandler performRequests:@[detectRequest] error:&performError]) {
        NSLog(@"initVN: performRequests failed: %@", performError);
    }
}


/// Runs Vision text recognition (OCR) on the bundled sample image, overlays a
/// red box on each recognized text region inside the image view, and shows the
/// concatenated recognized strings in the text view.
- (void)initTextVN {
    UIImage *tempImage = [UIImage imageNamed:@"WX20200929-112750"];
    if (tempImage == nil) {
        NSLog(@"initTextVN: image asset missing, skipping text recognition");
        return;
    }

    __weak typeof(self) weakSelf = self;

    // Vision consumes CIImage (among others); convert the UIImage first.
    CIImage *convertImage = [[CIImage alloc] initWithImage:tempImage];

    // The handler carries the image to be processed.
//    VNImageRequestHandler *detectRequestHandler = [[VNImageRequestHandler alloc] initWithCIImage:convertImage orientation:kCGImagePropertyOrientationUp options:@{}];
    VNImageRequestHandler *detectRequestHandler = [[VNImageRequestHandler alloc] initWithCIImage:convertImage options:@{}];

    // Completion block: invoked synchronously by performRequests:error: below.
    VNRequestCompletionHandler completionHandler = ^(VNRequest *request, NSError *error) {
        if (error != nil) {
            NSLog(@"initTextVN: text recognition failed: %@", error);
            return;
        }
        // Re-establish a strong reference for the duration of the block so we
        // don't mix strong `self` and `weakSelf` accesses inconsistently.
        __strong typeof(weakSelf) strongSelf = weakSelf;
        if (strongSelf == nil) {
            return;
        }
        NSArray *observations = request.results;
        NSMutableString *result = [NSMutableString string];
        // For every recognized text region, collect the best candidate string
        // and overlay a red box over the region in the image view.
        for (VNRecognizedTextObservation *observation in observations) {
            NSString *text = [observation topCandidates:1].firstObject.string;
            // topCandidates: may return an empty array; appendString:nil would
            // throw NSInvalidArgumentException, so guard before appending.
            if (text != nil) {
                [result appendString:text];
            }
            CGFloat w = observation.boundingBox.size.width * tempImage.size.width * strongSelf.imageScale;
            CGFloat h = observation.boundingBox.size.height * tempImage.size.height * strongSelf.imageScale;
            CGFloat x = observation.boundingBox.origin.x * tempImage.size.width * strongSelf.imageScale;
            // Vision's Y axis points up from the bottom; flip it to UIKit's
            // top-left origin before scaling.
            CGFloat y = (tempImage.size.height - (observation.boundingBox.origin.y * tempImage.size.height) - observation.boundingBox.size.height * tempImage.size.height) * strongSelf.imageScale;
            CGRect facePointRect = CGRectMake(x, y, w, h);
            NSLog(@"检测到尺寸 %@", NSStringFromCGRect(facePointRect));
            UIView *boxView = [[UIView alloc] initWithFrame:facePointRect];
            boxView.backgroundColor = [UIColor clearColor];
            boxView.layer.borderColor = [UIColor redColor].CGColor;
            boxView.layer.borderWidth = 2;
            [strongSelf.imageView addSubview:boxView];
        }
        strongSelf.textView.text = [NSString stringWithFormat:@"识别结果：%@",result ];
    };

    VNRecognizeTextRequest *detectRequest = [[VNRecognizeTextRequest alloc] initWithCompletionHandler:completionHandler];
    // NOTE(review): the fast recognition level has limited language support
    // (Latin-script only on earlier iOS versions) — confirm it can read this
    // sample image; otherwise use VNRequestTextRecognitionLevelAccurate.
    detectRequest.recognitionLevel = VNRequestTextRecognitionLevelFast;

    // Execute synchronously; check the BOOL return instead of ignoring errors.
    NSError *performError = nil;
    if (![detectRequestHandler performRequests:@[detectRequest] error:&performError]) {
        NSLog(@"initTextVN: performRequests failed: %@", performError);
    }
}
@end
