//
//  ViewController.m
//  CameraCustom
//
//  Created by wangtietao on 2024/3/28.
//

#import "ViewController.h"
#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>
#import <UIKit/UIKit.h>
#import <Photos/Photos.h>
#import <AVFoundation/AVFoundation.h>

@import Vision;

// Private class extension.
// Adopts every delegate protocol the implementation actually uses — the
// original declared only AVCaptureMetadataOutputObjectsDelegate, which caused
// "does not conform to protocol" warnings when assigning self as the
// image-picker delegate (showImagePickerForSourceType:), the photo-capture
// delegate (customPhoto) and the sample-buffer delegate (faceCheck).
@interface ViewController () <AVCaptureMetadataOutputObjectsDelegate,
                              AVCapturePhotoCaptureDelegate,
                              AVCaptureVideoDataOutputSampleBufferDelegate,
                              UIImagePickerControllerDelegate,
                              UINavigationControllerDelegate>

// Session, still-photo output and preview layer for the photo / face-detection paths.
@property (strong, nonatomic) AVCaptureSession *session;
@property (strong, nonatomic) AVCapturePhotoOutput *photoOutput;
@property (strong, nonatomic) AVCaptureVideoPreviewLayer *previewLayer;

// Dedicated session and preview layer for the QR/barcode scanning path.
@property (strong, nonatomic) AVCaptureSession *captureSession;
@property (strong, nonatomic) AVCaptureVideoPreviewLayer *videoPreviewLayer;

@end

@implementation ViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    // No additional setup here: each capture pipeline is built lazily
    // when the corresponding action method is invoked.
}

// Button action: launches the system camera flow. The other capture demos
// can be tried by swapping in one of the commented calls below.
- (IBAction)openCamera:(id)sender {
    // Alternative demos:
    // [self openCamera];   // live camera preview only
    // [self scan];         // QR / barcode scanning
    // [self faceCheck];    // real-time face detection
    [self takePhoto];
}

// Open the camera and show a live preview in the top half of the view.
// Fixes over the original: input/output are only added after passing
// canAddInput:/canAddOutput: (adding an incompatible one raises an
// exception), and -startRunning is dispatched off the main thread because
// Apple documents it as a blocking call.
- (void)openCamera {
    // Create the capture session and request photo-quality output.
    self.session = [[AVCaptureSession alloc] init];
    self.session.sessionPreset = AVCaptureSessionPresetPhoto;

    // Default video device (normally the back camera); may be nil on
    // devices without a camera, e.g. the Simulator.
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!input || ![self.session canAddInput:input]) {
        NSLog(@"错误: %@", [error localizedDescription]);
        return;
    }
    [self.session addInput:input];

    // Still-photo output used by the capture delegate.
    self.photoOutput = [[AVCapturePhotoOutput alloc] init];
    if ([self.session canAddOutput:self.photoOutput]) {
        [self.session addOutput:self.photoOutput];
    }

    // Live preview layer covering the top half of the view.
    self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
    self.previewLayer.frame = CGRectMake(0, 0, self.view.frame.size.width, self.view.frame.size.height / 2);
    self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer:self.previewLayer];

    // -startRunning blocks until capture is live; keep it off the main thread.
    dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
        [self.session startRunning];
    });
}

// Present the system camera UI, requesting permission first if needed.
// Fix: the requestAccessForMediaType: completion handler is invoked on an
// arbitrary queue, but presenting a view controller is UI work and must
// run on the main queue.
- (void)takePhoto {
    // Check current camera authorization.
    AVAuthorizationStatus authStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    if (authStatus == AVAuthorizationStatusAuthorized) {
        [self showImagePickerForSourceType:UIImagePickerControllerSourceTypeCamera];
    } else {
        // Not yet authorized (or denied/restricted): ask the user.
        [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
            if (granted) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    [self showImagePickerForSourceType:UIImagePickerControllerSourceTypeCamera];
                });
            }
            // TODO: surface feedback when the user denies camera access.
        }];
    }
}
// Present the photo-library picker, requesting permission first if needed.
// Fix: the requestAuthorization: completion handler runs on an arbitrary
// queue; presenting a view controller must happen on the main queue.
- (void)selectPhoto {
    // Check current photo-library authorization.
    PHAuthorizationStatus status = [PHPhotoLibrary authorizationStatus];
    if (status == PHAuthorizationStatusAuthorized) {
        [self showImagePickerForSourceType:UIImagePickerControllerSourceTypePhotoLibrary];
    } else {
        // Not yet authorized (or denied/restricted): ask the user.
        [PHPhotoLibrary requestAuthorization:^(PHAuthorizationStatus newStatus) {
            if (newStatus == PHAuthorizationStatusAuthorized) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    [self showImagePickerForSourceType:UIImagePickerControllerSourceTypePhotoLibrary];
                });
            }
            // TODO: surface feedback when the user denies library access.
        }];
    }
}
// Create and present a UIImagePickerController for the given source type,
// doing nothing when that source is unavailable on this device.
- (void)showImagePickerForSourceType:(UIImagePickerControllerSourceType)sourceType {
    if (![UIImagePickerController isSourceTypeAvailable:sourceType]) {
        return;
    }
    UIImagePickerController *picker = [[UIImagePickerController alloc] init];
    picker.sourceType = sourceType;
    picker.delegate = self;
    [self presentViewController:picker animated:YES completion:nil];
}
#pragma mark - UIImagePickerControllerDelegate
// Called when the user takes or picks a photo.
- (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary<UIImagePickerControllerInfoKey,id> *)info {
    // The captured/selected image; hook display, saving or upload logic here.
    UIImage *selectedImage = info[UIImagePickerControllerOriginalImage];
    (void)selectedImage; // placeholder until real handling is added
    [picker dismissViewControllerAnimated:YES completion:nil];
}

// The user tapped Cancel in the picker — simply dismiss it.
- (void)imagePickerControllerDidCancel:(UIImagePickerController *)picker {
    [picker dismissViewControllerAnimated:YES completion:nil];
}


// Build a minimal custom capture pipeline and immediately take one photo;
// the result arrives via AVCapturePhotoCaptureDelegate below.
// Fixes over the original: inputs/outputs are guarded with canAddInput:/
// canAddOutput: (adding an incompatible one raises an exception), and the
// preview layer is stored in the existing property instead of a local so it
// remains reachable (the original leaked an anonymous sublayer).
- (void)customPhoto {
    self.session = [[AVCaptureSession alloc] init];

    // Default camera input.
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!input || ![self.session canAddInput:input]) {
        NSLog(@"Error creating device input: %@", error.localizedDescription);
        return;
    }
    [self.session addInput:input];

    // Still-photo output.
    self.photoOutput = [[AVCapturePhotoOutput alloc] init];
    if (![self.session canAddOutput:self.photoOutput]) {
        NSLog(@"Error: cannot add photo output to session");
        return;
    }
    [self.session addOutput:self.photoOutput];

    // Full-screen live preview.
    self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
    self.previewLayer.frame = self.view.bounds;
    [self.view.layer addSublayer:self.previewLayer];

    // -startRunning blocks until the session is live, which guarantees the
    // capture call below runs against a running session; kept on the calling
    // thread deliberately to preserve that ordering.
    [self.session startRunning];

    // Trigger a single capture with default settings.
    AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
    [self.photoOutput capturePhotoWithSettings:settings delegate:self];
}

#pragma mark - AVCapturePhotoCaptureDelegate
// Delivered once per capture with the processed photo (or an error).
- (void)captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingPhoto:(AVCapturePhoto *)photo error:(NSError *)error {
    if (error != nil) {
        NSLog(@"Error capturing photo: %@", error.localizedDescription);
        return;
    }
    // Materialize the capture as a UIImage; hook display/saving logic here.
    NSData *photoData = [photo fileDataRepresentation];
    UIImage *image = [UIImage imageWithData:photoData];
    (void)image; // not yet consumed
}


// Set up a QR / EAN-13 scanning session with a full-screen live preview.
// Recognized codes are delivered to the metadata-output delegate callback.
// Fixes over the original: canAddInput:/canAddOutput: guards (adding an
// incompatible input/output raises an exception), and -startRunning is
// dispatched off the main thread because it is a blocking call.
- (void)scan {
    self.captureSession = [[AVCaptureSession alloc] init];

    // Default camera input.
    AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
    if (!input || ![self.captureSession canAddInput:input]) {
        NSLog(@"%@", [error localizedDescription]);
        return;
    }
    [self.captureSession addInput:input];

    // Metadata output; must be attached to the session before the
    // metadataObjectTypes list can be restricted.
    AVCaptureMetadataOutput *captureMetadataOutput = [[AVCaptureMetadataOutput alloc] init];
    if (![self.captureSession canAddOutput:captureMetadataOutput]) {
        NSLog(@"Error: cannot add metadata output to session");
        return;
    }
    [self.captureSession addOutput:captureMetadataOutput];

    // Deliver recognized codes on the main queue; only QR and EAN-13.
    [captureMetadataOutput setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
    [captureMetadataOutput setMetadataObjectTypes:@[AVMetadataObjectTypeQRCode, AVMetadataObjectTypeEAN13Code]];

    // Full-screen live preview.
    self.videoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
    self.videoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    self.videoPreviewLayer.frame = self.view.layer.bounds;
    [self.view.layer addSublayer:self.videoPreviewLayer];

    // -startRunning blocks; keep it off the main thread.
    dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
        [self.captureSession startRunning];
    });
}

#pragma mark - AVCaptureMetadataOutputObjectsDelegate
// Invoked (on the main queue, as configured in -scan) whenever a machine-
// readable code is recognized in the video stream.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
    if (metadataObjects.count == 0) {
        return;
    }
    AVMetadataMachineReadableCodeObject *code = [metadataObjects firstObject];
    if ([[code type] isEqualToString:AVMetadataObjectTypeQRCode]) {
        // A QR code was found — log its payload.
        NSLog(@"QR Code: %@", [code stringValue]);
        // Stop scanning; hop to the main queue asynchronously, matching the
        // original behavior.
        dispatch_async(dispatch_get_main_queue(), ^{
            [self.captureSession stopRunning];
        });
    }
}


// Real-time face detection: streams camera frames through a video-data
// output whose sample-buffer callback runs Vision face detection per frame.
// Fixes over the original: canAddInput:/canAddOutput: guards and a
// non-blocking -startRunning (documented as a blocking call).
- (void)faceCheck {
    // High-quality session for video frames.
    self.session = [[AVCaptureSession alloc] init];
    [self.session setSessionPreset:AVCaptureSessionPresetHigh];

    // Default camera input.
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!input || ![self.session canAddInput:input]) {
        NSLog(@"Error: %@", error);
        return;
    }
    [self.session addInput:input];

    // Frames are delivered on a private serial queue so the main thread stays free.
    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
    [output setSampleBufferDelegate:self queue:dispatch_queue_create("videoQueue", DISPATCH_QUEUE_SERIAL)];
    if ([self.session canAddOutput:output]) {
        [self.session addOutput:output];
    }

    // Full-screen live preview.
    self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
    self.previewLayer.frame = self.view.bounds;
    self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer:self.previewLayer];

    // -startRunning blocks; keep it off the main thread.
    dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
        [self.session startRunning];
    });
}

#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate
// One call per captured video frame (on the serial "videoQueue" configured
// in -faceCheck); forwards the frame to the Vision-based face detector.
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    [self performFaceDetectionOnSampleBuffer:sampleBuffer];
}

// Run Vision face-rectangle detection on one captured frame.
// Fixes over the original: the pixel buffer is NULL-checked before building
// a CIImage (CMSampleBufferGetImageBuffer can return NULL for dropped or
// non-video buffers), and failure is detected from the BOOL return of
// performRequests:error: rather than by inspecting the error pointer.
- (void)performFaceDetectionOnSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    // Convert the CMSampleBufferRef into a CIImage for Vision.
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (imageBuffer == NULL) {
        return; // nothing to analyze in this sample buffer
    }
    CIImage *ciImage = [CIImage imageWithCVImageBuffer:imageBuffer];

    // Face-rectangle request; the completion handler runs synchronously
    // inside performRequests:error: below.
    VNDetectFaceRectanglesRequest *request = [[VNDetectFaceRectanglesRequest alloc] initWithCompletionHandler:^(VNRequest *request, NSError * _Nullable error) {
        for (VNFaceObservation *observation in request.results) {
            CGRect boundingBox = observation.boundingBox;
            (void)boundingBox;
            // TODO: convert Vision's normalized, bottom-left-origin rect into
            // UIKit coordinates and draw it over the preview layer.
        }
    }];

    // Execute the request against this frame.
    VNImageRequestHandler *handler = [[VNImageRequestHandler alloc] initWithCIImage:ciImage options:@{}];
    NSError *error = nil;
    if (![handler performRequests:@[request] error:&error]) {
        NSLog(@"Failed to perform Face Detection: %@", error);
    }
}

@end
