//
//  ViewController.m
//  FaceRtmpDemo
//
//  Created by acewei on 2021/10/15.
//

#import "ViewController.h"
#import <AVFoundation/AVFoundation.h>

@interface ViewController ()<AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureMetadataOutputObjectsDelegate>

// Session used by -capture and -setupAVFoundation.
@property (nonatomic, strong) AVCaptureSession *session;
// NOTE(review): declared but never read or written by this file — the
// sample-buffer queue in -capture is a local. Consider removing.
@property (nonatomic, strong) dispatch_queue_t queue;
// Preview layer installed by -setupAVFoundation.
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;

// Separate session used by -setupAVFoundation2; created lazily in the
// -captureSession getter below.
@property (nonatomic, strong) AVCaptureSession *captureSession;

@end

@implementation ViewController

- (void)viewDidLoad {
    [super viewDidLoad];

    // Three alternative capture demos; enable exactly one at a time.
//    [self capture];
//    [self setupAVFoundation];
    [self setupAVFoundation2];
}

// Demo 1: raw video-frame capture via AVCaptureVideoDataOutput.
// Frames are delivered to -captureOutput:didOutputSampleBuffer:fromConnection:
// on a private serial queue.
- (void)capture
{
    // 1. Create the AVCaptureSession and retain it on the controller.
    //    The original used a method-local session, which ARC released as soon
    //    as this method returned — silently stopping capture.
    self.session = [[AVCaptureSession alloc] init];

    // 2. Default video device (the back camera on iPhone).
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    // 3. Wrap the device in an input and attach it to the session.
    //    Check the result instead of adding a possibly-nil input.
    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!input) {
        NSLog(@"%@", error);
        return;
    }
    if ([self.session canAddInput:input]) {
        [self.session addInput:input];
    }

    // 4. Create the video-data output.
    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];

    // 5. Deliver sample buffers on a dedicated serial queue, and attach the
    //    output to the session. The original configured the delegate but never
    //    called -addOutput:, so the delegate callback never fired.
    dispatch_queue_t videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL);
    [output setSampleBufferDelegate:self queue:videoDataOutputQueue];
    if ([self.session canAddOutput:output]) {
        [self.session addOutput:output];
    }

    // 6. Start capturing; frames can now be analyzed in
    //    -captureOutput:didOutputSampleBuffer:fromConnection:.
    [self.session startRunning];
}


// Demo 2: QR-code scanning via AVCaptureMetadataOutput, with an on-screen
// camera preview. Recognized codes arrive on the main queue in
// -captureOutput:didOutputMetadataObjects:fromConnection:.
- (void)setupAVFoundation
{
    // Session
    self.session = [[AVCaptureSession alloc] init];

    // Input: default (back) camera.
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (input) {
        [self.session addInput:input];
    } else {
        NSLog(@"%@", error);
        return;
    }

    // Output: metadata (QR codes only). The output must be attached to the
    // session BEFORE -setMetadataObjectTypes:, otherwise its set of available
    // types is empty and the call raises.
    AVCaptureMetadataOutput *output = [[AVCaptureMetadataOutput alloc] init];
    if ([self.session canAddOutput:output]) {
        [self.session addOutput:output];
    }
    [output setMetadataObjectTypes:@[AVMetadataObjectTypeQRCode]];
    [output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];

    // Preview layer. The original never assigned a frame, so the layer had
    // zero size and nothing was visible on screen.
    self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
    self.previewLayer.frame = self.view.bounds;
    [self.view.layer addSublayer:self.previewLayer];

    // Start
    [self.session startRunning];
}


// Lazily builds the capture session used by -setupAVFoundation2, configured
// with the still-photo-quality preset.
- (AVCaptureSession *)captureSession
{
    if (_captureSession == nil) {
        AVCaptureSession *session = [[AVCaptureSession alloc] init];
        session.sessionPreset = AVCaptureSessionPresetPhoto;
        _captureSession = session;
    }
    return _captureSession;
}

// Demo 3: audio + video capture into the lazily-created self.captureSession,
// with a full-screen preview layer.
- (void)setupAVFoundation2
{
    // IMPORTANT: go through the property, not the _captureSession ivar.
    // The session is created lazily in the -captureSession getter; the
    // original read the ivar directly while it was still nil, so every
    // canAddInput:/addInput: was a no-op on nil and the preview layer was
    // attached to a nil session.
    AVCaptureSession *session = self.captureSession;

    // Video input: default back camera.
    // ([AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] would list all
    // cameras and external devices; the default suffices for this demo.)
    AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *videoError = nil;
    AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&videoError];
    if (videoInput) {
        if ([session canAddInput:videoInput]) {
            [session addInput:videoInput];
        }
    } else {
        NSLog(@"%@", videoError);
    }

    // Audio input.
    AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    NSError *audioError = nil;
    AVCaptureDeviceInput *audioIn = [[AVCaptureDeviceInput alloc] initWithDevice:audioDevice error:&audioError];
    if (audioIn && [session canAddInput:audioIn]) {
        [session addInput:audioIn];
    } else if (audioError) {
        NSLog(@"%@", audioError);
    }

    // Full-screen preview layer.
    AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    previewLayer.frame = self.view.bounds;
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspect;
    [self.view.layer addSublayer:previewLayer];

    // The original never started the session, so no frames were ever produced.
    [session startRunning];
}


#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate

// Invoked once per captured frame, on the serial queue passed to
// -setSampleBufferDelegate:queue: in -capture.
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    CMSampleBufferRef frame = sampleBuffer;
    NSLog(@"%@", frame);
}


#pragma mark - AVCaptureMetadataOutputObjectsDelegate

// Delivered on the main queue (configured in -setupAVFoundation) whenever the
// metadata output recognizes objects — QR codes only, per the types set there.
// Currently just logs; the commented-out sketch below shows the intended
// handling (kept from the original for reference).
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
    NSLog(@"%@", metadataObjects);
//    for (AVMetadataMachineReadableCodeObject *metadata in metadataObjects) {
//        if ([metadata.type isEqualToString:AVMetadataObjectTypeQRCode]) {
//            self.borderView.hidden = NO;
//            if ([metadata.stringValue isURL])
//            {
//                [[UIApplication sharedApplication] openURL:[NSString HTTPURLFromString:metadata.stringValue]];
//                [self insertURLEntityWithURL:metadata.stringValue];
//                self.stringLabel.text = metadata.stringValue;
//            }
//            else
//            {
//                self.stringLabel.text = metadata.stringValue;
//            }
//        }
//    }
}

@end
