//
//  QHAVFVideoCapture.m
//  QHWhiteBoardMan
//
//  Created by qihuichen on 2022/10/31.
//

#import "QHAVFVideoCapture.h"

#define kVideoCaptureQueue @"com.qh.video.captureQueue"

// Class extension: private capture-pipeline state, kept out of the public header.
@interface QHAVFVideoCapture () <AVCaptureVideoDataOutputSampleBufferDelegate>

// Serial queue on which sample-buffer delegate callbacks are delivered.
@property (nonatomic) dispatch_queue_t vQueue;
// YES between -start and -stop.
@property (nonatomic, readwrite) BOOL running;

// Core AVFoundation objects; created lazily in p_setupAVF on first -start.
@property (nonatomic, strong) AVCaptureSession *cSession;
@property (nonatomic, strong) AVCaptureDevice *vDevice;
@property (nonatomic, strong) AVCaptureDeviceInput *vInput;
@property (nonatomic, strong) AVCaptureVideoDataOutput *vOutput;

// Selected camera position (defaults to front when no preset was supplied).
@property (nonatomic) AVCaptureDevicePosition vPosition;

// Preview layer built by p_setupPreview (currently not wired up in -start).
@property (nonatomic, strong, readwrite) AVCaptureVideoPreviewLayer *previewLayer;
@property (nonatomic, strong) UIView *preview;

@end

@implementation QHAVFVideoCapture

/// Default initializer; defaults (preset/position/pixel format) are filled in
/// later by p_defaultAVF via p_setup.
- (instancetype)init {
    self = [super init];
    if (self) {
        [self p_setup];
    }
    return self;
}

/// Initializes capture with an explicit session preset and camera position.
/// @param vPreset   Capture session preset (e.g. AVCaptureSessionPresetiFrame960x540).
/// @param vPosition Desired camera position (front/back).
- (instancetype)initWith:(NSString *)vPreset cameraPosition:(AVCaptureDevicePosition)vPosition {
    self = [super init];
    if (self) {
        // Assign ivars directly: avoid calling accessors on self from init.
        _vPreset = vPreset;
        _vPosition = vPosition;
        [self p_setup];
    }
    return self;
}

#pragma mark - Public

/// Starts capturing, lazily building the AVFoundation pipeline on first use.
- (void)start {
    // Idempotence guard: a second -start while running would redundantly hit
    // the (blocking) -startRunning call again.
    if (self.running) {
        return;
    }
    self.running = YES;
    if (_cSession == nil) {
        [self p_setupAVF];
//        [self p_setupPreview];
    }
    // NOTE(review): -startRunning blocks until the session is up; callers
    // should invoke -start off the main thread.
    [self.cSession startRunning];
}

/// Stops capturing and discards the session so the next -start rebuilds it.
- (void)stop {
    AVCaptureSession *session = self.cSession;
    [session stopRunning];
    self.cSession = nil;
    self.running = NO;
}

// TODO(review): not implemented — pausing capture is currently a no-op.
- (void)pause {
    
}

// TODO(review): not implemented — resuming capture is currently a no-op.
- (void)resume {
    
}

// TODO(review): not implemented — switching front/back camera is a no-op.
- (void)rotateCamera {
    
}

// TODO(review): not implemented — toggling the torch is currently a no-op.
- (void)toggleTorch {
    
}

#pragma mark - Private

/// Shared initialization entry point used by both initializers.
- (void)p_setup {
    [self p_setupData];
}

/// One-time state setup: creates the capture callback queue and applies
/// default AVFoundation settings.
- (void)p_setupData {
    // UTF8String is the idiomatic NSString -> C-string conversion for a queue
    // label (identical bytes here, since the label is pure ASCII).
    _vQueue = dispatch_queue_create(kVideoCaptureQueue.UTF8String, DISPATCH_QUEUE_SERIAL);
    _running = NO;
    
    [self p_defaultAVF];
}

/// Fills in defaults when no preset was supplied to the initializer:
/// 960x540 iFrame preset, front camera, BGRA output.
- (void)p_defaultAVF {
    BOOL presetMissing = (_vPreset == nil);
    if (presetMissing) {
        _vPreset = AVCaptureSessionPresetiFrame960x540;
        _vPosition = AVCaptureDevicePositionFront;
    }
    // BGRA output; the NV12 alternative would be
    // kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange.
    _outputPixelFmt = kCVPixelFormatType_32BGRA;
}

/// Builds the capture pipeline: session, camera device/input, BGRA video data
/// output on the serial queue, portrait orientation, front-camera mirroring,
/// and finally the frame-rate configuration.
- (void)p_setupAVF {
    _cSession = [[AVCaptureSession alloc] init];
    [self.cSession setSessionPreset:self.vPreset];
    
    AVCaptureDevicePosition position = self.vPosition;
    AVCaptureDeviceDiscoverySession *discovery =
        [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]
                                                               mediaType:AVMediaTypeVideo
                                                                position:position];
    
    for (AVCaptureDevice *device in discovery.devices) {
        if (device.position == position) {
            _vDevice = device;
            // Surface input-creation failures instead of passing error:nil.
            NSError *inputError = nil;
            _vInput = [[AVCaptureDeviceInput alloc] initWithDevice:device error:&inputError];
            if (_vInput == nil) {
                NSLog(@"%s-input error: %@", __func__, inputError);
            }
            break;
        }
    }
    
    if (_vDevice == nil) {
        NSLog(@"%s-device error", __func__);
        return;
    }
    
    if ([self.cSession canAddInput:self.vInput]) {
        [self.cSession addInput:self.vInput];
    }
    
    _vOutput = [[AVCaptureVideoDataOutput alloc] init];
    // Drop late frames rather than queueing them when processing falls behind.
    self.vOutput.alwaysDiscardsLateVideoFrames = YES;
    self.vOutput.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey: @(self.outputPixelFmt)};
    [self.vOutput setSampleBufferDelegate:self queue:self.vQueue];
    
    if ([self.cSession canAddOutput:self.vOutput]) {
        [self.cSession addOutput:self.vOutput];
    }
    
    // The connection links input and output; force portrait orientation and
    // mirror the front camera so the preview matches user expectation.
    AVCaptureConnection *connection = [self.vOutput connectionWithMediaType:AVMediaTypeVideo];
    if (connection.isVideoOrientationSupported && connection.videoOrientation != AVCaptureVideoOrientationPortrait) {
        connection.videoOrientation = AVCaptureVideoOrientationPortrait;
    }
    if (position == AVCaptureDevicePositionFront && connection.isVideoMirroringSupported) {
        connection.videoMirrored = YES;
    }
    
    [self p_setupFrameDuration];
}

/// Applies self.frameRate to the capture device, switching activeFormat when
/// the requested rate exceeds 30 fps.
/// @return YES when the frame duration was applied, NO otherwise.
- (BOOL)p_setupFrameDuration {
    // Check the lock result instead of passing error:nil — configuring an
    // unlocked device throws.
    NSError *lockError = nil;
    if (![self.vDevice lockForConfiguration:&lockError]) {
        NSLog(@"%s-lock error: %@", __func__, lockError);
        return NO;
    }
    
    int32_t fps = self.frameRate;
    
    // 1. Convert the frame rate into a per-frame duration.
    CMTime frameDuration = CMTimeMake(1, fps);
    
    // 2. Above 30 fps, find a device format matching the preset's pixel count
    //    whose supported frame-rate range covers the requested rate.
    if (fps > 30) {
        CMVideoDimensions target = [self vPreset2Size:self.vPreset];
        for (AVCaptureDeviceFormat *vFormat in self.vDevice.formats) {
            CMVideoDimensions dims = CMVideoFormatDescriptionGetDimensions(vFormat.formatDescription);
            // firstObject is nil-safe on an empty ranges array (maxFrameRate then reads 0).
            float maxRate = ((AVFrameRateRange *)vFormat.videoSupportedFrameRateRanges.firstObject).maxFrameRate;
            if (maxRate >= fps && target.width * target.height == dims.width * dims.height) {
                self.vDevice.activeFormat = vFormat;
                break;
            }
        }
    }
    
    // 3. Apply the duration only if it lies within one of the active format's
    //    supported frame-rate ranges.
    BOOL support = NO;
    for (AVFrameRateRange *range in self.vDevice.activeFormat.videoSupportedFrameRateRanges) {
        if (CMTimeCompare(frameDuration, range.minFrameDuration) >= 0 &&
            CMTimeCompare(frameDuration, range.maxFrameDuration) <= 0) {
            support = YES;
            break;
        }
    }
    if (support) {
        [self.vDevice setActiveVideoMinFrameDuration:frameDuration];
        [self.vDevice setActiveVideoMaxFrameDuration:frameDuration];
    }
    
    // Bug fix: the original returned YES on success WITHOUT unlocking,
    // leaking the device configuration lock.
    [self.vDevice unlockForConfiguration];
    return support;
}

/// Creates a preview layer bound to the capture session, filling its bounds.
- (void)p_setupPreview {
    AVCaptureVideoPreviewLayer *layer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.cSession];
    layer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    _previewLayer = layer;
}

#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate

/// Sample-buffer delegate callback; invoked on self.vQueue for every frame.
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    // Bug fix: invoking a nil block crashes — guard before calling.
    if (self.vProcessingCallback) {
        self.vProcessingCallback(sampleBuffer);
    }
}

#pragma mark - Uitl

/// Maps a session preset to its portrait pixel dimensions.
/// The original hard-coded 540x960 for every preset; known presets are now
/// mapped explicitly, with 540x960 kept as the fallback (the class default,
/// AVCaptureSessionPresetiFrame960x540), so existing behavior is preserved.
- (CMVideoDimensions)vPreset2Size:(AVCaptureSessionPreset)vPreset {
    CMVideoDimensions size;
    if ([vPreset isEqualToString:AVCaptureSessionPreset640x480]) {
        size.width = 480;
        size.height = 640;
    } else if ([vPreset isEqualToString:AVCaptureSessionPreset1280x720]) {
        size.width = 720;
        size.height = 1280;
    } else if ([vPreset isEqualToString:AVCaptureSessionPreset1920x1080]) {
        size.width = 1080;
        size.height = 1920;
    } else {
        // AVCaptureSessionPresetiFrame960x540 and any unrecognized preset.
        size.width = 540;
        size.height = 960;
    }
    return size;
}

#pragma mark - Get

// Equivalent to the auto-synthesized setter; kept for explicitness.
// NOTE(review): since the preset is an NSString, `copy` semantics would be
// the conventional choice for the property — confirm against the header.
- (void)setVPreset:(AVCaptureSessionPreset)vPreset {
    _vPreset = vPreset;
}

@end
