//
//  CaptureView.m
//  VideoChat
//
//  Created by user on 2019/3/20.
//  Copyright © 2019 烧烤有点辣. All rights reserved.
//

#import "CaptureView.h"

@interface CaptureView ()<AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate>

// Capture session owning all inputs/outputs; started/stopped via starVideo/stopVideo.
@property (nonatomic, strong) AVCaptureSession *session;
// Current camera input; replaced when the camera is flipped (swipButtonClick:).
@property (nonatomic, strong) AVCaptureDeviceInput *captureDeviceInput;
@property (strong, nonatomic) AVCaptureVideoDataOutput *captureMovieFileOutput;// video data output (raw sample buffers, despite the "MovieFile" name)
@property (strong, nonatomic) AVCaptureVideoPreviewLayer *captureVideoPreviewLayer;// live camera preview layer

/** Audio connection — used in the delegate callback to tag buffers as audio. */
@property (nonatomic, strong) AVCaptureConnection *audioConnection;
/** Video connection — used in the delegate callback to tag buffers as video. */
@property (nonatomic, strong) AVCaptureConnection *videoConnection;

@end

@implementation CaptureView

/**
 *  Designated convenience initializer: stores the sample-buffer callback and
 *  builds the whole capture pipeline (session, inputs, outputs, preview layer).
 *
 *  @param didOutputSampleBufferBlock invoked for every captured audio/video buffer
 */
- (instancetype)initWithFrame:(CGRect)frame DidOutputSampleBufferBlock:(CallBackdidOutputSampleBuffer)didOutputSampleBufferBlock {
    
    self = [super initWithFrame:frame];
    if (self) {
        self.didOutputSampleBufferBlock = didOutputSampleBufferBlock;
        [self setUI];
    }
    return self;
}

// Starts capture. NOTE(review): -startRunning blocks until the session is up;
// consider calling this off the main thread. (Method name typo "starVideo" is
// part of the public header and kept for compatibility.)
- (void)starVideo {
    [self.session startRunning];
}

// Stops capture; no more delegate callbacks will fire until starVideo is called again.
- (void)stopVideo {
    [self.session stopRunning];
}

/**
 *  Builds the capture pipeline: session + camera/mic inputs + video/audio data
 *  outputs + preview layer, and caches the audio/video connections used to tag
 *  buffers in the delegate callback.
 *
 *  Fixes vs. original:
 *  - Delegate queues are now SERIAL: AVCapture*DataOutput requires a serial
 *    queue for in-order sample delivery (the global concurrent queue gives no
 *    ordering guarantee — the comment below already demanded this).
 *  - commitConfiguration was previously called without a matching
 *    beginConfiguration; the pair is now balanced on every exit path.
 *  - Input creation is checked via its nil return value, not the error
 *    pointer (which may be non-nil even on success).
 */
- (void)setUI {
    
    // Serial queues guarantee sample buffers arrive in capture order.
    dispatch_queue_t captureQueue = dispatch_queue_create("com.videochat.capture.video", DISPATCH_QUEUE_SERIAL);
    dispatch_queue_t audioQueue = dispatch_queue_create("com.videochat.capture.audio", DISPATCH_QUEUE_SERIAL);
    
    _session = [[AVCaptureSession alloc] init];
    
    // Batch all configuration changes into one atomic update.
    [_session beginConfiguration];
    
    if ([_session canSetSessionPreset:AVCaptureSessionPreset1280x720]) {// set resolution
        _session.sessionPreset = AVCaptureSessionPreset1280x720;
    }
    
    NSError *error = nil;
    
    // Back camera as the initial video input device.
    AVCaptureDevice *captureDevice = [self getCameraDeviceWithPosition:AVCaptureDevicePositionBack];
    if (!captureDevice) {
        NSLog(@"取得后置摄像头时出现问题.");
        [_session commitConfiguration]; // balance beginConfiguration before bailing out
        return;
    }
    
    // Wrap the device in an input object; check the nil return, not `error`.
    _captureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:captureDevice error:&error];
    if (!_captureDeviceInput) {
        NSLog(@"取得设备输入对象时出错，错误原因：%@",error.localizedDescription);
        [_session commitConfiguration];
        return;
    }
    
    if ([_session canAddInput:_captureDeviceInput]) {
        [_session addInput:_captureDeviceInput];
    }
    
    // Microphone input.
    AVCaptureDevice *audioCaptureDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];
    AVCaptureDeviceInput *audioCaptureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioCaptureDevice error:&error];
    if (!audioCaptureDeviceInput) {
        NSLog(@"取得设备输入对象时出错，错误原因：%@",error.localizedDescription);
        [_session commitConfiguration];
        return;
    }
    if ([_session canAddInput:audioCaptureDeviceInput]) {
        [_session addInput:audioCaptureDeviceInput];
    }
    
    // Video data output for raw sample buffers.
    _captureMovieFileOutput = [[AVCaptureVideoDataOutput alloc] init];
    /**
     If the queue is blocked, newly arriving frames are dropped automatically
     (default alwaysDiscardsLateVideoFrames = YES). This lets the app process
     the current frame without queued frames accumulating unbounded memory when
     processing can't keep up with capture. A serial queue must be used so the
     frame sequence stays in order.
     */
    // NOTE(review): NO contradicts the rationale above — late frames will queue
    // up and grow memory if processing falls behind. Kept as-is to preserve
    // existing capture behavior; confirm whether YES is intended.
    [_captureMovieFileOutput setAlwaysDiscardsLateVideoFrames:NO];
    
    // Deliver NV12 full-range pixel buffers (common for hardware encoders).
    [_captureMovieFileOutput setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)}];
    
    [_captureMovieFileOutput setSampleBufferDelegate:self queue:captureQueue];
    
    // Audio data output + delegate on its own serial queue.
    AVCaptureAudioDataOutput *audioOutput = [[AVCaptureAudioDataOutput alloc] init];
    [audioOutput setSampleBufferDelegate:self queue:audioQueue];
    
    if ([self.session canAddOutput:audioOutput]) {
        [self.session addOutput:audioOutput];
    }
    
    if ([_session canAddOutput:_captureMovieFileOutput]) {
        [_session addOutput:_captureMovieFileOutput];
    }
    
    // Live preview layer filling this view.
    _captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
    
    CALayer *layer = self.layer;
    layer.masksToBounds = YES;
    
    _captureVideoPreviewLayer.frame = layer.bounds;
    _captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;// fill mode
    [layer addSublayer:_captureVideoPreviewLayer];
    
    // Cache the connections so the delegate can tell video buffers from audio.
    self.videoConnection = [_captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
    [self.videoConnection setVideoOrientation:AVCaptureVideoOrientationPortrait];
    
    self.audioConnection = [audioOutput connectionWithMediaType:AVMediaTypeAudio];
    
    [_session commitConfiguration];
}

/**
 *  Flips between front and back cameras.
 *
 *  Fixes vs. original:
 *  - Guards against a nil device/input ([session addInput:nil] throws).
 *  - Wraps the swap in begin/commitConfiguration so it applies atomically.
 *  - Checks canAddInput: and restores the previous input on failure, so the
 *    session is never left with no video input at all.
 *  - Refreshes videoConnection afterwards: the old connection dies with the
 *    removed input, and the delegate's pointer comparison would otherwise
 *    stop matching video buffers after a flip.
 *
 *  @param sender the flip-camera button (unused beyond being the action sender)
 */
- (void)swipButtonClick:(UIButton *)sender {
    
    // Toggle the target position based on the current input's camera.
    AVCaptureDevicePosition currentPosition = _captureDeviceInput.device.position;
    AVCaptureDevicePosition changePosition = (currentPosition == AVCaptureDevicePositionFront) ? AVCaptureDevicePositionBack : AVCaptureDevicePositionFront;
    
    AVCaptureDevice *changeDevice = [self getCameraDeviceWithPosition:changePosition];
    if (!changeDevice) {
        return; // no camera at the requested position (e.g. simulator)
    }
    
    NSError *error = nil;
    AVCaptureDeviceInput *changeDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:changeDevice error:&error];
    if (!changeDeviceInput) {
        NSLog(@"取得设备输入对象时出错，错误原因：%@", error.localizedDescription);
        return;
    }
    
    [self.session beginConfiguration];
    
    [self.session removeInput:_captureDeviceInput];
    
    if ([self.session canAddInput:changeDeviceInput]) {
        [self.session addInput:changeDeviceInput];
        _captureDeviceInput = changeDeviceInput;
    } else {
        // Re-add the previous input so capture keeps running.
        [self.session addInput:_captureDeviceInput];
    }
    
    // The video connection is tied to the input; re-fetch it and restore
    // orientation so the delegate keeps recognizing video buffers.
    self.videoConnection = [_captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
    [self.videoConnection setVideoOrientation:AVCaptureVideoOrientationPortrait];
    
    [self.session commitConfiguration];
}


// Delegate callback for both data outputs (called on their serial queues).
// Tags the buffer with the media type of the connection it arrived on and
// forwards it to the client block; buffers from unknown connections are ignored.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    
    if (!self.didOutputSampleBufferBlock) {
        return;
    }
    
    NSString *mediaType = nil;
    if (connection == self.videoConnection) {
        mediaType = AVMediaTypeVideo;
    } else if (connection == self.audioConnection) {
        mediaType = AVMediaTypeAudio;
    }
    
    if (mediaType) {
        self.didOutputSampleBufferBlock(sampleBuffer, mediaType);
    }
}

#pragma mark - 私有方法

/**
 *  取得指定位置的摄像头
 *
 *  @param position 摄像头位置
 *
 *  @return 摄像头设备
 */
/**
 *  Returns the camera device at the given position.
 *
 *  @param position the desired camera position (front/back)
 *
 *  @return the first matching video device, or nil if none exists
 */
- (AVCaptureDevice *)getCameraDeviceWithPosition:(AVCaptureDevicePosition)position {
    NSArray<AVCaptureDevice *> *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    NSUInteger matchIndex = [videoDevices indexOfObjectPassingTest:^BOOL(AVCaptureDevice *device, NSUInteger idx, BOOL *stop) {
        return device.position == position;
    }];
    return (matchIndex == NSNotFound) ? nil : videoDevices[matchIndex];
}

@end
