////
////  VideoDemoViewController.m
////  FaceRtmpDemo
////
////  Created by acewei on 2021/10/27.
////
//
//#import "VideoDemoViewController.h"
//#import <AVFoundation/AVFoundation.h>
//#import <VideoToolbox/VideoToolbox.h>
//
//@interface VideoDemoViewController ()<AVCaptureVideoDataOutputSampleBufferDelegate>
//{
//    int frameNO;//帧号
//    //录制队列
//    dispatch_queue_t captureQueue;
//    
//    //编码队列
//    dispatch_queue_t encodeQueue;
//    
//    //编码session
//    VTCompressionSessionRef encodingSession;
//}
//
//@property (nonatomic, strong) AVCaptureSession *captureSession;
//@property (nonatomic, strong) AVCaptureVideoDataOutput *videoDataOutput;
//@property (nonatomic, strong) dispatch_queue_t videoDataOutputQueue;
//@property (nonatomic, strong) dispatch_queue_t encodeQueue;
//
//@end
//
//@implementation VideoDemoViewController
//
//- (void)viewDidLoad {
//    [super viewDidLoad];
//    // Do any additional setup after loading the view.
//    
//    [self createCamera];
//    [self initVideoToolBox];
//    
//    [self.captureSession startRunning];
//}
//
//- (void)createCamera
//{
//    //创建AVCaptureDevice的视频设备对象
//    AVCaptureDevice* videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
//    
//    NSError* error;
//    //创建视频输入端对象
//    AVCaptureDeviceInput* input = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
//    if (error) {
//        NSLog(@"创建输入端失败,%@",error);
//        return;
//    }
//    
//    //创建功能会话对象
//    self.captureSession = [[AVCaptureSession alloc] init];
//    //设置会话输出的视频分辨率
//    [self.captureSession setSessionPreset:AVCaptureSessionPreset1280x720];
//    
//    //添加输入端
//    if (![self.captureSession canAddInput:input]) {
//        NSLog(@"输入端添加失败");
//        return;
//    }
//    [self.captureSession addInput:input];
//    
//    //显示摄像头捕捉到的数据
//    AVCaptureVideoPreviewLayer* layer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
//    layer.frame = CGRectMake(0, 0, self.view.frame.size.width, self.view.frame.size.height - 100);
//    [self.view.layer addSublayer:layer];
//    
////    //创建输出端
////    AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
////    //会话对象添加输出端
////    if ([self.captureSession canAddOutput:videoDataOutput]) {
////        [self.captureSession addOutput:videoDataOutput];
////        self.videoDataOutput = videoDataOutput;
////        //创建输出调用的队列
////        dispatch_queue_t videoDataOutputQueue = dispatch_queue_create("videoDataOutputQueue", DISPATCH_QUEUE_SERIAL);
////        self.videoDataOutputQueue = videoDataOutputQueue;
////        //设置代理和调用的队列
////        [self.videoDataOutput setSampleBufferDelegate:self queue:videoDataOutputQueue];
////        //设置延时丢帧
////        self.videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
////    }
//}
//
//- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
//{
//    @1;
//    NSLog(@"%@", sampleBuffer);
//    
////    sampleBuffer
////    [self encode:sampleBuffer];
//    
//    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
//    CIImage *sourceImage = [CIImage imageWithCVPixelBuffer:(CVPixelBufferRef)imageBuffer options:nil];
//    CGRect sourceExtent = sourceImage.extent;
//    
//    
//    // Image processing
//    CIFilter * vignetteFilter = [CIFilter filterWithName:@"CIVignetteEffect"];
//    [vignetteFilter setValue:sourceImage forKey:kCIInputImageKey];
//    [vignetteFilter setValue:[CIVector vectorWithX:sourceExtent.size.width/2 Y:sourceExtent.size.height/2] forKey:kCIInputCenterKey];
//    [vignetteFilter setValue:@(sourceExtent.size.width/2) forKey:kCIInputRadiusKey];
//    CIImage *filteredImage = [vignetteFilter outputImage];
//    CIFilter *effectFilter = [CIFilter filterWithName:@"CIPhotoEffectInstant"];
//    [effectFilter setValue:filteredImage forKey:kCIInputImageKey];
//    filteredImage = [effectFilter outputImage];
//    
//}
//
//
//- (void)initVideoToolBox
//{
////    if (!self.encodeQueue) {
////        self.encodeQueue = dispatch_queue_create("encodeQueue", DISPATCH_QUEUE_SERIAL);
////    }
////
////    dispatch_queue_t encodeQueue = self.encodeQueue;
//    
//    encodeQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);
//    
//    dispatch_sync(encodeQueue, ^{
//        int width = 480, height = 640;
//        
//        OSStatus status = VTCompressionSessionCreate(NULL, width, height, kCMVideoCodecType_H264, NULL, NULL, NULL, didCompressH264, (__bridge void *)(self),  &encodingSession);
//        NSLog(@"H264: VTCompressionSessionCreate %d", (int)status);
//        
//        if (status != 0) {
//            NSLog(@"H264: Unable to create a H264 session");
//            return;
//        }
//        
//        // 设置实时编码输出（避免延迟）
//        VTSessionSetProperty(encodingSession, kVTCompressionPropertyKey_RealTime, kCFBooleanTrue);
//        VTSessionSetProperty(encodingSession, kVTCompressionPropertyKey_ProfileLevel, kVTProfileLevel_H264_Baseline_AutoLevel);
//        
//        // 设置关键帧（GOPsize)间隔
//        int frameInterval = 24;
//        CFNumberRef frameIntervalRef = CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &frameInterval);
//        VTSessionSetProperty(encodingSession, kVTCompressionPropertyKey_MaxKeyFrameInterval, frameIntervalRef);
//        
//        //设置期望帧率
//        int fps = 24;
//        CFNumberRef fpsRef = CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &fps);
//        VTSessionSetProperty(encodingSession, kVTCompressionPropertyKey_ExpectedFrameRate, fpsRef);
//        
//        //设置码率均值（注意：kVTCompressionPropertyKey_AverageBitRate 的单位是 bit/s，不是 byte）
//        int bitRate = width * height * 3 * 4 * 8;
//        CFNumberRef bitRateRef = CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &bitRate);
//        VTSessionSetProperty(encodingSession, kVTCompressionPropertyKey_AverageBitRate, bitRateRef);
//        
//        //设置码率上限（NOTE(review)：kVTCompressionPropertyKey_DataRateLimits 期望的是 [字节数, 秒数] 成对的 CFArray，单位是 byte/窗口；下面只传了单个 CFNumber，若恢复此代码需改为 CFArray——待确认）
//        int bitRateLimit = width * height * 3 * 4;
//        CFNumberRef bitRateLimitRef = CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &bitRateLimit);
//        VTSessionSetProperty(encodingSession, kVTCompressionPropertyKey_DataRateLimits, bitRateLimitRef);
//        
//        //开始编码
//        VTCompressionSessionPrepareToEncodeFrames(encodingSession);
//    });
//        
////        OSStatus status = VTCompressionSessionCreate(NULL, width, height, kCMVideoCodecType_H264, NULL, NULL, NULL, didCompressH264, (__bridge void *)(self),  &encodingSession);
//                
//}
//
////编码sampleBuffer
//- (void)encode:(CMSampleBufferRef )sampleBuffer
//{
//    CVImageBufferRef imageBuffer = (CVImageBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer);
//    // 帧时间，如果不设置会导致时间轴过长。
//    CMTime presentationTimeStamp = CMTimeMake(frameNO++, 1000);
//    VTEncodeInfoFlags flags;
//    OSStatus statusCode = VTCompressionSessionEncodeFrame(encodingSession,
//                                                          imageBuffer,
//                                                          presentationTimeStamp,
//                                                          kCMTimeInvalid,
//                                                          NULL, NULL, &flags);
//    if (statusCode != noErr) {
//        NSLog(@"H264: VTCompressionSessionEncodeFrame failed with %d", (int)statusCode);
//        
//        VTCompressionSessionInvalidate(encodingSession);
//        CFRelease(encodingSession);
//        encodingSession = NULL;
//        return;
//    }
//    NSLog(@"H264: VTCompressionSessionEncodeFrame Success");
//}
//
//// 编码完成回调
//void didCompressH264(void * CM_NULLABLE outputCallbackRefCon,
//                     void * CM_NULLABLE sourceFrameRefCon,
//                     OSStatus status,
//                     VTEncodeInfoFlags infoFlags,
//                     CM_NULLABLE CMSampleBufferRef sampleBuffer) {
//    @1;
//}
//
//@end
