//
//  RealPopView.m
//  markmap
//
//  Created by 甘世清 on 2025/4/1.
//

#import "RealPopView.h"

@implementation RealPopView

/// Builds the face-verification screen: a title, a status message label,
/// a close button, and a circular countdown badge. Starts a 3-second
/// countdown; when it finishes, the badge is hidden and the capture
/// session is configured and started via -setAVCaptureSession.
- (void)SQ_UIViewForSelfView{
    self.backgroundColor = UIColor.blackColor;
    
    // Title label, centered horizontally near the top of the view.
    UILabel * label = [[ UILabel alloc]init];
    label.text = SQNSLocalString(@"人脸认证");
    label.textColor = SQColor333333;
    label.font = FONT_MEDIUM(16);
    [self addSubview:label];
    [label mas_makeConstraints:^(MASConstraintMaker *make) {
            make.centerX.equalTo(self);
            make.top.equalTo(self).offset(TopHeight-24);
    }];
    
    // Serial queue that will receive video sample buffers (see
    // -setAVCaptureSession, where it is passed as the delegate queue).
    _sample = dispatch_queue_create("sample", NULL);
    // Start with an empty face-metadata snapshot so the sample-buffer
    // callback can iterate it safely before any faces are detected.
    self.metadatas = [NSArray new];
  

    // Status message shown below the (square, SCREEN_WIDETH-sized) preview area.
    [self addSubview:self.messagelabel];
    [self.messagelabel mas_makeConstraints:^(MASConstraintMaker *make) {
            make.width.mas_equalTo(SCREEN_WIDETH-60);
            make.top.equalTo(self).offset(SCREEN_WIDETH+130);
        make.centerX.equalTo(self);
    }];
    // Close button pinned to the right of the title row.
    [self addSubview:self.deteleBtn];
    [self.deteleBtn mas_makeConstraints:^(MASConstraintMaker *make) {
        make.size.mas_equalTo(50);
        make.centerY.equalTo(label);
        make.right.equalTo(self.mas_right).offset(-10);
    }];
    // 200x200 circular countdown badge; its frame matches the preview
    // layer created later in -setAVCaptureSession, so it sits exactly
    // where the camera feed will appear.
    UILabel * number = [[UILabel alloc]initWithFrame:CGRectMake(SCREEN_WIDETH/2.f-100, 120, 200, 200)];
    number.text = SQNSLocalString(@"3");
    number.font = FONT_SEMIBOLD(60);
    number.layer.cornerRadius = 100;
    number.layer.masksToBounds = YES;
    number.layer.borderWidth = 6;
    number.layer.borderColor = SQColor674FD1.CGColor;
    number.backgroundColor = SQColorF5F5F5;
    number.textColor = SQColor333333;
    number.textAlignment = NSTextAlignmentCenter;
    [self addSubview:number];
    WS(weakSelf);
    // 3-second countdown. NOTE(review): the identifier is @"" — if LSTTimer
    // keys timers by identifier, two overlapping instances of this view
    // would collide; confirm against LSTTimer's contract.
    [LSTTimer addTimerForTime:3 identifier:@"" handle:^(NSString * _Nonnull day, NSString * _Nonnull hour, NSString * _Nonnull minute, NSString * _Nonnull second, NSString * _Nonnull ms) {
        // `second` arrives zero-padded ("03","02","01","00"); stripping "0"
        // yields "3","2","1","" — the empty string is replaced with the
        // localized "start" caption below.
        number.text = [second stringByReplacingOccurrencesOfString:@"0" withString:@""];
        if (![number.text isNotEmpty]) {
            number.text = SQNSLocalString(@"开始");
        }
    } finish:^(NSString * _Nonnull identifier) {
        // Countdown done: hide the badge and bring up the camera.
        number.hidden = YES;
        [weakSelf setAVCaptureSession];
       
    } pause:^(NSString * _Nonnull identifier) {
        
    }];
   
}
/// Configures and starts the capture pipeline: front camera input, a
/// 32BGRA video-data output delivered on the serial `_sample` queue, a
/// face-metadata output on its own serial queue, and a circular
/// AVCaptureVideoPreviewLayer inserted behind the subviews.
- (void) setAVCaptureSession{
    // Find the front camera (nil on devices/simulators without one).
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    AVCaptureDevice *deviceF;
    for (AVCaptureDevice *device in devices )
    {
        if ( device.position == AVCaptureDevicePositionFront )
        {
            deviceF = device;
            break;
        }
    }
    if (deviceF == nil) {
        // No front camera available — bail out instead of crashing below.
        return;
    }
    // Create the device input, surfacing the error instead of passing nil.
    NSError *inputError = nil;
    AVCaptureDeviceInput *input = [[AVCaptureDeviceInput alloc] initWithDevice:deviceF error:&inputError];
    if (input == nil) {
        // Camera unavailable or permission denied.
        return;
    }
    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
    // Deliver sample buffers on the custom serial queue created in setup.
    [output setSampleBufferDelegate:self queue:_sample];
    self.session = [[AVCaptureSession alloc] init];
    [self.session beginConfiguration];
    // Add the camera input.
    if ([self.session canAddInput:input]) {
        [self.session addInput:input];
    }
    // 720p preset (matches the 720x1280 video rendered later).
    if ([self.session canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
        [self.session setSessionPreset:AVCaptureSessionPreset1280x720];
    }
    // Add the video-data output.
    if ([self.session canAddOutput:output]) {
        [self.session addOutput:output];
    }
    // 32BGRA pixel format — required by -imageFromPixelBuffer:'s
    // little-endian/alpha-first bitmap context.
    NSString     *key           = (NSString *)kCVPixelBufferPixelFormatTypeKey;
    NSNumber     *value         = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
    [output setVideoSettings:videoSettings];
    // Face-metadata output (use AVMetadataObjectTypeQRCode instead for QR scanning).
    AVCaptureMetadataOutput* metaOutput = [AVCaptureMetadataOutput new];
    if ([self.session canAddOutput:metaOutput]) {
        [self.session addOutput:metaOutput];
        // metadataObjectTypes may only contain types the session supports,
        // and must be set AFTER the output is added to the session.
        if ([metaOutput.availableMetadataObjectTypes containsObject:AVMetadataObjectTypeFace]) {
            [metaOutput setMetadataObjectTypes:@[AVMetadataObjectTypeFace]];
        }
        // Deliver face metadata on its own serial queue.
        [metaOutput setMetadataObjectsDelegate:self queue:dispatch_queue_create("face", NULL)];
        // rectOfInterest takes NORMALIZED coordinates in [0,1]; the original
        // passed self.bounds in points, which is out of range. Scan the
        // full frame (the documented default).
        metaOutput.rectOfInterest = CGRectMake(0, 0, 1, 1);
    }
    [self.session commitConfiguration];
    // Mirror the front-camera connections, otherwise the preview is flipped.
    for (AVCaptureOutput *anOutput in self.session.outputs) {
        for (AVCaptureConnection *connection in anOutput.connections) {
            if (connection.supportsVideoMirroring) {
                connection.videoOrientation = AVCaptureVideoOrientationPortrait;
                connection.videoMirrored = YES;
            }
        }
    }
    // Circular preview layer, sized/positioned to match the countdown badge.
    AVCaptureVideoPreviewLayer* previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    previewLayer.frame = CGRectMake(SCREEN_WIDETH/2.f-100, 120, 200, 200);
    previewLayer.cornerRadius = 100;
    previewLayer.masksToBounds = YES;
    previewLayer.borderWidth = 6;
    previewLayer.borderColor = SQColor674FD1.CGColor;
    [self.layer insertSublayer:previewLayer atIndex:0];
    self.previewLayer = previewLayer;
    // Start capturing. NOTE(review): -startRunning blocks the calling
    // thread; consider dispatching it to a background queue.
    [self.session startRunning];
    self.messagelabel.text = SQNSLocalString(@"请将脸部对准摄像头");
}
/// Teardown: make sure the capture session is not left running when the
/// view is deallocated, then log for leak-debugging purposes.
- (void)dealloc{
    AVCaptureSession *activeSession = self.session;
    [activeSession stopRunning]; // no-op if session is nil or already stopped
    DTLog(@"是否释放----111");
}

#pragma mark - AVCaptureSession Delegate -
///摄像头输出流
/// Video-data output callback (runs on the serial `_sample` queue).
/// While at least one face is visible, converts each frame to a UIImage
/// and buffers it; after 19 frames, stops the session and kicks off
/// video compression on the main queue. Updates the status label when
/// no face is detected.
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    // Map the latest face metadata into preview-layer coordinates.
    // NOTE(review): self.metadatas is written on the "face" queue and read
    // here — assumed a benign pointer swap; confirm the property is atomic.
    NSMutableArray * tempArr = [[NSMutableArray alloc]init];
    for (AVMetadataFaceObject *faceobject in self.metadatas) {
        AVMetadataFaceObject *face = (AVMetadataFaceObject*)[self.previewLayer transformedMetadataObjectForMetadataObject:faceobject];
        // transformedMetadataObjectForMetadataObject: can return nil;
        // inserting nil into an NSMutableArray throws.
        if (face) {
            [tempArr addObject:face];
        }
    }
    if (tempArr.count > 0) {
        // Cosmetic: cycle the background color every 5 captured frames.
        dispatch_async(dispatch_get_main_queue(), ^{
            if (self.imagepArr.count%5 == 0) {
                self.backgroundColor = UIColor.SQ_randomColor;
            }
        });
        UIImage *image = [self imageFromPixelBuffer:sampleBuffer];
        if (image) {
            [self.imagepArr addObject:image];
        }
        // Fire the compression step exactly once. The original `> 18` check
        // re-dispatched SQ_CompressionSession on every frame that arrived
        // before stopRunning took effect.
        if (self.imagepArr.count == 19) {
            dispatch_async(dispatch_get_main_queue(), ^{
                [self.session stopRunning];
                self.messagelabel.text = SQNSLocalString(@"人脸数据处理中...");
                self.backgroundColor = UIColor.whiteColor;
                [self SQ_CompressionSession];
            });
        }
    }else{
        dispatch_async(dispatch_get_main_queue(), ^{
            self.messagelabel.text = SQNSLocalString(@"未检测到人脸");
        });
    }
}
///人脸识别代理
/// Face-metadata callback (runs on the "face" serial queue set up in
/// -setAVCaptureSession). Caches the most recent detection results for
/// the sample-buffer callback to transform and count.
-(void)captureOutput:(AVCaptureOutput *)output didOutputMetadataObjects:(NSArray<__kindof AVMetadataObject *> *)metadataObjects fromConnection:(AVCaptureConnection *)connection{
    // NOTE(review): read concurrently from the `_sample` queue in the
    // video-data callback — confirm self.metadatas is an atomic property.
    self.metadatas = metadataObjects;
}

/// Maps a UIDeviceOrientation to the corresponding AVCaptureVideoOrientation.
/// The two landscape cases are swapped because the device-orientation and
/// capture-orientation enums mirror each other there; all other values
/// share the same raw value and are cast directly.
-(AVCaptureVideoOrientation)avOrientationForDeviceOrientation:(UIDeviceOrientation)deviceOrientation
{
    switch (deviceOrientation) {
        case UIDeviceOrientationLandscapeLeft:
            return AVCaptureVideoOrientationLandscapeRight;
        case UIDeviceOrientationLandscapeRight:
            return AVCaptureVideoOrientationLandscapeLeft;
        default:
            return (AVCaptureVideoOrientation)deviceOrientation;
    }
}

//合成视频
/// Stitches the captured frames (self.imagepArr) into a 720x1280 H.264
/// video at Documents/<userid>.mp4 (30 fps), then hands the path to
/// -SQ_UploadeVideo: on the main queue.
-(void)SQ_CompressionSession{
    // Target path: Documents/<userid>.mp4; remove any previous file first.
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,NSUserDomainMask,YES);
    NSString *moviePath = [[paths objectAtIndex:0]stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.mp4",[UserModel share].userid]];
    // Output resolution (portrait 720p, matching the capture preset).
    CGSize size = CGSizeMake(720,1280);
    NSError *error = nil;
    unlink([moviePath UTF8String]);
    // Use an MPEG-4 container so the bytes match the .mp4 extension (the
    // original wrote a QuickTime container into a .mp4-named file).
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc]initWithURL:[NSURL fileURLWithPath:moviePath]fileType:AVFileTypeMPEG4 error:&error];
    if(videoWriter == nil || error) {
        [YJProgressHUD showMsgWithoutView:[error localizedDescription]];
        return;
    }
    // H.264 at the fixed output size.
    NSDictionary *videoSettings = @{AVVideoCodecKey : AVVideoCodecTypeH264,
                                    AVVideoWidthKey : @((int)size.width),
                                    AVVideoHeightKey : @((int)size.height)};
    AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    // Adaptor feeding 32ARGB pixel buffers (see -pixelBufferFromCGImage:size:).
    NSDictionary *sourcePixelBufferAttributesDictionary = @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32ARGB)};
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
    if (writerInput == nil || ![videoWriter canAddInput:writerInput]) {
        return;
    }
    [videoWriter addInput:writerInput];
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue",NULL);
    int __block frame = 0;
    WS(weakSelf);
    [writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
        // Consistently capture self weakly (the original mixed a strong
        // `self` capture with weakSelf, defeating the weakening).
        __strong typeof(weakSelf) strongSelf = weakSelf;
        if (strongSelf == nil) { return; }
        while([writerInput isReadyForMoreMediaData]) {
            // Index first, then advance: the original pre-incremented
            // `frame` before indexing and silently skipped imagepArr[0].
            if(frame >= (int)[strongSelf.imagepArr count]) {
                [writerInput markAsFinished];
                [videoWriter finishWritingWithCompletionHandler:^{
                    dispatch_async(dispatch_get_main_queue(), ^{
                        [weakSelf SQ_UploadeVideo:moviePath];
                    });
                }];
                break;
            }
            CVPixelBufferRef buffer = (CVPixelBufferRef)[strongSelf pixelBufferFromCGImage:[[strongSelf.imagepArr objectAtIndex:frame]CGImage]size:size];
            if(buffer){
                // 30 fps: frame N is presented at N/30 seconds.
                [adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame,30)];
                CFRelease(buffer);
            }
            frame++;
        }
    }];
}
/// Uploads the rendered face video at `urlString` and forwards the URL
/// returned by the server through TicketBlock; shows the server message
/// and updates the status label on failure.
-(void)SQ_UploadeVideo:(NSString*)urlString{
    [YJProgressHUD SQ_showMsgWithoutView:@""];
    WS(weakSelf);
    [HttpTool SQ_Video:urlString successBlock:^(BaseModel * _Nonnull httpModel) {
        NSString * videoUrl = [NSString stringWithFormat:@"%@",httpModel.data];
        // Consistently go through weakSelf — the original checked
        // self.TicketBlock (a strong capture) before calling through
        // weakSelf, which defeated the weak reference.
        if (weakSelf.TicketBlock) {
            weakSelf.TicketBlock(videoUrl);
        }
    } andFailBloc:^(BaseModel * _Nonnull httpModel, NSError * _Nullable error) {
        [YJProgressHUD showMsgWithoutView:httpModel.message];
        // NOTE(review): assumes HttpTool invokes its fail block on the
        // main queue — confirm before touching UI here.
        weakSelf.messagelabel.text = SQNSLocalString(@"人脸数据处理失败");
    }];
    
}
/// Extracts the CoreVideo image buffer from a CMSampleBuffer, retaining it
/// so it can outlive the sample buffer. Returns NULL if the sample buffer
/// carries no image data.
/// NOTE(review): the returned buffer is +1 retained (CVBufferRetain) but the
/// method name does not follow the Create/Copy convention — the caller must
/// CVBufferRelease it or it leaks. No caller is visible in this file; verify.
-(CVPixelBufferRef)SQ_pixelBufferFromSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    // Get the CoreVideo image buffer backing the sample buffer.
    CVPixelBufferRef pixelBuffer = NULL;
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (imageBuffer != NULL) {
        // Bump the retain count so the buffer survives the sample buffer.
        CVBufferRetain(imageBuffer);
        pixelBuffer = (CVPixelBufferRef)imageBuffer;
    }
    return pixelBuffer;
}

//对图片尺寸进行压缩--
/// Redraws `image` into a bitmap context of `newSize` and returns the
/// resized copy. Uses UIGraphicsBeginImageContext (scale 1.0), so the
/// result is exactly newSize in pixels — appropriate for video frames.
-(UIImage*)imageWithImage:(UIImage*)image scaledToSize:(CGSize)newSize
{
    // Open a bitmap drawing context sized to the target dimensions.
    UIGraphicsBeginImageContext(newSize);
    // Stretch the source image to fill the whole context.
    [image drawInRect:CGRectMake(0, 0, newSize.width, newSize.height)];
    // Snapshot the context into a new UIImage and tear the context down.
    UIImage *resizedImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return resizedImage;
}
/// Renders `image` into a newly created 32ARGB CVPixelBuffer of `size`.
/// Returns a +1 buffer (caller must CFRelease, matching the CFRelease in
/// SQ_CompressionSession), or NULL on any failure.
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size {
    NSDictionary *options = @{(NSString *)kCVPixelBufferCGImageCompatibilityKey : @YES,
                              (NSString *)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES};
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height,
                                          kCVPixelFormatType_32ARGB,
                                          (__bridge CFDictionaryRef)options, &pxbuffer);
    // The original only NSParameterAssert-ed here; asserts compile out in
    // release builds, so a failed create would crash on the NULL lock below.
    if (status != kCVReturnSuccess || pxbuffer == NULL) {
        return NULL;
    }
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    if (pxdata == NULL) {
        CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
        CFRelease(pxbuffer);
        return NULL;
    }
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    // Use the buffer's actual bytes-per-row: CVPixelBufferCreate may pad
    // rows, and the original's hard-coded 4*width would skew such buffers.
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8,
                                                 CVPixelBufferGetBytesPerRow(pxbuffer),
                                                 rgbColorSpace,
                                                 kCGImageAlphaPremultipliedFirst);
    CGColorSpaceRelease(rgbColorSpace);
    if (context == NULL) {
        CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
        CFRelease(pxbuffer);
        return NULL;
    }
    // Draw at the image's own size. CG's y-axis is flipped relative to
    // UIKit's; this orientation is what keeps the resulting video upright.
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}

/// Converts a 32BGRA sample buffer (the format configured on the video
/// output) into a UIImage. Returns nil if the buffer has no image data
/// or the bitmap context cannot be created — the original would have
/// crashed locking a NULL buffer or passed NULL to imageWithCGImage:.
- (UIImage*)imageFromPixelBuffer:(CMSampleBufferRef)p {
    CVImageBufferRef buffer = CMSampleBufferGetImageBuffer(p);
    if (buffer == NULL) {
        return nil;
    }
    CVPixelBufferLockBaseAddress(buffer, 0);
    uint8_t *base = (uint8_t *)CVPixelBufferGetBaseAddress(buffer);
    size_t width = CVPixelBufferGetWidth(buffer);
    size_t height = CVPixelBufferGetHeight(buffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(buffer);
    
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    // BGRA layout → little-endian byte order with alpha first.
    CGContextRef cgContext = CGBitmapContextCreate(base, width, height, 8, bytesPerRow, colorSpace,
                                                   kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGColorSpaceRelease(colorSpace);
    if (cgContext == NULL) {
        CVPixelBufferUnlockBaseAddress(buffer, 0);
        return nil;
    }
    
    CGImageRef cgImage = CGBitmapContextCreateImage(cgContext);
    UIImage *image = cgImage ? [UIImage imageWithCGImage:cgImage] : nil;
    CGImageRelease(cgImage);
    CGContextRelease(cgContext);
    CVPixelBufferUnlockBaseAddress(buffer, 0);
    return image;
}
 
/// Lazily created buffer holding the UIImage frames captured while a
/// face is visible; consumed by SQ_CompressionSession.
- (NSMutableArray *)imagepArr{
    if (_imagepArr == nil) {
        _imagepArr = [NSMutableArray array];
    }
    return _imagepArr;
}
/// Lazily created multi-line status label shown under the camera
/// preview ("align your face", "processing", failure messages).
- (UILabel *)messagelabel{
    if (_messagelabel == nil) {
        UILabel *statusLabel = [[UILabel alloc] init];
        statusLabel.textColor = SQColor(@"#212121");
        statusLabel.font = FONT_REGULAR(14);
        statusLabel.numberOfLines = 0;
        statusLabel.textAlignment = NSTextAlignmentCenter;
        _messagelabel = statusLabel;
    }
    return _messagelabel;
}
/// Lazily created close button (top-right of the title row), styled as
/// a rounded light-gray square with a 30x30 "cancel" icon on top.
- (SQCustomButton *)deteleBtn{
    if (_deteleBtn == nil) {
        SQCustomButton *closeButton = [[SQCustomButton alloc] initType:SQCustomButtonTopImageType imageSize:CGSizeMake(30, 30) midmargin:0];
        closeButton.backgroundColor = SQColorF5F5F5;
        closeButton.layer.cornerRadius = 10;
        closeButton.image = SQGetImage(@"取消");
        _deteleBtn = closeButton;
    }
    return _deteleBtn;
}

@end
