//
//  OcrPCaptureManager.m
//  ocr_plugin
//
//  Created by FXY on 2022/1/20.
//

#import "OcrPCaptureManager.h"

#import "UIImage+OcrPExtend.h"
#import "OcrPRectManager.h"

#import "OcrPExcards.h"

#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>

#import <SDWebImage/SDWebImage.h>

#import "NSString+OcrPExtend.h"

#define OcrPisPad ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad)

// iPhone5/5c/5s/SE 4英寸 屏幕宽高：320*568点 屏幕模式：2x 分辨率：1136*640像素
#define iPhone5or5cor5sorSE ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad || [UIScreen mainScreen].bounds.size.height == 568.0)

// iPhone6/6s/7 4.7英寸 屏幕宽高：375*667点 屏幕模式：2x 分辨率：1334*750像素
#define iPhone6or6sor7 ([UIScreen mainScreen].bounds.size.height == 667.0)

@interface OcrPCaptureManager ()<AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureMetadataOutputObjectsDelegate, UIImagePickerControllerDelegate,UINavigationControllerDelegate,AVCapturePhotoCaptureDelegate>

// Capture device (camera).
@property (nonatomic,strong) AVCaptureDevice *captureDevice;

// Device input wrapping captureDevice; replaced when switching cameras.
@property (nonatomic, strong) AVCaptureDeviceInput *captureDeviceInput;

// AVCaptureSession moving data between the input device and the outputs.
@property (nonatomic,strong) AVCaptureSession *captureSession;

// Pixel format for the video data output (a kCVPixelFormatType_* value).
@property (nonatomic,strong) NSNumber *outPutSetting;

// Per-frame video output; frames are fed to the ID-card recognizer.
@property (nonatomic,strong) AVCaptureVideoDataOutput *captureVideoOutput;

// Metadata output (used for face detection).
@property (nonatomic,strong) AVCaptureMetadataOutput *captureMetadataOutput;

// Camera preview layer.
@property (nonatomic,strong) AVCaptureVideoPreviewLayer *previewLayer;

// Queue for metadata callbacks (injected via the initializer).
@property (nonatomic,strong) dispatch_queue_t captureSessionQueue;

// Photo-library picker controller (lazily created).
@property (nonatomic, strong) UIImagePickerController *imagePickerController;

/// Photo capture output (iOS 10+).
@property (nonatomic, strong) AVCapturePhotoOutput *capturePhotoOutput API_AVAILABLE(ios(10.0));

// Legacy still-image output used on pre-iOS 10 systems.
@property (nonatomic, strong) AVCaptureStillImageOutput *stillImageOutput;

/// Whether an ID card has already been recognized in the current scan.
@property (nonatomic,assign) BOOL isSuccess;


/// Tap guard to prevent double-taps on the shutter/album buttons.
@property (nonatomic,assign) BOOL taped;

/// Viewfinder (crop) area, in preview-layer coordinates.
@property (nonatomic,assign) CGRect areaFrame;

// Queue on which video sample buffers are processed.
@property(strong, nonatomic) dispatch_queue_t scanQueue;

@end

@implementation OcrPCaptureManager

/// Designated initializer.
/// @param captureSessionQueue Queue used for metadata-output callbacks.
- (instancetype)initWithCaptureSessionQueue:(dispatch_queue_t)captureSessionQueue{
    self = [super init];
    NSAssert(self, @"super init cannot be nil");
    
    if (self) {
        _captureSessionQueue = captureSessionQueue;
        // Serial queue: frame callbacks must be handled one at a time, in order.
        // (DISPATCH_QUEUE_SERIAL == NULL, so behavior is unchanged; the
        // constant just states the intent explicitly.)
        _scanQueue = dispatch_queue_create("io.flutter.ocr.scanQueue", DISPATCH_QUEUE_SERIAL);
    }
    
    return self;
}

/// Lazily-created photo-library picker: full-screen, editing disabled,
/// delegating back to this manager.
-(UIImagePickerController *)imagePickerController{
    if (!_imagePickerController) {
        UIImagePickerController *picker = [[UIImagePickerController alloc] init];
        picker.allowsEditing = NO;
        picker.modalPresentationStyle = UIModalPresentationFullScreen;
        picker.delegate = self;
        _imagePickerController = picker;
    }
    return _imagePickerController;
}


#pragma mark device
#pragma mark device
/// Lazily resolves the default video device and configures continuous
/// focus / exposure / white balance where supported.
-(AVCaptureDevice *)captureDevice {
    if (_captureDevice == nil) {
        _captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        
        NSError *error = nil;
        if ([_captureDevice lockForConfiguration:&error]) {
            // NOTE: with focusMode == AVCaptureFocusModeContinuousAutoFocus,
            // enabling smoothAutoFocusEnabled made the camera stall on iPad
            // once a face was detected — so smooth auto-focus stays off.
            
            if ([_captureDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {// continuous auto-focus
                _captureDevice.focusMode = AVCaptureFocusModeContinuousAutoFocus;
            }
            
            if ([_captureDevice isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure ]) {// continuous auto-exposure
                _captureDevice.exposureMode = AVCaptureExposureModeContinuousAutoExposure;
            }
            
            if ([_captureDevice isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]) {// continuous auto white balance
                _captureDevice.whiteBalanceMode = AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance;
            }
            
            [_captureDevice unlockForConfiguration];
        } else {
            // Configuration is best-effort: the device still works with its
            // defaults, but log why the tuning was skipped.
            NSLog(@"lockForConfiguration failed: %@", error.localizedDescription);
        }
    }
    
    return _captureDevice;
}

/// Lazily wraps captureDevice in an input; reports a failure to the delegate
/// when no camera input can be created.
- (AVCaptureDeviceInput *)captureDeviceInput{
    if (!_captureDeviceInput) {
        NSError *error = nil;
        _captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:self.captureDevice error:&error];
        
        // Cocoa convention: test the returned object, not the error pointer —
        // the error out-param is only meaningful when the result is nil.
        if (!_captureDeviceInput) {
            [self failureWithTitle:@"没有摄像设备" message:error.localizedDescription];
        }
    }
    return  _captureDeviceInput;
}

#pragma mark outPutSetting
#pragma mark outPutSetting
/// Pixel format for the video output; defaults to bi-planar 4:2:0
/// video-range YCbCr, whose Y plane the recognizer consumes directly.
-(NSNumber *)outPutSetting {
    if (!_outPutSetting) {
        _outPutSetting = @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
    }
    return _outPutSetting;
}

#pragma mark metadataOutput
#pragma mark metadataOutput
/// Lazily-created metadata output used for face detection; callbacks are
/// delivered on the injected captureSessionQueue.
-(AVCaptureMetadataOutput *)captureMetadataOutput {
    if (!_captureMetadataOutput) {
        AVCaptureMetadataOutput *output = [[AVCaptureMetadataOutput alloc] init];
        [output setMetadataObjectsDelegate:self queue:self.captureSessionQueue];
        _captureMetadataOutput = output;
    }
    return _captureMetadataOutput;
}

#pragma mark videoDataOutput
#pragma mark videoDataOutput
/// Lazily-created per-frame video output. Late frames are discarded so the
/// recognizer never falls behind the camera; frames arrive on scanQueue.
-(AVCaptureVideoDataOutput *)captureVideoOutput {
    if (!_captureVideoOutput) {
        AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
        output.alwaysDiscardsLateVideoFrames = YES;
        output.videoSettings = @{(id)kCVPixelBufferPixelFormatTypeKey: self.outPutSetting};
        [output setSampleBufferDelegate:self queue:self.scanQueue];
        _captureVideoOutput = output;
    }
    return _captureVideoOutput;
}

#pragma mark session
#pragma mark session
/// Lazily builds the capture session: device input, plus (when scanning is
/// enabled) the video-frame and face-metadata outputs, plus a photo output
/// appropriate for the OS version.
-(AVCaptureSession *)captureSession {
    if (_captureSession == nil) {
        _captureSession = [[AVCaptureSession alloc] init];
        
        _captureSession.sessionPreset = AVCaptureSessionPresetHigh;
        
        if (self.captureDeviceInput!=nil) {
            if ([_captureSession canAddInput:self.captureDeviceInput]) {
                [_captureSession addInput:self.captureDeviceInput];
            }
            
            if (self.scanEnable) {
                if ([_captureSession canAddOutput:self.captureVideoOutput]) {
                    [_captureSession addOutput:self.captureVideoOutput];
                }
                
                if ([_captureSession canAddOutput:self.captureMetadataOutput]) {
                    [_captureSession addOutput:self.captureMetadataOutput];
                    // metadataObjectTypes must be set AFTER addOutput:,
                    // otherwise AVFoundation throws (crash).
                    self.captureMetadataOutput.metadataObjectTypes = @[AVMetadataObjectTypeFace];
                }
            }
            
            // iOS 10+ uses AVCapturePhotoOutput; earlier systems fall back to
            // the legacy AVCaptureStillImageOutput.
            if(@available(iOS 10.0, *)) {
                if ([_captureSession canAddOutput:self.capturePhotoOutput]) {
                    [_captureSession addOutput:self.capturePhotoOutput];
                }
            } else {
                if ([_captureSession canAddOutput:self.stillImageOutput]) {
                    [_captureSession addOutput:self.stillImageOutput];
                }
            }
            
        }
        
    }
    
    return _captureSession;
}

/// Lazy photo output for iOS 10 and later.
- (AVCapturePhotoOutput *)capturePhotoOutput{
    if (_capturePhotoOutput == nil) {
        _capturePhotoOutput = [[AVCapturePhotoOutput alloc] init];
    }
    return _capturePhotoOutput;
}

/// Lazy legacy still-image output, used only before iOS 10.
- (AVCaptureStillImageOutput *)stillImageOutput{
    if (_stillImageOutput == nil) {
        _stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    }
    return _stillImageOutput;
}

#pragma mark previewLayer
#pragma mark previewLayer
/// Lazy preview layer bound to the capture session, scaled AspectFill so the
/// camera feed covers the whole layer.
-(AVCaptureVideoPreviewLayer *)previewLayer {
    if (!_previewLayer) {
        AVCaptureVideoPreviewLayer *layer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
        layer.videoGravity = AVLayerVideoGravityResizeAspectFill;
        _previewLayer = layer;
    }
    return _previewLayer;
}

- (void)dealloc{
    NSLog(@"OcrPCaptureManager dealloc");
    
    // Stop the capture pipeline when the manager is torn down.
    [self stopSession];
}


/// Stores the viewfinder rect and derives the face-detection rect
/// (`detectionFrame`) used to gate frame capture in the metadata callback.
/// The hard-coded layouts below position the detection rect over the ID-card
/// photo (type 0, front side) or the emblem area (type 1, back side), with
/// sizes tuned per screen class — see the iPhone* macros at the top of file.
- (void)configAreaFrame:(CGRect)areaFrame faceFrame:(CGRect)faceFrame{
    self.areaFrame = areaFrame;
    
    // Temporarily disabled: zeroing the width forces the computed layout
    // below instead of the caller-supplied faceFrame.
    faceFrame.size.width = 0;
    
    if (faceFrame.size.width!=0&&faceFrame.size.height!=0) {
        self.detectionFrame = faceFrame;
    } else {
        if (OcrPisPad) {
            if (self.type==0) {
                CGFloat facePathHeight = iPhone5or5cor5sorSE? 125: (iPhone6or6sor7? 150: 180);
                CGFloat facePathWidth = facePathHeight * 0.812;
                self.detectionFrame = (CGRect){CGRectGetMaxX(areaFrame) - facePathWidth - 35,CGRectGetMaxY(areaFrame) - (areaFrame.size.height+facePathHeight)*0.5 ,facePathWidth,facePathHeight};
            } else if (self.type==1) {
                CGFloat facePathWidth = iPhone5or5cor5sorSE? 60: (iPhone6or6sor7? 80: 120);
                CGFloat facePathHeight = facePathWidth;
                self.detectionFrame = (CGRect){CGRectGetMinX(areaFrame) + 25,CGRectGetMinY(areaFrame) + 35,facePathWidth,facePathHeight};
            }
            
        } else {
            if (self.type==0) {
                CGFloat facePathWidth = iPhone5or5cor5sorSE? 125: (iPhone6or6sor7? 150: 180);
                CGFloat facePathHeight = facePathWidth * 0.812;
                self.detectionFrame = (CGRect){CGRectGetMaxX(areaFrame) - facePathWidth - 35,CGRectGetMaxY(areaFrame) - facePathHeight - 25,facePathWidth,facePathHeight};
            } else if (self.type==1) {
                CGFloat facePathWidth = iPhone5or5cor5sorSE? 60: (iPhone6or6sor7? 80: 120);
                CGFloat facePathHeight = facePathWidth;
                self.detectionFrame = (CGRect){CGRectGetMaxX(areaFrame) - facePathWidth - 35,CGRectGetMinY(areaFrame) + 25,facePathWidth,facePathHeight};
            }
        }
        
    }
    
    if (OcrPisPad) {
        if (self.fullArea==NO) {
            // iPad is forced to landscape, which breaks the computed detection
            // region. AVLayerVideoGravityResizeAspect would fix recognition
            // but letterboxes the preview and breaks crop-to-viewfinder, so
            // the whole viewfinder is used as the detection region instead.
            self.detectionFrame = areaFrame;
        }
    }
    
    
    NSLog(@"头像位置：%@",NSStringFromCGRect(self.detectionFrame));
    
    // Convert from layer coordinates to the metadata output's normalized
    // coordinate space.
    self.captureMetadataOutput.rectOfInterest = [self.previewLayer metadataOutputRectOfInterestForRect:self.detectionFrame];
}

#pragma mark - 运行session
// session开始，即输入设备和输出设备开始数据传递
/// Starts the session (data flow between input and outputs) if idle.
- (void)runSession {
    if (self.captureSession.isRunning) {
        return;
    }
    [self.captureSession startRunning];
}

#pragma mark - 停止session
// session停止，即输入设备和输出设备结束数据传递
/// Stops the session (halts data flow) if it is currently running.
-(void)stopSession {
    if (!self.captureSession.isRunning) {
        return;
    }
    [self.captureSession stopRunning];
}



#pragma mark - 拍照获得图片
#pragma mark - 拍照获得图片
/// iOS 10 photo-capture callback (superseded by
/// -captureOutput:didFinishProcessingPhoto:error: on iOS 11+).
- (void)captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingPhotoSampleBuffer:(nullable CMSampleBufferRef)photoSampleBuffer previewPhotoSampleBuffer:(nullable CMSampleBufferRef)previewPhotoSampleBuffer resolvedSettings:(nonnull AVCaptureResolvedPhotoSettings *)resolvedSettings bracketSettings:(nullable AVCaptureBracketedStillImageSettings *)bracketSettings error:(nullable NSError *)error API_AVAILABLE(ios(10.0)){
    
    // photoSampleBuffer is nullable; passing NULL to
    // jpegStillImageNSDataRepresentation: raises an exception, so guard it.
    if (!error && photoSampleBuffer != NULL) {
        NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:photoSampleBuffer];
        UIImage *image = [[UIImage alloc] initWithData:imageData];
        
        [self dealImage_captureOutput:image];
    }
    
    // Re-enable the shutter button in every case.
    self.taped = NO;
}

/// iOS 11+ photo-capture callback.
-(void)captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingPhoto:(AVCapturePhoto *)photo error:(NSError *)error API_AVAILABLE(ios(11.0)){
    if (error == nil) {
        // Decode the captured photo and hand it to the shared post-processing.
        NSData *jpegData = [photo fileDataRepresentation];
        [self dealImage_captureOutput:[UIImage imageWithData:jpegData]];
    }
    
    // Re-enable the shutter button.
    self.taped = NO;
}

/// Post-processes a photo from AVCapturePhotoOutput before delivering it to
/// the delegate. NOTE: the backend renders the image into a form/table and it
/// came out upside down, hence the orientation special-casing below.
- (void)dealImage_captureOutput:(UIImage *)image{
    
    if (self.cropEnable && !CGRectEqualToRect(self.areaFrame, CGRectZero)) {
        // Crop to the viewfinder first, then normalize orientation.
        image = [self croppedImage:image clipArea:self.areaFrame];
        if (OcrPisPad) {
            // iPad images come out correctly oriented — nothing to do.
        } else {
            image = [self fixOrientation:image];
        }
        
    } else {
        
        if (self.takePhotoRotated) {
            if (OcrPisPad) {
                image = [self pad_fixOrientation:image];
            } else {
                // Re-tag as "Up" without redrawing.
                // NOTE(review): the stillImageOutput path (see
                // dealImage_stillImageOutput:) uses UIImageOrientationLeft
                // here instead — presumably the two capture paths produce
                // different raw orientations; confirm before unifying.
                image = [UIImage imageWithCGImage:image.CGImage scale:1.0f orientation:UIImageOrientationUp];
            }
        } else {
            image = [self fixOrientation:image];
        }
        
    }
    
    [self successWithInfo:OcrPModel.new image:image];
}

#pragma mark - 相册选择图片
/// Called when the user confirms a photo in the library picker: grab the
/// unedited image, dismiss the picker, then notify the delegate.
- (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info {
    UIImage *pickedImage = info[UIImagePickerControllerOriginalImage];
    __weak __typeof__(self) weakSelf = self;
    [self.imagePickerController dismissViewControllerAnimated:YES completion:^{
        [weakSelf successWithInfo:OcrPModel.new image:pickedImage];
    }];
}

/// User cancelled the library picker — just dismiss it.
-(void)imagePickerControllerDidCancel:(UIImagePickerController *)picker{
    [self.imagePickerController dismissViewControllerAnimated:YES completion:nil];
}



#pragma mark - AVCaptureMetadataOutputObjectsDelegate
#pragma mark 从输出的元数据中捕捉人脸
// 检测人脸是为了获得“人脸区域”，做“人脸区域”与“身份证人像框”的区域对比，当前者在后者范围内的时候，才能截取到完整的身份证图像
/// Face-metadata callback. The detected face region is compared against the
/// ID-card portrait frame; only when the face sits fully inside it do we
/// (re-)attach the video delegate so full frames get captured for OCR.
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection{
    if (metadataObjects.count == 0) {
        return;
    }
    
    AVMetadataObject *metadataObject = metadataObjects.firstObject;
    
    // BUGFIX: AVMetadataObjectType is an NSString — compare with
    // isEqualToString:, not pointer equality (==), which is only
    // coincidentally true for the framework constant.
    if (![metadataObject.type isEqualToString:AVMetadataObjectTypeFace]) {
        return;
    }
    
    // Convert from metadata coordinates to preview-layer coordinates.
    AVMetadataObject *transformedMetadataObject = [self.previewLayer transformedMetadataObjectForMetadataObject:metadataObject];
    CGRect faceRegion = transformedMetadataObject.bounds;
    
    NSLog(@"是否包含头像：%d, facePathRect: %@, faceRegion: %@",CGRectContainsRect(self.detectionFrame, faceRegion),NSStringFromCGRect(self.detectionFrame),NSStringFromCGRect(faceRegion));
    
    if (CGRectContainsRect(self.detectionFrame, faceRegion)) {// face fully inside the portrait frame
        // Re-attach the sample-buffer delegate so subsequent frames are
        // delivered for recognition (it is detached after a success).
        if (!self.captureVideoOutput.sampleBufferDelegate) {
            [self.captureVideoOutput setSampleBufferDelegate:self queue:self.scanQueue];
        }
    }
}

#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate
#pragma mark 从输出的数据流捕捉单一的图像帧
// AVCaptureVideoDataOutput获取实时图像，这个代理方法的回调频率很快，几乎与手机屏幕的刷新频率一样快
/// Per-frame callback (fires at roughly display rate). Each frame from the
/// video output is fed to the ID-card recognizer.
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    // Only the two bi-planar YCbCr formats are supported by the recognizer.
    if ([self.outPutSetting isEqualToNumber:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]] || [self.outPutSetting isEqualToNumber:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange]]) {
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        // CMSampleBufferGetImageBuffer can return NULL (e.g. a sample that
        // carries no image data) — skip such frames instead of crashing.
        if (imageBuffer == NULL) {
            return;
        }
        
        if ([captureOutput isEqual:self.captureVideoOutput]) {
            // Run ID-card recognition on this frame.
            [self IDCardRecognit:imageBuffer];
            
            if (self.type==0) {
                if (self.autoStopScan) {
                    // Detach the delegate after each processed frame to
                    // throttle recognition; the face-metadata callback
                    // re-attaches it when a face is back in position.
                    if (self.captureVideoOutput.sampleBufferDelegate) {
                        [self.captureVideoOutput setSampleBufferDelegate:nil queue:self.scanQueue];
                    }
                }
            }
        }
    } else {
        NSLog(@"输出格式不支持");
    }
}

#pragma mark - 身份证信息识别
/// Runs ID-card OCR (EXCARDS SDK) on one camera frame. Called on scanQueue.
/// On success, optionally stops the scan, crops the frame to the guide
/// region, and delivers the parsed OcrPModel + image on the main thread.
- (void)IDCardRecognit:(CVImageBufferRef)imageBuffer {
    // A card was already recognized in this scan — nothing to do.
    if (self.isSuccess) {
        return;
    }
    
    // Keep the buffer alive while we read from it.
    CVBufferRetain(imageBuffer);
    
    // Lock the image buffer before touching its base address.
    if (CVPixelBufferLockBaseAddress(imageBuffer, 0) == kCVReturnSuccess) {
        size_t width= CVPixelBufferGetWidth(imageBuffer);// e.g. 1920
        size_t height = CVPixelBufferGetHeight(imageBuffer);// e.g. 1080
        
        // Bi-planar YCbCr: locate the Y (luma) plane — all the recognizer needs.
        CVPlanarPixelBufferInfo_YCbCrBiPlanar *planar = CVPixelBufferGetBaseAddress(imageBuffer);
        size_t offset = NSSwapBigIntToHost(planar->componentInfoY.offset);
        size_t rowBytes = NSSwapBigIntToHost(planar->componentInfoY.rowBytes);
        unsigned char* baseAddress = (unsigned char *)CVPixelBufferGetBaseAddress(imageBuffer);
        unsigned char* pixelAddress = baseAddress + offset;
        
        // Reusable scratch copy of the Y plane. BUGFIX: grow the buffer when
        // the frame size increases — the old code allocated once at the first
        // frame's size and would overflow if frames ever got larger (e.g.
        // after a camera or preset change).
        static unsigned char *buffer = NULL;
        static size_t bufferCapacity = 0;
        size_t needed = width * height;
        if (buffer == NULL || bufferCapacity < needed) {
            free(buffer);
            buffer = (unsigned char *)malloc(needed);
            bufferCapacity = (buffer != NULL) ? needed : 0;
        }
        
        if (buffer != NULL) {
            memcpy(buffer, pixelAddress, needed);
            
            unsigned char pResult[1024];
            int ret = EXCARDS_RecoIDCardData(buffer, (int)width, (int)height, (int)rowBytes, (int)8, (char*)pResult, sizeof(pResult));
            if (ret <= 0) {
                NSLog(@"ret=[%d]", ret);
            } else {
                NSLog(@"ret=[%d]", ret);
                
                // pResult layout (EXCARDS SDK): one card-type byte, then a
                // sequence of <field-type byte><field bytes...><space> records.
                char ctype;
                char content[256];
                int xlen;
                int i = 0;
                
                OcrPModel *iDInfo = OcrPModel.new;
                
                ctype = pResult[i++];
                
                iDInfo.type = ctype;
                
                NSLog(@"ctype====%c",iDInfo.type);
                
                while(i < ret){
                    ctype = pResult[i++];
                    for(xlen = 0; i < ret; ++i){
                        if(pResult[i] == ' ') { ++i; break; }
                        // BUGFIX: bound the copy so a malformed record cannot
                        // overflow the fixed 256-byte field buffer.
                        if (xlen < (int)sizeof(content) - 1) {
                            content[xlen++] = pResult[i];
                        }
                    }
                    
                    content[xlen] = 0;
                    
                    if(xlen) {
                        // Field text is GB18030 (GBK superset) encoded.
                        NSStringEncoding gbkEncoding = CFStringConvertEncodingToNSStringEncoding(kCFStringEncodingGB_18030_2000);
                        if(ctype == 0x21) {
                            iDInfo.num = [NSString stringWithCString:(char *)content encoding:gbkEncoding];
                        } else if(ctype == 0x22) {
                            iDInfo.name = [NSString stringWithCString:(char *)content encoding:gbkEncoding];
                        } else if(ctype == 0x23) {
                            iDInfo.gender = [NSString stringWithCString:(char *)content encoding:gbkEncoding];
                        } else if(ctype == 0x24) {
                            iDInfo.nation = [NSString stringWithCString:(char *)content encoding:gbkEncoding];
                        } else if(ctype == 0x25) {
                            iDInfo.address = [NSString stringWithCString:(char *)content encoding:gbkEncoding];
                        } else if(ctype == 0x26) {
                            iDInfo.issue = [NSString stringWithCString:(char *)content encoding:gbkEncoding];
                        } else if(ctype == 0x27) {
                            iDInfo.valid = [NSString stringWithCString:(char *)content encoding:gbkEncoding];
                        }
                    }
                }
                
                NSLog(@"\n正面\n姓名：%@\n性别：%@\n民族：%@\n住址：%@\n公民身份证号码：%@\n\n反面\n签发机关：%@\n有效期限：%@",iDInfo.name,iDInfo.gender,iDInfo.nation,iDInfo.address,iDInfo.num,iDInfo.issue,iDInfo.valid);
                
                // Acceptance criteria — type 0 (front): a plausible 18-digit
                // ID number; type 1 (back): a complete validity period.
                if (self.type==0) {
                    if (![NSString isBlankString:iDInfo.num] && iDInfo.num.length>=17) {
                        self.isSuccess = YES;
                    }
                } else {
                    if (![NSString isBlankString:iDInfo.valid] && iDInfo.valid.length>=16) {
                        self.isSuccess = YES;
                    }
                }
                
                if (self.isSuccess) {
                    
                    if (self.autoStopScan) {
                        // Detach the frame delegate and stop the session so no
                        // further frames are processed.
                        if (self.captureVideoOutput.sampleBufferDelegate) {
                            [self.captureVideoOutput setSampleBufferDelegate:nil queue:self.scanQueue];
                        }
                        
                        if ([self.captureSession isRunning]) {
                            [self.captureSession stopRunning];
                        }
                    }
                    
                    UIImage *image = [UIImage getImageStream:imageBuffer];
                    
                    if (self.fullArea==NO) {
                        // Crop the frame down to the guide (viewfinder) region.
                        CGRect effectRect = [OcrPRectManager getEffectImageRect:CGSizeMake(width, height)];
                        CGRect rect = [OcrPRectManager getGuideFrame:effectRect];
                        image = [UIImage getSubImage:rect inImage:image];
                    }
                    
                    // Deliver the result on the main thread (UI work follows).
                    __weak __typeof__(self) weakSelf = self;
                    dispatch_async(dispatch_get_main_queue(), ^{
                        [weakSelf successWithInfo:iDInfo image:image];
                        
                        if (weakSelf.autoStopScan==NO) {
                            // Continuous mode: allow further recognitions.
                            weakSelf.isSuccess = NO;
                        }
                    });
                }
            }
        }
        
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    }
    
    CVBufferRelease(imageBuffer);
    
}


#pragma mark - 错误信息
#pragma mark - 错误信息
/// Forwards an error to the delegate, if it implements the callback.
-(void)failureWithTitle:(NSString *)title message:(NSString *)message{
    // respondsToSelector: on a nil delegate returns NO, so no extra nil check.
    if ([self.delegate respondsToSelector:@selector(failureWithTitle:message:)]) {
        [self.delegate failureWithTitle:title message:message];
    }
}
    
#pragma mark - 成功信息
#pragma mark - 成功信息
/// Forwards the recognized info + image to the delegate, if implemented.
- (void)successWithInfo:(OcrPModel*)info image:(UIImage *)image{
    // respondsToSelector: on a nil delegate returns NO, so no extra nil check.
    if ([self.delegate respondsToSelector:@selector(successWithInfo:image:)]) {
        [self.delegate successWithInfo:info image:image];
    }
}

#pragma mark - 初始化
#pragma mark - 初始化
/// Initializes the OCR engine and installs the preview layer in contentView.
/// NOTE(review): method name has a typo ("congfig") but is public API, so it
/// is kept for compatibility.
- (void)congfigContentView:(UIView *)contentView{
    // Point the OCR engine at the app bundle's resource directory.
    const char *resourcePath = [[[NSBundle mainBundle] resourcePath] UTF8String];
    int ret = EXCARDS_Init(resourcePath);
    if (ret != 0) {
        NSLog(@"初始化失败：ret=%d", ret);
    }
    
    self.previewLayer.frame = contentView.frame;
    
    // iPad runs forced-landscape; rotate the preview feed to match.
    if (OcrPisPad) {
        self.previewLayer.connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
    }
    
    // Install the camera preview.
    [contentView.layer addSublayer:self.previewLayer];
}

#pragma mark - 点击相册
#pragma mark - 点击相册
/// Album-button entry point; debounced, then presented on the main queue.
- (void)takePhotoLibrary{
    if (self.taped) {
        return; // a presentation is already in flight
    }
    self.taped = YES;
    
    NSLog(@"点击了相册");
    
    // UI presentation must happen on the main thread.
    __weak __typeof__(self) weakSelf = self;
    dispatch_async(dispatch_get_main_queue(), ^{
        [weakSelf t_takePhotoLibrary];
    });
}

/// Presents the photo-library picker. Runs on the main thread.
- (void)t_takePhotoLibrary{
    if ([UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypePhotoLibrary]) {
        self.imagePickerController.sourceType = UIImagePickerControllerSourceTypePhotoLibrary;
    } else {
        // BUGFIX: if the photo library is unavailable, bail out instead of
        // presenting a picker with an unsupported source — and reset the tap
        // guard so the button is not left permanently disabled.
        self.taped = NO;
        return;
    }
    
    __weak __typeof__(self) weakSelf = self;
    [[UIApplication sharedApplication].keyWindow.rootViewController presentViewController:self.imagePickerController animated:YES completion:^{
        weakSelf.taped = NO;
    }];
}


#pragma mark - 点击拍照
#pragma mark - 点击拍照
/// Shutter-button entry point; debounced, then performed on the main queue.
- (void)takePhotoCamera {
    if (self.taped) {
        return; // a capture is already in flight
    }
    self.taped = YES;
    
    NSLog(@"点击了拍照");
    
    __weak __typeof__(self) weakSelf = self;
    dispatch_async(dispatch_get_main_queue(), ^{
        [weakSelf t_takePhotoCamera];
    });
}

/// Triggers a still capture via the OS-appropriate photo output.
/// Results arrive in the AVCapturePhotoCaptureDelegate callbacks (iOS 10+)
/// or the completion handler below (pre-iOS 10).
- (void)t_takePhotoCamera{
    if (@available(iOS 10.0, *)) {
        AVCaptureConnection * videoConnection = [self.capturePhotoOutput connectionWithMediaType:AVMediaTypeVideo];
        if (videoConnection ==  nil) {
            // BUGFIX: reset the tap guard before bailing out — otherwise
            // self.taped stays YES and the shutter button is dead forever.
            self.taped = NO;
            return;
        }
        
        AVCapturePhotoSettings *set = [AVCapturePhotoSettings photoSettings];
        
        [self.capturePhotoOutput capturePhotoWithSettings:set delegate:self];
    } else {
        
        AVCaptureConnection *videoConnection = [self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
        if (videoConnection ==  nil) {
            // Same tap-guard reset as the iOS 10+ path above.
            self.taped = NO;
            return;
        }
        
        __weak __typeof__(self) weakSelf = self;
        
        [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef  _Nullable imageDataSampleBuffer, NSError * _Nullable error) {
            if (imageDataSampleBuffer != nil) {
                NSData * data = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
                UIImage *image = [[UIImage alloc]initWithData:data];
                
                [weakSelf dealImage_stillImageOutput:image];
            }
            weakSelf.taped = NO;
        }];
    }
}

/// Post-processes a photo from the legacy AVCaptureStillImageOutput path.
/// NOTE: the backend renders the image into a form/table and it came out
/// upside down, hence the orientation special-casing below.
- (void)dealImage_stillImageOutput:(UIImage *)image{
    if (self.cropEnable && !CGRectEqualToRect(self.areaFrame, CGRectZero)) {
        // Crop to the viewfinder first, then normalize orientation.
        image = [self croppedImage:image clipArea:self.areaFrame];
        if (OcrPisPad) {
            // iPad images come out correctly oriented — nothing to do.
        } else {
            image = [self fixOrientation:image];
        }
    } else {
        if (self.takePhotoRotated) {
            if (OcrPisPad) {
                image = [self pad_fixOrientation:image];
            } else {
                // Re-tag as "Left" without redrawing.
                // NOTE(review): the photoOutput path (dealImage_captureOutput:)
                // uses UIImageOrientationUp here — presumably the two capture
                // paths produce different raw orientations; confirm before
                // unifying.
                image = [UIImage imageWithCGImage:image.CGImage scale:1.0f orientation:UIImageOrientationLeft];
            }
        } else {
            image = [self fixOrientation:image];
        }
    }
    
    [self successWithInfo:OcrPModel.new image:image];
}

#pragma mark - 切换摄像头
#pragma mark - 切换摄像头
/// Public entry point for toggling front/back camera; hops to the main queue.
-(void)switchCamera{
    __weak __typeof__(self) weakSelf = self;
    dispatch_async(dispatch_get_main_queue(), ^{
        [weakSelf t_switchCamera];
    });
}

/// Swaps the session's device input between the front and back cameras.
-(void)t_switchCamera{
    // All video-capable devices (front/back cameras plus external inputs).
    NSArray *cameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    // Nothing to switch to with a single camera.
    if (cameras.count <= 1) {
        return;
    }
    
    // Flip the position of the currently-active input.
    AVCaptureDevicePosition position = [[self.captureDeviceInput device] position];
    AVCaptureDevice *newCamera = nil;
    if (position == AVCaptureDevicePositionFront) {
        newCamera = [self cameraWithPosition:AVCaptureDevicePositionBack];
    } else if (position == AVCaptureDevicePositionBack) {
        newCamera = [self cameraWithPosition:AVCaptureDevicePositionFront];
    }
    if (newCamera == nil) {
        return;
    }
    
    // BUGFIX: capture and log the failure reason instead of discarding it
    // (the old code passed error:nil).
    NSError *error = nil;
    AVCaptureDeviceInput *newInput = [AVCaptureDeviceInput deviceInputWithDevice:newCamera error:&error];
    if (newInput == nil) {
        NSLog(@"switch camera failed: %@", error.localizedDescription);
        return;
    }
    
    [self.captureSession beginConfiguration];
    // Remove the current input first — a session cannot hold both at once.
    [self.captureSession removeInput:self.captureDeviceInput];
    if ([self.captureSession canAddInput:newInput]) {
        [self.captureSession addInput:newInput];
        self.captureDeviceInput = newInput;
    } else {
        // Roll back to the previous input if the new one is rejected.
        [self.captureSession addInput:self.captureDeviceInput];
    }
    [self.captureSession commitConfiguration];
}



/// Returns the first video device at the given position, or nil if none.
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position{
    for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if (device.position == position) {
            return device;
        }
    }
    return nil;
}

/// Redraws the image so its pixels are physically "Up", discarding the EXIF
/// orientation tag. Fixes the backend rendering the image upside down when it
/// draws its table overlay.
/// NOTE(review): the context is sized in points (aImage.size), so a 2x/3x
/// image loses its scale factor here — confirm whether that is intended.
- (UIImage *)fixOrientation:(UIImage *)aImage {

    // No-op if the orientation is already correct
    if (aImage.imageOrientation == UIImageOrientationUp)
        return aImage;

    // We need to calculate the proper transformation to make the image upright.
    // We do it in 2 steps: Rotate if Left/Right/Down, and then flip if Mirrored.
    CGAffineTransform transform = CGAffineTransformIdentity;

    switch (aImage.imageOrientation) {
        case UIImageOrientationDown:
        case UIImageOrientationDownMirrored:
            // 180° rotation about the image center.
            transform = CGAffineTransformTranslate(transform, aImage.size.width, aImage.size.height);
            transform = CGAffineTransformRotate(transform, M_PI);
            break;

        case UIImageOrientationLeft:
        case UIImageOrientationLeftMirrored:
            // 90° counter-clockwise.
            transform = CGAffineTransformTranslate(transform, aImage.size.width, 0);
            transform = CGAffineTransformRotate(transform, M_PI_2);
            break;

        case UIImageOrientationRight:
        case UIImageOrientationRightMirrored:
            // 90° clockwise.
            transform = CGAffineTransformTranslate(transform, 0, aImage.size.height);
            transform = CGAffineTransformRotate(transform, -M_PI_2);
            break;
        default:
            break;
    }

    switch (aImage.imageOrientation) {
        case UIImageOrientationUpMirrored:
        case UIImageOrientationDownMirrored:
            // Horizontal flip.
            transform = CGAffineTransformTranslate(transform, aImage.size.width, 0);
            transform = CGAffineTransformScale(transform, -1, 1);
            break;

        case UIImageOrientationLeftMirrored:
        case UIImageOrientationRightMirrored:
            // Horizontal flip in the rotated coordinate space.
            transform = CGAffineTransformTranslate(transform, aImage.size.height, 0);
            transform = CGAffineTransformScale(transform, -1, 1);
            break;
        default:
            break;
    }

    // Now we draw the underlying CGImage into a new context, applying the transform
    // calculated above.
    CGContextRef ctx = CGBitmapContextCreate(NULL, aImage.size.width, aImage.size.height,
                                             CGImageGetBitsPerComponent(aImage.CGImage), 0,
                                             CGImageGetColorSpace(aImage.CGImage),
                                             CGImageGetBitmapInfo(aImage.CGImage));
    CGContextConcatCTM(ctx, transform);
    switch (aImage.imageOrientation) {
        case UIImageOrientationLeft:
        case UIImageOrientationLeftMirrored:
        case UIImageOrientationRight:
        case UIImageOrientationRightMirrored:
            // 90°-rotated images: width/height swap when drawing.
            CGContextDrawImage(ctx, CGRectMake(0,0,aImage.size.height,aImage.size.width), aImage.CGImage);
            break;

        default:
            CGContextDrawImage(ctx, CGRectMake(0,0,aImage.size.width,aImage.size.height), aImage.CGImage);
            break;
    }

    // And now we just create a new UIImage from the drawing context
    CGImageRef cgimg = CGBitmapContextCreateImage(ctx);
    UIImage *img = [UIImage imageWithCGImage:cgimg];
    CGContextRelease(ctx);
    CGImageRelease(cgimg);
    return img;
}

/// iPad rotation handling: the raw capture orientation depends on which
/// camera is active, so re-tag accordingly before redrawing upright.
- (UIImage *)pad_fixOrientation:(UIImage *)aImage{
    AVCaptureDevicePosition position = [[self.captureDeviceInput device] position];
    
    // Front camera frames arrive flipped; back camera frames are already up.
    UIImageOrientation orientation =
        (position == AVCaptureDevicePositionFront) ? UIImageOrientationDown
                                                   : UIImageOrientationUp;
    UIImage *reoriented = [UIImage imageWithCGImage:aImage.CGImage scale:1.0f orientation:orientation];
    return [self fixOrientation:reoriented];
}

#pragma mark - 图片裁剪
#pragma mark - 图片裁剪
/// Crops the captured photo down to clipArea (expressed in preview-layer
/// coordinates) by first scaling the photo to the preview size.
- (UIImage *)croppedImage:(UIImage *)image clipArea:(CGRect)clipArea{
    // iPad has its own orientation-aware variant.
    if (OcrPisPad) {
        return [self pad_croppedImage:image clipArea:clipArea];
    }
    
    // Resize (AspectFill, via SDWebImage) so clipArea maps 1:1 onto the image.
    CGSize previewSize = self.previewLayer.frame.size;
    UIImage *scaledImage = [image sd_resizedImageWithSize:previewSize scaleMode:SDImageScaleModeAspectFill];
    
    // Cut out the viewfinder region (SDWebImage crop helper).
    CGRect cropRect = CGRectMake(clipArea.origin.x, clipArea.origin.y, clipArea.size.width, clipArea.size.height);
    UIImage *targetImage = [scaledImage sd_croppedImageWithRect:cropRect];
    
    // Re-tag the orientation to compensate for the sensor rotation on iPhone.
    return [UIImage imageWithCGImage:targetImage.CGImage scale:1.0f orientation:UIImageOrientationLeft];
}

/// iPad variant of croppedImage:clipArea:. Orientation is normalized first,
/// then the photo is scaled to the preview size and cropped to clipArea.
- (UIImage *)pad_croppedImage:(UIImage *)image clipArea:(CGRect)clipArea{
    // Normalize orientation before any geometry math.
    image = [self pad_fixOrientation:image];
    
    // Resize (AspectFill, via SDWebImage) so clipArea maps 1:1 onto the image.
    CGSize previewSize = self.previewLayer.frame.size;
    UIImage *scaledImage = [image sd_resizedImageWithSize:previewSize scaleMode:SDImageScaleModeAspectFill];
    
    // Cut out the viewfinder region; no extra rotation is needed on iPad.
    CGRect cropRect = CGRectMake(clipArea.origin.x, clipArea.origin.y, clipArea.size.width, clipArea.size.height);
    return [scaledImage sd_croppedImageWithRect:cropRect];
}






@end
