//
//  TPScanViewController.m
//  OCR_Tesseract_Demo
//
//  Created by 谭鹏 on 2017/7/6.
//  Copyright © 2017年 谭鹏. All rights reserved.
//

#import "TPScanViewController.h"
#import <AVFoundation/AVFoundation.h>
#import <Masonry.h>
#import <TesseractOCR/TesseractOCR.h>
@interface TPScanViewController () <AVCaptureVideoDataOutputSampleBufferDelegate>
/// Preview container for camera output. NOTE(review): currently unused — the
/// preview layer is added directly to self.view in -viewDidLoad.
@property (nonatomic, strong) UIView *viewPreView;
/// Red-bordered overlay marking the recognition region of interest.
@property (nonatomic, strong) UIView *snapView;
/// Capture pipeline owning the camera input and the video-data output.
@property (nonatomic, strong) AVCaptureSession *captureSession;
@end

@implementation TPScanViewController
/// Builds the capture pipeline (camera input -> BGRA video-data output on a
/// private serial queue) and the red recognition-frame overlay.
/// The session is started later, in -viewDidLoad.
- (instancetype)init
{
    self = [super init];
    if (self) {
        NSError *error = nil;

        AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
        // Check the return value, not the error pointer: input is nil when
        // there is no camera (simulator) or access was denied.
        if (!input) {
            NSLog(@"Failed to create camera input: %@", error);
        }

        AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
        // BGRA frames are what -imageFromSampleBuffer: expects
        // (kCGBitmapByteOrder32Little | premultiplied-first alpha).
        output.videoSettings = @{
            (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)
        };
        // Deliver sample buffers on a private serial queue so the main
        // thread stays free for UI work.
        dispatch_queue_t sampleQueue = dispatch_queue_create("com.tp.scan.samplebuffer", DISPATCH_QUEUE_SERIAL);
        [output setSampleBufferDelegate:self queue:sampleQueue];

        _captureSession = [[AVCaptureSession alloc] init];
        _captureSession.sessionPreset = AVCaptureSessionPresetMedium;
        // Guard with canAdd…: -addInput:/-addOutput: raise an exception when
        // handed nil or an incompatible connection.
        if (input && [_captureSession canAddInput:input]) {
            [_captureSession addInput:input];
        }
        if ([_captureSession canAddOutput:output]) {
            [_captureSession addOutput:output];
        }

        // Red-bordered overlay marking the OCR region of interest.
        _snapView = [[UIView alloc] init];
        _snapView.layer.borderColor = [UIColor redColor].CGColor;
        _snapView.layer.borderWidth = 1;
    }
    return self;
}
/// Installs the live camera preview layer behind the recognition overlay and
/// starts the capture session.
- (void)viewDidLoad {
    [super viewDidLoad];
    AVCaptureVideoPreviewLayer *previewLayer =
        [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession];
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    // Use bounds, not frame: a sublayer's frame is expressed in the host
    // layer's own coordinate space, and self.view.frame lives in the
    // superview's space (wrong for any non-zero origin).
    previewLayer.frame = self.view.bounds;
    [self.view.layer addSublayer:previewLayer];

    [self.view addSubview:_snapView];

    // -startRunning blocks until the session is up; Apple recommends calling
    // it off the main thread to avoid stalling UI.
    dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
        [self.captureSession startRunning];
    });
}
/// Positions the recognition frame (80 x 500 pt, centered in the view).
- (void)viewWillAppear:(BOOL)animated {
    [super viewWillAppear:animated];
    // Use remake, not make: -viewWillAppear: can run many times, and
    // mas_makeConstraints would stack duplicate constraints on each pass.
    [_snapView mas_remakeConstraints:^(MASConstraintMaker *make) {
        make.size.mas_equalTo(CGSizeMake(80, 500));
        make.center.mas_equalTo(self.view);
    }];
}
// This controller caches no recreatable resources; simply forward the
// memory warning to UIKit.
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
}
#pragma mark - delegate
/// Called on the private sample-buffer queue for every captured video frame.
/// The OCR pipeline below is currently disabled; only a debug trace remains.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
#ifdef DEBUG
    // Per-frame logging is far too noisy for release builds.
    NSLog(@"--->");
#endif
    // TODO: re-enable the recognition pipeline. When doing so, note:
    //  - do NOT allocate a fresh NSOperationQueue per frame (as the draft
    //    below does) — reuse one serial queue and skip frames while an
    //    operation is in flight, or OCR work piles up faster than it finishes;
    //  - operation.tesseract.rect expects image coordinates, while
    //    _snapView.frame is in view coordinates — convert first.
//    UIImage * image = [self imageFromSampleBuffer:sampleBuffer];
//
//    G8RecognitionOperation *operation = [[G8RecognitionOperation alloc] initWithLanguage:@"eng"];
//
//    // Configure inner G8Tesseract object as described before
//    operation.tesseract.charWhitelist = @"1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ";
//    operation.tesseract.image = [image g8_blackAndWhite];
//    operation.tesseract.maximumRecognitionTime = 2;
//    operation.tesseract.rect = _snapView.frame;
//    operation.tesseract.engineMode = G8OCREngineModeCubeOnly;
//    operation.tesseract.pageSegmentationMode = G8PageSegmentationModeSingleLine;
//
//    // Setup the recognitionCompleteBlock to receive the Tesseract object
//    // after text recognition. It will hold the recognized text.
//    operation.recognitionCompleteBlock = ^(G8Tesseract *recognizedTesseract) {
//        // Retrieve the recognized text upon completion
//        NSString * str = [[recognizedTesseract recognizedText] stringByReplacingOccurrencesOfString:@" " withString:@""];
//        NSLog(@"⚡️%@⚡️", str);
//    };
//
//    // Add operation to queue
//    NSOperationQueue *queue = [[NSOperationQueue alloc] init];
//    [queue addOperation:operation];
}


#pragma mark - private
// OCR: process an image and return the recognized text.
/// Stub — intended to run OCR over `image`; currently unimplemented and
/// always returns nil.
/// NOTE(review): the selector `image:` does not describe the action; consider
/// renaming to e.g. `recognizedTextFromImage:` when implementing it.
-(NSString *) image:(UIImage*)image{

    return nil;
}

/// Creates a UIImage from a captured video sample buffer.
/// Assumes kCVPixelFormatType_32BGRA frames, matching the videoSettings
/// configured on the capture output in -init.
/// @param sampleBuffer A frame delivered by AVCaptureVideoDataOutput.
/// @return The decoded frame, or nil if the buffer carries no image data or
///         the pixel geometry is unsupported by Core Graphics.
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (imageBuffer == NULL) {
        // Not a video frame (e.g. metadata-only buffer) — nothing to decode,
        // and the lock/getter calls below would crash on NULL.
        return nil;
    }

    // Lock read-only while reading pixel data; the same flags must be passed
    // to the matching unlock call.
    CVPixelBufferLockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);

    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Little-endian 32-bit with premultiplied-first alpha is the Core
    // Graphics layout corresponding to 32BGRA pixel buffers.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                                 bytesPerRow, colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);

    // The pixel data has been consumed by context creation; unlock before the
    // early-return path below.
    CVPixelBufferUnlockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
    CGColorSpaceRelease(colorSpace);

    if (context == NULL) {
        // Unsupported geometry/format — don't hand NULL to
        // CGBitmapContextCreateImage.
        return nil;
    }

    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    CGContextRelease(context);

    UIImage *image = [UIImage imageWithCGImage:quartzImage];
    CGImageRelease(quartzImage);

    return image;
}
@end
