//
//  ScanPhotoViewController.m
//  SafeFoodManagerDemo
//
//  Created by bob on 2017/10/27.
//  Copyright © 2017年 bob. All rights reserved.
//

#import "ScanPhotoViewController.h"

@interface ScanPhotoViewController ()


@property (nonatomic, strong) UIView *scanWindow;
@property (nonatomic, strong) UIImageView *scanNetImageView;


@end

@implementation ScanPhotoViewController

-(void)viewWillAppear:(BOOL)animated
{
    // Forward the actual `animated` flag to super instead of hard-coding YES,
    // so the appearance transition matches what the caller requested.
    [super viewWillAppear:animated];

    // Restart (or start) the scan-line sweep whenever the view reappears.
    [self resumeAnimation];
}

- (void)viewDidLoad {
    [super viewDidLoad];

    self.view.clipsToBounds = YES;

    // Dimmed overlay with a transparent square cut-out for the scan area.
    [self setupMaskView];
    // Hint label placed below the scan window.
    [self setupTipTitleView];
    // Navigation bar items (flash toggle).
    [self setupNavView];
    // Scan window with its four corner markers.
    [self setupScanWindowView];
    // Capture session, metadata output and camera preview layer.
    [self setupCamera];
}


// Builds the semi-transparent black overlay with a square "window" punched
// out over the scan area, using an even-odd filled CAShapeLayer.
- (void)setupMaskView
{
    UIView *mask = [[UIView alloc] init];
    mask.frame = CGRectMake(0, 0, SCREEN_WIDTH, SCREEN_HEIGHT - 64);
    [self.view addSubview:mask];

    CAShapeLayer *cropLayer = [[CAShapeLayer alloc] init];

    // Path containing both the full bounds and the inner crop rect; the
    // even-odd fill rule leaves the inner rect unfilled (transparent).
    CGMutablePathRef path = CGPathCreateMutable();
    CGRect cropRect = CGRectMake(30, (SCREEN_HEIGHT-120-SCREEN_WIDTH+60)/2.0, SCREEN_WIDTH-60, SCREEN_WIDTH-60);
    CGPathAddRect(path, nil, mask.bounds);
    CGPathAddRect(path, nil, cropRect);

    [cropLayer setFillRule:kCAFillRuleEvenOdd];
    [cropLayer setPath:path];
    [cropLayer setFillColor:[[UIColor colorWithRed:0 green:0 blue:0 alpha:0.7] CGColor]];

    // CGPathRef is a Core Foundation object and is NOT managed by ARC;
    // the layer retains its own reference, so release ours to fix the leak
    // present in the original code.
    CGPathRelease(path);

    [mask.layer addSublayer:cropLayer];
}

-(void)setupTipTitleView{
    // Instructional label positioned just below the scan window.
    CGRect hintFrame = CGRectMake(0, SCREEN_HEIGHT/2.0-70+SCREEN_WIDTH/2.0, SCREEN_WIDTH, 21);
    UILabel *hintLabel = [[UILabel alloc] initWithFrame:hintFrame];
    hintLabel.text = @"将取景框对准二维码，即可自动扫描";
    hintLabel.textColor = [UIColor whiteColor];
    hintLabel.textAlignment = NSTextAlignmentCenter;
    hintLabel.lineBreakMode = NSLineBreakByWordWrapping;
    hintLabel.numberOfLines = 2;
    hintLabel.font = [UIFont systemFontOfSize:12];
    hintLabel.backgroundColor = [UIColor clearColor];
    [self.view addSubview:hintLabel];
}

// Adds the flash/torch toggle button to the navigation bar.
-(void)setupNavView{
    
    // Right-hand navigation item wired to openFlash:.
    // NOTE(review): addNavigationItemWithImageNames:... is declared elsewhere
    // (presumably a base-class helper) — `isLeft:NO` appears to place the
    // item on the right; confirm against its implementation.
    [self addNavigationItemWithImageNames:@[@"flash"] isLeft:NO target:self action:@selector(openFlash:) tags:nil];

}


// Creates the square scan window (inset 30pt from each side) and decorates
// it with four corner-marker buttons (images scan_1 … scan_4).
- (void)setupScanWindowView
{
    CGFloat sideLength = SCREEN_WIDTH - 30 * 2;
    _scanWindow = [[UIView alloc] initWithFrame:CGRectMake(30, (SCREEN_HEIGHT-120-sideLength)/2.0, sideLength, sideLength)];
    _scanWindow.clipsToBounds = YES;
    [self.view addSubview:_scanWindow];

    // The sweeping "net" image; it is attached and animated in resumeAnimation.
    _scanNetImageView = [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"scan_net"]];

    // Corner markers, in order: top-left, top-right, bottom-left, bottom-right.
    CGFloat markerSize = 18;
    NSArray<NSString *> *markerImages = @[@"scan_1", @"scan_2", @"scan_3", @"scan_4"];
    CGPoint markerOrigins[4] = {
        CGPointMake(0, 0),
        CGPointMake(sideLength - markerSize, 0),
        CGPointMake(0, sideLength - markerSize),
        CGPointMake(sideLength - markerSize, sideLength - markerSize),
    };

    for (NSUInteger i = 0; i < markerImages.count; i++) {
        CGRect markerFrame = CGRectMake(markerOrigins[i].x, markerOrigins[i].y, markerSize, markerSize);
        UIButton *marker = [[UIButton alloc] initWithFrame:markerFrame];
        [marker setImage:[UIImage imageNamed:markerImages[i]] forState:UIControlStateNormal];
        [_scanWindow addSubview:marker];
    }
}




// Configures the AVFoundation capture pipeline: camera device -> input ->
// session -> metadata output -> preview layer, then starts scanning.
- (void)setupCamera
{
    // Device: the default rear video camera.
    _device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    // Input: surface creation errors instead of passing error:nil as the
    // original did — on the simulator or with camera permission denied this
    // fails, and continuing with a nil input scanned nothing silently.
    NSError *inputError = nil;
    _input = [AVCaptureDeviceInput deviceInputWithDevice:_device error:&inputError];
    if (!_input) {
        NSLog(@"Failed to create camera input: %@", inputError);
        return;
    }

    // Output: machine-readable-code metadata, delivered on the main queue
    // (AVCaptureMetadataOutputObjectsDelegate callbacks).
    _output = [[AVCaptureMetadataOutput alloc] init];
    [_output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];

    // Restrict recognition to the visible scan window. rectOfInterest uses
    // the normalized, axis-swapped metadata coordinate space — see
    // getScanCrop:readerViewBounds:.
    CGRect scanCrop = [self getScanCrop:_scanWindow.bounds readerViewBounds:self.view.frame];
    _output.rectOfInterest = scanCrop;

    // Session: wire input and output together at high preset quality.
    _session = [[AVCaptureSession alloc] init];
    [_session setSessionPreset:AVCaptureSessionPresetHigh];

    if ([_session canAddInput:self.input]) {
        [_session addInput:self.input];
    }
    if ([_session canAddOutput:self.output]) {
        [_session addOutput:self.output];
    }

    // Supported symbologies; must be assigned after the output has joined
    // the session (availableMetadataObjectTypes is empty before that).
    _output.metadataObjectTypes = @[AVMetadataObjectTypeUPCECode,
                                    AVMetadataObjectTypeCode39Code,
                                    AVMetadataObjectTypeCode39Mod43Code,
                                    AVMetadataObjectTypeCode93Code,
                                    AVMetadataObjectTypeCode128Code,
                                    AVMetadataObjectTypeQRCode,
                                    AVMetadataObjectTypeEAN13Code,
                                    AVMetadataObjectTypeEAN8Code,
                                    AVMetadataObjectTypeITF14Code,
                                    AVMetadataObjectTypeInterleaved2of5Code];

    // Preview layer inserted behind every other sublayer so the mask,
    // corners and hint label stay visible on top of the camera feed.
    _prelayer = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
    _prelayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    _prelayer.frame = self.view.layer.bounds;
    [self.view.layer insertSublayer:self.prelayer atIndex:0];

    // Start capturing. NOTE(review): startRunning blocks the calling thread;
    // consider dispatching to a background queue if launch stutter is seen.
    [_session startRunning];
}


#pragma mark AVCaptureMetadataOutputObjectsDelegate
// Delegate callback: the metadata output recognized one or more codes.
// Stops the session, extracts the decoded string and routes it either
// through the backend (Scantype == 1) or straight back to the caller.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
    // Stop scanning as soon as we have a candidate result.
    [_session stopRunning];

    NSString *stringValue;

    if ([metadataObjects count] > 0)
    {
        AVMetadataMachineReadableCodeObject *metadataObject = [metadataObjects firstObject];
        stringValue = metadataObject.stringValue;

        NSLog(@"%@",stringValue);
        [self resumeAnimation];

        if (self.Scantype == 1) {
            // Resolve the scanned URL through the backend before returning.
            [self RequrlWithStr:stringValue];

        } else {
            // Guard the callback: invoking a nil block crashes. The original
            // called _resultBlock unconditionally.
            if (_resultBlock) {
                _resultBlock(stringValue);
            }
            [self disMiss];
        }
    } else {
        // Nothing decodable in the frame — just leave the screen.
        [self disMiss];
    }
}



#pragma mark-> Scan-window to rectOfInterest mapping
// Converts the scan window rect into the normalized coordinate space used by
// AVCaptureMetadataOutput.rectOfInterest. Width/height and x/y deliberately
// trade places below — the metadata space is rotated relative to UIKit in
// portrait orientation (presumably landscape-native; verified behavior of
// the original code is preserved exactly).
-(CGRect)getScanCrop:(CGRect)rect readerViewBounds:(CGRect)readerViewBounds
{
    CGFloat readerW = CGRectGetWidth(readerViewBounds);
    CGFloat readerH = CGRectGetHeight(readerViewBounds);

    CGFloat normalizedX = (readerH - CGRectGetHeight(rect)) / 2 / readerH;
    CGFloat normalizedY = (readerW - CGRectGetWidth(rect)) / 2 / readerW;
    CGFloat normalizedW = CGRectGetHeight(rect) / readerH;
    CGFloat normalizedH = CGRectGetWidth(rect) / readerW;

    return CGRectMake(normalizedX, normalizedY, normalizedW, normalizedH);
}

#pragma mark-> Flash button action
// Toggles the button's selected state and drives the torch from it.
-(void)openFlash:(UIButton*)button{

    NSLog(@"闪光灯");
    button.selected = !button.selected;
    // The original branched on button.selected just to pass YES/NO;
    // the flag itself is the argument.
    [self turnTorchOn:button.selected];
}

#pragma mark-> Torch control
// Enables or disables the camera torch (and flash mode).
// @param on YES to turn the torch on, NO to turn it off.
- (void)turnTorchOn:(BOOL)on
{
    Class captureDeviceClass = NSClassFromString(@"AVCaptureDevice");
    if (captureDeviceClass == nil) {
        return;
    }

    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (![device hasTorch] || ![device hasFlash]) {
        return;
    }

    // lockForConfiguration: can fail (e.g. another client holds the lock).
    // The original passed nil and ignored the return value; configuring the
    // device without holding the lock raises an exception.
    NSError *lockError = nil;
    if (![device lockForConfiguration:&lockError]) {
        NSLog(@"Could not lock camera for configuration: %@", lockError);
        return;
    }

    if (on) {
        [device setTorchMode:AVCaptureTorchModeOn];
        [device setFlashMode:AVCaptureFlashModeOn];
    } else {
        [device setTorchMode:AVCaptureTorchModeOff];
        [device setFlashMode:AVCaptureFlashModeOff];
    }
    [device unlockForConfiguration];
}

#pragma mark-> 返回
// Leaves the scanner by popping this controller off the navigation stack;
// called after a successful scan, an empty result, or a backend error.
- (void)disMiss
{
    [self.navigationController popViewControllerAnimated:YES];
}


#pragma mark Resume / start the scan-line animation
// Resumes a paused scan-line animation, or creates and starts it on first
// call. NOTE(review): the resume branch assumes the layer was paused
// elsewhere by setting speed to 0 with a timeOffset — no pause code is
// visible in this file chunk; confirm the pairing.
- (void)resumeAnimation
{
    CAAnimation *anim = [_scanNetImageView.layer animationForKey:@"translationAnimation"];
    if (anim) {
        // 1. The timeOffset recorded at pause time tells us where we stopped.
        CFTimeInterval pausedTime = _scanNetImageView.layer.timeOffset;
        // 2. Re-anchor the animation's begin time against the media clock so
        //    playback continues from the paused position.
        CFTimeInterval beginTime = CACurrentMediaTime() - pausedTime;

        // 3. Clear the offset and 4. restore normal playback speed.
        [_scanNetImageView.layer setTimeOffset:0.0];
        [_scanNetImageView.layer setBeginTime:beginTime];
        [_scanNetImageView.layer setSpeed:1.0];

    } else {
        // First run: size the net image to the scan window and sweep it
        // top-to-bottom forever. The original declared the identical
        // SCREEN_WIDTH - 30 * 2 value in two separate locals; one suffices.
        CGFloat sweepDistance = SCREEN_WIDTH - 30 * 2;
        CGFloat netWidth = _scanWindow.frame.size.width;

        // Start just above the window so the first sweep enters from the top.
        _scanNetImageView.frame = CGRectMake(0, -sweepDistance, netWidth, sweepDistance);

        CABasicAnimation *scanNetAnimation = [CABasicAnimation animation];
        scanNetAnimation.keyPath = @"transform.translation.y";
        scanNetAnimation.byValue = @(sweepDistance);
        scanNetAnimation.duration = 2.0;
        scanNetAnimation.repeatCount = MAXFLOAT;
        [_scanNetImageView.layer addAnimation:scanNetAnimation forKey:@"translationAnimation"];
        [_scanWindow addSubview:_scanNetImageView];
    }
}


// Resolves a scanned URL through the backend download endpoint and returns
// the server's data payload to the caller via _resultBlock.
// @param urlstr The raw string payload decoded from the scanned code.
-(void)RequrlWithStr:(NSString*)urlstr
{
    [self showLoadingAnimation];

    BaseReqApi *api = [[BaseReqApi alloc] initWithRequestUrl:@"/index.php/FoodCheckApi/scanDownloadImg.json" andrequestTime:5 andParams:@{@"url":urlstr} andRequestMethod:YTKRequestMethodPOST andCache:NO andCacheTime:0 andPostToken:YES];

    // `api` is a local, so the block capturing self extends self's lifetime
    // for the duration of the request but creates no retain cycle.
    [api StarRequest:^(TBResponseStatus responseStatus, NSString *message, id responseObject) {

        [self stopLoadingAnimation];

        NSDictionary *rdic = responseObject;

        if (responseStatus == 1) {
            // Guard the callback: invoking a nil block crashes. The original
            // called _resultBlock unconditionally.
            if (self->_resultBlock) {
                self->_resultBlock(rdic[@"data"]);
            }
            [self disMiss];

        } else {
            [MBProgressHUD showErrorMessage:message];

            // Give the error HUD a moment on screen before popping.
            dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.3 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
                [self disMiss];
            });
        }
    }];
}


// Xcode template override; no recreatable resources are cached here yet.
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

/*
#pragma mark - Navigation

// In a storyboard-based application, you will often want to do a little preparation before navigation
- (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender {
    // Get the new view controller using [segue destinationViewController].
    // Pass the selected object to the new view controller.
}
*/

@end
