//
//  ZWLScanView.m
//  ZWLScanView
//
//  Created by lwz on 2018/5/21.
//

#import "ZWLScanView.h"
#import <AudioToolbox/AudioToolbox.h>

// Class extension: private capture state, overlay layers, and the weakly-held
// delegate. Conforms to AVCaptureMetadataOutputObjectsDelegate to receive
// decoded machine-readable codes.
@interface ZWLScanView()<AVCaptureMetadataOutputObjectsDelegate>

// Capture pipeline: the session drives the camera; metadataOutput reports
// detected codes to this view on a background queue (see -setupCaptureSession).
@property (nonatomic, strong) AVCaptureSession *session;
@property (nonatomic, strong) AVCaptureMetadataOutput *metadataOutput;

// Semi-transparent dimming layer covering everything outside the scan region.
@property (nonatomic, strong) CAShapeLayer *maskLayer;
// Yellow corner markers outlining the scan region (all four corners are drawn
// into this single layer's path in -layoutSubviews).
@property (nonatomic, strong) CAShapeLayer *regionOutLineLayer;
// NOTE(review): the four per-corner layers below are declared but never
// assigned or added anywhere in this file — candidates for removal.
@property (nonatomic, strong) CAShapeLayer *leftTopLayer;
@property (nonatomic, strong) CAShapeLayer *leftBottomLayer;
@property (nonatomic, strong) CAShapeLayer *rightTopLayer;
@property (nonatomic, strong) CAShapeLayer *rightBottomLayer;

// The moving red scan line, animated between the two rects below.
@property (nonatomic, strong) CAShapeLayer *controlLayer;
// Animation endpoints for the scan line, computed in -layoutSubviews.
@property (nonatomic, assign) CGRect scanLineRectFrom;
@property (nonatomic, assign) CGRect scanLineRectTo;

// Weak to avoid a retain cycle with the owning controller.
@property (nonatomic, weak) id<ZWLScanViewDelegate> delegate;

@end

@implementation ZWLScanView

/// Programmatic initializer; installs the static overlay layers once.
- (instancetype)initWithFrame:(CGRect)frame {
    if ((self = [super initWithFrame:frame])) {
        [self setupSubLayers];
    }
    return self;
}

/// Back this view with an AVCaptureVideoPreviewLayer so the camera feed is
/// rendered by the view's own layer (no separate preview layer to manage).
+ (Class)layerClass {
    return [AVCaptureVideoPreviewLayer class];
}

/// Storyboard/xib path — mirrors -initWithFrame: so both creation routes
/// install the overlay layers exactly once.
- (void)awakeFromNib {
    [super awakeFromNib];
    [self setupSubLayers];
}

/// Creates the three static overlay layers. Insertion order is significant:
/// the dimming mask sits lowest, the red scan line above it, and the yellow
/// corner markers on top. Paths and frames are assigned later in
/// -layoutSubviews.
- (void)setupSubLayers {
    CAShapeLayer *dimmingMask = [CAShapeLayer layer];
    dimmingMask.opacity = 0.3f;
    dimmingMask.fillColor = [UIColor whiteColor].CGColor;
    self.maskLayer = dimmingMask;
    [self.layer addSublayer:dimmingMask];

    CAShapeLayer *scanLine = [CAShapeLayer layer];
    scanLine.fillColor = [UIColor redColor].CGColor;
    self.controlLayer = scanLine;
    [self.layer addSublayer:scanLine];

    CAShapeLayer *cornerMarkers = [CAShapeLayer layer];
    cornerMarkers.fillColor = [UIColor yellowColor].CGColor;
    self.regionOutLineLayer = cornerMarkers;
    [self.layer addSublayer:cornerMarkers];
}

/// Recomputes all bounds-dependent geometry: the capture region of interest,
/// the dimming mask with a clear cutout, the four corner markers, and the
/// start/end frames for the sweeping scan line used by -beginScan.
- (void)layoutSubviews {
    [super layoutSubviews];
    
    // Corner marker dimensions: each corner is an L shape built from one
    // long and one short rectangle.
    CGFloat cornerLong = 10.0f;
    CGFloat cornerShort = 4.0f;
    
    // Fractional insets of the scan region from the view edges
    // (10% horizontally, 20% vertically).
    CGFloat spaceXRate = 0.1f;
    CGFloat spaceYRate = 0.2f;
    
    CGFloat regionWidth = self.bounds.size.width * (1 - 2*spaceXRate);
    CGFloat regionHeight = self.bounds.size.height * (1 - 2*spaceYRate);
    CGRect centerRect = CGRectMake(self.bounds.size.width * spaceXRate, self.bounds.size.height * spaceYRate, regionWidth, regionHeight);
    
    
    // Restrict metadata detection to the visible scan region. Messaging a nil
    // metadataOutput (before -setupCaptureSession has run) is a harmless no-op.
    // NOTE(review): the layer-to-metadata rect conversion presumably needs a
    // configured session to be meaningful — confirm it is re-run afterwards.
    AVCaptureVideoPreviewLayer *layer = (AVCaptureVideoPreviewLayer *)self.layer;
    CGRect interestRect = [layer metadataOutputRectOfInterestForRect:centerRect];
    self.metadataOutput.rectOfInterest = interestRect;
    
    
    // Scan line: a thin vertical bar inset from the region edges; the two
    // rects below are the horizontal sweep's animation endpoints.
    CGFloat spaceToSuper = 15.0f;
    CGFloat scanLineWidth = 1.0f;
    CGFloat scanLineHeight = centerRect.size.height - 2 * spaceToSuper;
    CGFloat scanLineY = centerRect.origin.y + spaceToSuper;
    
    self.scanLineRectFrom = CGRectMake(centerRect.origin.x + spaceToSuper, scanLineY, scanLineWidth, scanLineHeight);
    self.scanLineRectTo = CGRectMake(centerRect.origin.x + centerRect.size.width - spaceToSuper, scanLineY, scanLineWidth, scanLineHeight);


    // Dimming mask: full-bounds rect with the scan region punched out by
    // appending the reversed center path (even-odd style cutout).
    UIBezierPath *maskPath = [UIBezierPath bezierPathWithRect:self.bounds];
    UIBezierPath *centerPath = [UIBezierPath bezierPathWithRect:centerRect];
    [maskPath appendPath:[centerPath bezierPathByReversingPath]];
    
    self.maskLayer.frame = self.bounds;
    self.maskLayer.path = maskPath.CGPath;
    

    // Corner markers: coordinates below are in regionOutLineLayer's own
    // space (its frame is set to centerRect at the end).
    UIBezierPath *regionOutLinePath = [UIBezierPath bezierPath];
    
    // Top-left corner
    UIBezierPath *leftTopPath1 = [UIBezierPath bezierPathWithRect:CGRectMake(0, 0, cornerLong, cornerShort)];
    UIBezierPath *leftTopPath2 = [UIBezierPath bezierPathWithRect:CGRectMake(0, 0, cornerShort, cornerLong)];
    [regionOutLinePath appendPath:leftTopPath1];
    [regionOutLinePath appendPath:leftTopPath2];
    
    // Bottom-left corner
    UIBezierPath *leftBottomPath1 = [UIBezierPath bezierPathWithRect:CGRectMake(0, regionHeight - cornerLong, cornerShort, cornerLong)];
    UIBezierPath *leftBottomPath2 = [UIBezierPath bezierPathWithRect:CGRectMake(0, regionHeight - cornerShort, cornerLong, cornerShort)];
    [regionOutLinePath appendPath:leftBottomPath1];
    [regionOutLinePath appendPath:leftBottomPath2];
    
    
    // Top-right corner
    UIBezierPath *rightTopPath1 = [UIBezierPath bezierPathWithRect:CGRectMake(regionWidth - cornerLong, 0, cornerLong, cornerShort)];
    UIBezierPath *rightTopPath2 = [UIBezierPath bezierPathWithRect:CGRectMake(regionWidth - cornerShort, 0, cornerShort, cornerLong)];
    [regionOutLinePath appendPath:rightTopPath1];
    [regionOutLinePath appendPath:rightTopPath2];
    
    // Bottom-right corner
    UIBezierPath *rightBottomPath1 = [UIBezierPath bezierPathWithRect:CGRectMake(regionWidth - cornerLong, regionHeight - cornerShort, cornerLong, cornerShort)];
    UIBezierPath *rightBottomPath2 = [UIBezierPath bezierPathWithRect:CGRectMake(regionWidth - cornerShort, regionHeight - cornerLong, cornerShort, cornerLong)];
    [regionOutLinePath appendPath:rightBottomPath1];
    [regionOutLinePath appendPath:rightBottomPath2];
    
    self.regionOutLineLayer.path = regionOutLinePath.CGPath;
    self.regionOutLineLayer.frame = centerRect;
    
    // Only reposition the scan line mid-scan; while idle it is hidden and
    // -beginScan will place it when scanning starts.
    if (self.session.isRunning) {
        UIBezierPath *scanLinePath = [UIBezierPath bezierPathWithRect:CGRectMake(0, 0, scanLineWidth, scanLineHeight)];
        self.controlLayer.path = scanLinePath.CGPath;
        self.controlLayer.frame = self.scanLineRectFrom;
    }
}

/// Builds the capture session: attaches it to this view's preview layer,
/// adds the default camera as input, and adds a metadata output configured
/// for every code type the device supports.
///
/// Fixes vs. the original:
///  - deviceInputWithDevice:error: can return nil (no camera / failure) and
///    is now nil-checked before use;
///  - every exit path balances -beginConfiguration with -commitConfiguration
///    (the original early-returned mid-configuration);
///  - the stray AVCaptureAudioDataOutput is removed — audio buffers play no
///    role in metadata scanning, it had no delegate, and adding it could
///    trigger an unnecessary microphone permission prompt.
- (void)setupCaptureSession {
    self.session = [[AVCaptureSession alloc] init];
    AVCaptureVideoPreviewLayer *previewLayer = (AVCaptureVideoPreviewLayer *)self.layer;
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    previewLayer.masksToBounds = YES;
    previewLayer.session = self.session;
    
    [self.session beginConfiguration];
    
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    
    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (input && [self.session canAddInput:input]) {
        [self.session addInput:input];
    } else {
        NSLog(@"add input failure");
        [self.session commitConfiguration];
        return;
    }
    
    self.metadataOutput = [[AVCaptureMetadataOutput alloc] init];
    if ([self.session canAddOutput:self.metadataOutput]) {
        [self.session addOutput:self.metadataOutput];
        
        // Delegate callbacks arrive on this background queue; the delegate
        // method hops back to main before touching the layer/UI.
        [self.metadataOutput setMetadataObjectsDelegate:self queue:dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)];
        // availableMetadataObjectTypes is only populated after the output
        // has been added to the session.
        [self.metadataOutput setMetadataObjectTypes:self.metadataOutput.availableMetadataObjectTypes];
    } else {
        NSLog(@"add output failure");
        [self.session commitConfiguration];
        return;
    }
    [self.session commitConfiguration];
}

/// Stores the delegate (weakly) and configures the capture session once the
/// app holds camera permission, requesting it first if it was never asked.
///
/// Fix vs. the original: -requestAccessForMediaType:'s completion handler is
/// invoked on an arbitrary queue, but -setupCaptureSession configures this
/// view's layer — so the call is now dispatched onto the main queue.
- (void)setupWithDelegate:(id<ZWLScanViewDelegate>)delegate {
    
    self.delegate = delegate;
    
    AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    switch (status) {
        case AVAuthorizationStatusNotDetermined:
        {
            NSLog(@"未向用户获取视频权限");
            [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
                
                if (granted) {
                    NSLog(@"用户允许");
                    // Completion handler runs on an arbitrary queue; layer
                    // work must happen on main.
                    dispatch_async(dispatch_get_main_queue(), ^{
                        [self setupCaptureSession];
                    });
                } else {
                    NSLog(@"用户已拒绝");
                }
            }];
        }
            break;

        case AVAuthorizationStatusRestricted:
            NSLog(@"用户受限，无法控制权限");
            break;
            
        case AVAuthorizationStatusAuthorized:
            NSLog(@"用户允许使用权限");
            
            [self setupCaptureSession];
            
            break;
        
        case AVAuthorizationStatusDenied:
            NSLog(@"用户拒绝使用权限");
            break;
            
        default:
            break;
    }
}

/// Starts scanning: shows the scan line, animates it sweeping across the
/// region, starts the capture session, and notifies the delegate. No-op when
/// a scan is already running.
///
/// Fixes vs. the original:
///  - "position.x" is a scalar key path, so fromValue/toValue must be
///    NSNumbers; the original wrapped whole CGRects in NSValue, which Core
///    Animation cannot interpolate along a scalar key path;
///  - repeatCount is a float — HUGE_VALF is the documented "repeat forever"
///    value (NSIntegerMax gets truncated through the float conversion).
- (void)beginScan {
    
    if ([self.session isRunning]) {
        return;
    }
    
    // Ensure scanLineRectFrom/To reflect the current bounds before animating.
    [self layoutIfNeeded];
    CABasicAnimation *animation = [CABasicAnimation animationWithKeyPath:@"position.x"];
    animation.duration = 2.0f;
    // The layer's frame is scanLineRectFrom, so its position.x is that rect's
    // mid-x; sweep to the mid-x of the destination rect.
    animation.fromValue = @(CGRectGetMidX(self.scanLineRectFrom));
    animation.toValue = @(CGRectGetMidX(self.scanLineRectTo));
    animation.removedOnCompletion = NO;
    animation.repeatCount = HUGE_VALF;
    animation.fillMode = kCAFillModeForwards;
    self.controlLayer.hidden = NO;
    [self.controlLayer addAnimation:animation forKey:nil];
    
    [self.session startRunning];
    
    if ([self.delegate respondsToSelector:@selector(scanViewDidBeginScan)]) {
        [self.delegate scanViewDidBeginScan];
    }
}

/// Stops an in-progress scan: freezes and hides the scan line, halts the
/// capture session, and notifies the delegate. No-op when not scanning.
- (void)endScan {
    
    if (![self.session isRunning]) {
        return;
    }
    
    // Tear down the sweep animation and hide the line within one transaction.
    [CATransaction begin];
    [self.controlLayer removeAllAnimations];
    self.controlLayer.hidden = YES;
    [CATransaction commit];
    
    [self.session stopRunning];
    
    // scanViewDidEndScan is optional on the delegate protocol.
    if ([self.delegate respondsToSelector:@selector(scanViewDidEndScan)]) {
        [self.delegate scanViewDidEndScan];
    }
}


#pragma mark - AVCaptureMetaObjectOutputDelegate
/// AVCaptureMetadataOutputObjectsDelegate — called on the background queue
/// chosen in -setupCaptureSession whenever codes are detected.
///
/// Fixes vs. the original:
///  - the preview-layer coordinate conversion and the delegate callback (which
///    will typically touch UI) now run on the main queue;
///  - the feedback sound plays only when a string was actually decoded (the
///    original beeped on every recognized frame, even with an empty payload);
///  - the method-local #define is replaced with a typed constant.
- (void)captureOutput:(AVCaptureOutput *)output didOutputMetadataObjects:(NSArray<__kindof AVMetadataObject *> *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
    
    if (metadataObjects.count == 0) {
        return;
    }
    
    AVMetadataObject *firstObject = metadataObjects.firstObject;
    dispatch_async(dispatch_get_main_queue(), ^{
        AVCaptureVideoPreviewLayer *previewLayer = (AVCaptureVideoPreviewLayer *)self.layer;
        
        // Convert from capture coordinates into the preview layer's space.
        AVMetadataObject *transformedObject = [previewLayer transformedMetadataObjectForMetadataObject:firstObject];
        if (![transformedObject isKindOfClass:[AVMetadataMachineReadableCodeObject class]]) {
            return;
        }
        
        NSString *payload = ((AVMetadataMachineReadableCodeObject *)transformedObject).stringValue;
        if (payload.length > 0) {
            [self.delegate scanView:self DidCaptureDataStr:payload];
            
            // System sound IDs: 1104 system, 1054 positive, 1053 negative.
            static const SystemSoundID kCaptureFeedbackSoundID = 1054;
            AudioServicesPlaySystemSound(kCaptureFeedbackSoundID);
        }
    });
}

@end
