//
//  BLCamBarcode.m
//  ITC_iOS
//
//  Created by Benz.L on 06/09/2018.
//  Copyright © 2018 Benz.L. All rights reserved.
//

#import "BLCamBarcode.h"

// Class extension: the capture pipeline is private implementation detail, and
// the metadata-delegate conformance is kept out of the public header.
@interface BLCamBarcode()<AVCaptureMetadataOutputObjectsDelegate>
@property (strong,nonatomic)AVCaptureDevice * device;   // default video camera
@property (strong,nonatomic)AVCaptureDeviceInput * input;   // camera -> session input
@property (strong,nonatomic)AVCaptureMetadataOutput * output;   // barcode metadata output
@property (strong,nonatomic)AVCaptureSession * session;   // owns the capture pipeline
@property (strong,nonatomic)AVCaptureVideoPreviewLayer * preview;   // NOTE(review): appears unused; previewLayer property is used instead — confirm
@end

@implementation BLCamBarcode

// Teardown. The original `NSLog();` does not compile — NSLog requires a
// format string. Keep a lifecycle trace, but only in debug builds.
- (void)dealloc {
#ifdef DEBUG
    NSLog(@"%@ dealloc", NSStringFromClass([self class]));
#endif
}

/// Designated initializer: builds the capture pipeline
/// (device -> input -> session -> metadata output).
/// @param metadataObjectTypes Barcode symbologies to recognize; a nil or
///        empty array defaults to QR code only.
- (instancetype)initWithMetadataObjectTypes:(NSArray<AVMetadataObjectType> *)metadataObjectTypes {
    self = [super init];
    if (self) {
        if (!metadataObjectTypes.count) {
            metadataObjectTypes = @[AVMetadataObjectTypeQRCode];
        }
        // Device
        _device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

        // Input — capture the error instead of passing error:nil so failures
        // (no camera, missing permission, simulator) are visible in the log.
        NSError *inputError = nil;
        _input = [AVCaptureDeviceInput deviceInputWithDevice:_device error:&inputError];
        if (!_input) {
            NSLog(@"BLCamBarcode: failed to create device input: %@", inputError);
        }

        // Output — deliver metadata callbacks on the main queue.
        _output = [[AVCaptureMetadataOutput alloc] init];
        [_output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];

        // Session
        _session = [[AVCaptureSession alloc] init];
        [_session setSessionPreset:AVCaptureSessionPresetHigh];
        if (_input && [_session canAddInput:_input]) {
            [_session addInput:_input];
        }
        if ([_session canAddOutput:_output]) {
            [_session addOutput:_output];
        }

        // Must be assigned AFTER the output joins the session. Assigning a
        // type the output does not support raises NSInvalidArgumentException,
        // so intersect with availableMetadataObjectTypes first (empty on the
        // simulator / when the output could not be added).
        NSMutableArray<AVMetadataObjectType> *supported = [NSMutableArray array];
        for (AVMetadataObjectType type in metadataObjectTypes) {
            if ([_output.availableMetadataObjectTypes containsObject:type]) {
                [supported addObject:type];
            }
        }
        _output.metadataObjectTypes = supported;
    }
    return self;
}


#pragma mark - AVCaptureMetadataOutputObjectsDelegate
/// Delivered on the main queue (set in init). Forwards the first decoded
/// string to the barcodeResult callback, if one is installed.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
    if (metadataObjects.count == 0 || !_barcodeResult) {
        return;
    }
    // Only machine-readable code objects carry stringValue; guard the cast so
    // an unexpected metadata object type cannot send an unrecognized message.
    AVMetadataObject *object = metadataObjects.firstObject;
    if ([object isKindOfClass:[AVMetadataMachineReadableCodeObject class]]) {
        _barcodeResult(((AVMetadataMachineReadableCodeObject *)object).stringValue);
    }
}

/// Applies a zoom factor to the capture device.
/// Checks the lockForConfiguration: result (the original ignored it) and
/// clamps to the device-supported range, because assigning a value outside
/// [1, activeFormat.videoMaxZoomFactor] raises an exception.
- (void)setVideoZoomFactor:(CGFloat)videoZoomFactor {
    _videoZoomFactor = videoZoomFactor;
    NSError *error = nil;
    if ([_device lockForConfiguration:&error]) {  // lock the camera device
        CGFloat maxZoom = _device.activeFormat.videoMaxZoomFactor;
        _device.videoZoomFactor = MAX((CGFloat)1.0, MIN(videoZoomFactor, maxZoom));
        [_device unlockForConfiguration];
    } else {
        NSLog(@"BLCamBarcode: could not lock device for zoom: %@", error);
    }
}

/// Stores an externally supplied preview layer, attaches the capture session
/// to it, and configures aspect-fill rendering. A nil layer is only stored.
- (void)setPreviewLayer:(AVCaptureVideoPreviewLayer *)previewLayer {
    _previewLayer = previewLayer;
    if (!previewLayer) {
        return;
    }
    previewLayer.session = _session;
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
}

/// Installs the scan-window layer inside the preview layer and overlays a
/// half-transparent black mask covering everything outside the scan window.
- (void)setScanLayer:(CALayer *)scanLayer {
    _scanLayer = scanLayer;
    [_previewLayer addSublayer:scanLayer];
    // Normalized scan-window rect (origin flipped vertically) intended for
    // AVCaptureMetadataOutput.rectOfInterest — currently computed but unused.
    CGSize fullSize = _previewLayer.bounds.size;
    // NOTE(review): self.previewLayer and _previewLayer are the same layer,
    // so this convertRect: is an identity conversion — confirm intent.
    CGRect frame = [self.previewLayer convertRect:scanLayer.frame fromLayer:_previewLayer];
    frame = (CGRect){frame.origin.x/fullSize.width, (fullSize.height-CGRectGetMaxY(frame))/fullSize.height, frame.size.width/fullSize.width, frame.size.height/fullSize.height};
//    self.output.rectOfInterest = frame;
    
    // Build one path that traces the scan window's border, then the outer
    // preview bounds, so the winding fill leaves the window itself clear.
    UIBezierPath *path = [UIBezierPath bezierPath];
    [path moveToPoint:scanLayer.frame.origin];
    [path addLineToPoint:(CGPoint){CGRectGetMaxX(scanLayer.frame), CGRectGetMinY(scanLayer.frame)}];
    [path addLineToPoint:(CGPoint){CGRectGetMaxX(scanLayer.frame), CGRectGetMaxY(scanLayer.frame)}];
    [path addLineToPoint:(CGPoint){CGRectGetMinX(scanLayer.frame), CGRectGetMaxY(scanLayer.frame)}];
    // +0.001 offsets keep the joining segments from degenerating to a point.
    [path addLineToPoint:(CGPoint){CGRectGetMinX(scanLayer.frame), CGRectGetMinY(scanLayer.frame)+0.001}];
    [path addLineToPoint:(CGPoint){0,CGRectGetMinY(scanLayer.frame)+0.001}];
    [path addLineToPoint:(CGPoint){0, CGRectGetHeight(_previewLayer.bounds)}];
    [path addLineToPoint:(CGPoint){CGRectGetWidth(_previewLayer.bounds),  CGRectGetHeight(_previewLayer.bounds)}];
    [path addLineToPoint:(CGPoint){CGRectGetWidth(_previewLayer.bounds),  0}];
    [path addLineToPoint:(CGPoint){0, 0}];
    [path addLineToPoint:(CGPoint){0,CGRectGetMinY(scanLayer.frame)}];
    path.lineWidth = 1;
    [path closePath];
    // Render the path as a dimming mask over the preview.
    CAShapeLayer *shapeLayer = [CAShapeLayer layer];
    shapeLayer.frame = _previewLayer.bounds;
    shapeLayer.bounds = _previewLayer.bounds;
    shapeLayer.path = path.CGPath;
//    shapeLayer.strokeColor = NumberColor().CGColor;
    shapeLayer.fillColor = [UIColor.blackColor colorWithAlphaComponent:0.5].CGColor;
    [_previewLayer addSublayer:shapeLayer];
}

/// The barcode symbologies currently configured on the metadata output.
- (NSArray<AVMetadataObjectType> *)metadataObjectTypes {
    return _output.metadataObjectTypes;
}

/// Starts the capture session; the camera begins streaming frames and the
/// metadata delegate starts receiving decoded codes.
- (void)startScan {
    [self.session startRunning];
}
/// Stops the capture session; no further delegate callbacks are delivered.
- (void)stop {
    [self.session stopRunning];
}

@end


@implementation BLCamBarcode(QR)

/*
 Adapted from:
 Author: WWest
 Link: https://www.jianshu.com/p/1919b240387b
 Source: Jianshu (jianshu.com)
 Copyright remains with the author; any form of republication requires the
 author's permission and attribution of the source.
 */

#pragma mark - imageToTransparent
// CGDataProvider release callback: frees the pixel buffer that
// TransparentImage() malloc'ed and handed to CGDataProviderCreateWithData.
void ProviderReleaseData (void *info, const void *data, size_t size){
    free((void*)data);
}
/// Recolors the dark pixels of an image (the QR modules) and makes the light
/// pixels transparent.
/// @param red,green,blue Target channel values in the 0–255 range (they are
///        written directly into single bytes).
/// @return The recolored image, or the original image unchanged if a buffer
///         or bitmap context could not be created (the original crashed on a
///         NULL malloc/context).
UIImage *TransparentImage(UIImage *image, CGFloat red, CGFloat green, CGFloat blue) {
    // NOTE(review): uses point size, not pixel size — for @2x/@3x-scale
    // images the backing CGImage is larger; confirm callers pass scale-1
    // images (QRImageWithContent does).
    const int imageWidth = image.size.width;
    const int imageHeight = image.size.height;
    size_t bytesPerRow = imageWidth * 4;
    uint32_t *rgbImageBuf = (uint32_t *)malloc(bytesPerRow * imageHeight);
    if (!rgbImageBuf) {
        return image;  // allocation failed; fall back to the untouched image
    }
    // Create a context backed by our own buffer so we can edit raw pixels.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(rgbImageBuf, imageWidth, imageHeight, 8, bytesPerRow, colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipLast);
    if (!context) {
        CGColorSpaceRelease(colorSpace);
        free(rgbImageBuf);
        return image;
    }
    CGContextDrawImage(context, CGRectMake(0, 0, imageWidth, imageHeight), image.CGImage);
    // Traverse pixels: dark pixels receive the requested color; light pixels
    // get their first byte zeroed, which reads as alpha = 0 under the
    // kCGImageAlphaLast interpretation used when re-wrapping below.
    int pixelNum = imageWidth * imageHeight;
    uint32_t *pCurPtr = rgbImageBuf;
    for (int i = 0; i < pixelNum; i++, pCurPtr++){
        if ((*pCurPtr & 0xFFFFFF00) < 0x99999900){
            // change color
            uint8_t *ptr = (uint8_t *)pCurPtr;
            ptr[3] = red; //0~255
            ptr[2] = green;
            ptr[1] = blue;
        }else{
            uint8_t *ptr = (uint8_t *)pCurPtr;
            ptr[0] = 0;  // becomes the alpha byte -> transparent
        }
    }
    // Re-wrap the modified buffer as an image; ProviderReleaseData frees the
    // buffer when the data provider is destroyed, so no free() here.
    CGDataProviderRef dataProvider = CGDataProviderCreateWithData(NULL, rgbImageBuf, bytesPerRow * imageHeight, ProviderReleaseData);
    CGImageRef imageRef = CGImageCreate(imageWidth, imageHeight, 8, 32, bytesPerRow, colorSpace,
                                        kCGImageAlphaLast | kCGBitmapByteOrder32Little, dataProvider,
                                        NULL, true, kCGRenderingIntentDefault);
    CGDataProviderRelease(dataProvider);
    UIImage *resultUIImage = [UIImage imageWithCGImage:imageRef];
    // release
    CGImageRelease(imageRef);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    return resultUIImage;
}

/// Generates a QR-code image for the given string.
/// @param content The string to encode (UTF-8).
/// @param size Requested output edge length; the code is scaled up with
///        nearest-neighbor interpolation so the modules stay crisp.
/// @param color Optional fill color for the dark modules; nil keeps black.
/// @return The rendered image, or nil if the generator produced no output.
+ (UIImage *)QRImageWithContent:(NSString *)content size:(CGFloat)size fillColor:(UIColor *)color {
    // Need to convert the string to a UTF-8 encoded NSData object
    NSData *data = [content dataUsingEncoding:NSUTF8StringEncoding];
    // Create the filter, set the message content and error-correction level
    CIFilter *filter = [CIFilter filterWithName:@"CIQRCodeGenerator"];
    [filter setValue:data forKey:@"inputMessage"];
    [filter setValue:@"M" forKey:@"inputCorrectionLevel"];
    CIImage *cimage = filter.outputImage;
    if (!cimage) {
        return nil;  // generation failed (e.g. content too long to encode)
    }

    CGRect extent = CGRectIntegral(cimage.extent);
    CGFloat scale = MIN(size/CGRectGetWidth(extent), size/CGRectGetHeight(extent));
    // Draw the tiny CI output into a grayscale bitmap at the requested size.
    size_t width = CGRectGetWidth(extent) * scale;
    size_t height = CGRectGetHeight(extent) * scale;
    CGColorSpaceRef cs = CGColorSpaceCreateDeviceGray();
    CGContextRef bitmapRef = CGBitmapContextCreate(nil, width, height, 8, 0, cs, (CGBitmapInfo)kCGImageAlphaNone);
    CGColorSpaceRelease(cs);  // context holds its own reference; was leaked before
    CIContext *context = [CIContext contextWithOptions:nil];
    CGImageRef bitmapImage = [context createCGImage:cimage fromRect:extent];
    CGContextSetInterpolationQuality(bitmapRef, kCGInterpolationNone);
    CGContextScaleCTM(bitmapRef, scale, scale);
    CGContextDrawImage(bitmapRef, extent, bitmapImage);
    // Create an image with the contents of our bitmap
    CGImageRef scaledImage = CGBitmapContextCreateImage(bitmapRef);
    // Cleanup
    CGContextRelease(bitmapRef);
    CGImageRelease(bitmapImage);
    UIImage *image = [UIImage imageWithCGImage:scaledImage];
    CGImageRelease(scaledImage);  // was leaked before
    if (color) {
        // CGColorGetComponents yields components in 0–1, but TransparentImage
        // writes them straight into single bytes (0–255), so scale by 255 —
        // the original passed 0–1 and produced near-black output. Also
        // initialize R/G/B: they were read uninitialized for any color that
        // is not 4-component RGBA (e.g. grayscale blackColor has 2).
        CGFloat R = 0, G = 0, B = 0;
        CGColorRef colorRef = color.CGColor;
        const CGFloat *components = CGColorGetComponents(colorRef);
        size_t numComponents = CGColorGetNumberOfComponents(colorRef);
        if (numComponents == 4) {        // RGB + alpha
            R = components[0] * 255.0;
            G = components[1] * 255.0;
            B = components[2] * 255.0;
        } else if (numComponents == 2) { // grayscale (white + alpha)
            R = G = B = components[0] * 255.0;
        }
        image = TransparentImage(image, R, G, B);
    }
    return image;
}

/// Decodes the first QR code found in the given image.
/// @param image The image to scan.
/// @return The decoded message string, or nil when no QR code is detected.
+ (NSString *)contentFromQRImage:(UIImage *)image {
    NSData *pngData = UIImagePNGRepresentation(image);
    CIImage *ciimage = [CIImage imageWithData:pngData];
    if (!ciimage) {
        return nil;
    }
    // Software renderer keeps detection deterministic across devices.
    CIContext *softwareContext = [CIContext contextWithOptions:@{kCIContextUseSoftwareRenderer : @(YES)}];
    CIDetector *qrDetector = [CIDetector detectorOfType:CIDetectorTypeQRCode
                                                context:softwareContext
                                                options:@{CIDetectorAccuracy : CIDetectorAccuracyHigh}];
    NSArray *features = [qrDetector featuresInImage:ciimage];
    if (features.count == 0) {
        return nil;
    }
    CIQRCodeFeature *qrFeature = (CIQRCodeFeature *)features.firstObject;
    return qrFeature.messageString;
}

@end



