//
//  MZQRCodeManager.swift
//  MZTool
//
//  Created by gby on 2022/3/8.
//

import AVFoundation
import CoreImage
import Foundation
import UIKit

//MARK: Preview view
/// Delegate for the scanner preview view; receives torch-button taps.
protocol NMQRCodePreviewViewDelegate: NSObjectProtocol {
    /// Called when the torch (flashlight) switch button inside `scanningView` is tapped.
    /// The delegate is responsible for toggling the button's `isSelected` state and the hardware torch.
    func codeScanningViewdidClickedTorchSwitch(scanningView: NMQRCodePreviewView, switchBtn: UIButton)
}

/// Viewfinder overlay for code scanning: a bordered scanning rectangle with corner
/// marks, an animated scan line, a dimmed mask outside the rectangle, a torch toggle
/// button, a hint label, and a loading spinner.
public class NMQRCodePreviewView: UIView {
    
    weak var delegate: NMQRCodePreviewViewDelegate?
    /// Frame of the scanning rectangle in this view's coordinate space (mirrors `rectLayer.frame`).
    var rectFrame: CGRect {
        get{
            return rectLayer.frame
        }
    }
    
    // Layers composing the viewfinder: border rectangle, corner marks, moving scan line.
    lazy var rectLayer: CAShapeLayer = CAShapeLayer.init()
    lazy var cornerLayer: CAShapeLayer = CAShapeLayer.init()
    lazy var lineLayer: CAShapeLayer = CAShapeLayer.init()
    lazy var lineAnimation: CABasicAnimation = CABasicAnimation.init(keyPath: "position")
    
    /// Spinner shown while the capture session is being configured.
    lazy var indicatorView: UIActivityIndicatorView = {
        let v = UIActivityIndicatorView.init()
        v.style = .white
        v.hidesWhenStopped = true
        return v
    }()
    /// Torch (flashlight) toggle; hidden until `showTorchSwitch()` is called.
    /// NOTE(review): its frame is only ever set to (0, 0, 70, 70); confirm where it
    /// should be positioned before relying on its on-screen location.
    lazy var torchSwithButton: UIButton = {
        let btn = UIButton.init(type: .custom)
        btn.frame = CGRect(x: 0.0, y: 0.0, width: 70, height: 70)
        btn.titleLabel?.font = UIFont.systemFont(ofSize: 12)
        btn.setTitle("打开手电筒", for: .normal)
        btn.setTitle("关闭手电筒", for: .selected)
        btn.tintColor = UIColor.white
        return btn
    }()
    /// Hint label rendered below the scanning rectangle.
    lazy var tipsLabel: UILabel = {
        let lab = UILabel.init(frame: CGRect.zero)
        lab.textColor = UIColor.white.withAlphaComponent(0.6)
        lab.textAlignment = .center
        lab.font = UIFont.systemFont(ofSize: 13)
        lab.text = "将二维码放入取景框中即可自动扫描"
        lab.numberOfLines = 0
        return lab
    }()
    
    /// - Parameters:
    ///   - frame: frame of the whole preview view.
    ///   - rFrame: frame of the scanning rectangle; pass `.zero` to get a default
    ///     square whose side is 2/3 of the view's shorter dimension.
    ///   - rColor: stroke/fill color used for the rectangle, corner marks and scan line.
    public convenience required init(frame: CGRect, _ rFrame: CGRect = CGRect.zero, _ rColor: UIColor = UIColor.clear) {
        self.init(frame: frame)
        settingUI(rFrame, rColor)
    }
    
    /// Builds every sublayer and subview of the viewfinder. Called once from `init`.
    func settingUI(_ rcframe: CGRect, _ rccolor: UIColor){
        
        var rFrame = rcframe
        let rColor = rccolor
        if rFrame.equalTo(CGRect.zero){
            // Default scanning rect: a square, side = 2/3 of the shorter dimension.
            let rectSide: CGFloat = min(layer.bounds.size.width, layer.bounds.size.height) * 2/3
            // NOTE(review): the y offset reuses the *width*, so the square sits as far
            // from the top as from the left instead of being vertically centered.
            // Kept as in the original — confirm whether `height` was intended.
            rFrame = CGRect(x: (layer.bounds.size.width - rectSide)/2, y: (layer.bounds.size.width - rectSide)/2, width: rectSide, height: rectSide)
        }
        // (Removed a no-op branch that reassigned `rColor` to `.clear` when it already was clear.)
        
        layer.masksToBounds = true
        clipsToBounds = true
        
        // Thin border of the scanning rectangle, drawn in rectLayer-local coordinates
        // and inset by half the stroke width so the stroke stays inside the frame.
        let lineWidth: CGFloat = 0.5
        let rPath = UIBezierPath.init(rect: CGRect(x: lineWidth/2, y: lineWidth/2, width: rFrame.size.width - lineWidth, height: rFrame.size.height - lineWidth))
        rectLayer.fillColor = UIColor.clear.cgColor
        rectLayer.strokeColor = rColor.cgColor
        rectLayer.path = rPath.cgPath
        rectLayer.lineWidth = lineWidth
        rectLayer.frame = rFrame
        layer.addSublayer(rectLayer)
        
        // Corner marks: two short strokes per corner, offset by half the stroke width.
        let cornerWith: CGFloat = 2.0
        let cornerLength = min(rFrame.size.width, rFrame.size.height) / 12
        let cornerPath = UIBezierPath.init()
        // top-left
        cornerPath.move(to: CGPoint(x: cornerWith / 2, y: 0))
        cornerPath.addLine(to: CGPoint(x: cornerWith / 2, y: cornerLength))
        cornerPath.move(to: CGPoint(x: 0.0, y: cornerWith / 2))
        cornerPath.addLine(to: CGPoint(x: cornerLength, y: cornerWith / 2))
        // top-right
        cornerPath.move(to: CGPoint(x: rFrame.size.width, y: cornerWith / 2))
        cornerPath.addLine(to: CGPoint(x: rFrame.size.width - cornerLength, y: cornerWith / 2))
        cornerPath.move(to: CGPoint(x: rFrame.size.width - cornerWith / 2, y: 0.0))
        cornerPath.addLine(to: CGPoint(x: rFrame.size.width - cornerWith / 2, y: cornerLength))
        // bottom-right
        cornerPath.move(to: CGPoint(x: rFrame.size.width - cornerWith / 2, y: rFrame.size.height))
        cornerPath.addLine(to: CGPoint(x: rFrame.size.width - cornerWith / 2, y: rFrame.size.height - cornerLength))
        cornerPath.move(to: CGPoint(x: rFrame.size.width, y: rFrame.size.height - cornerWith / 2))
        cornerPath.addLine(to: CGPoint(x: rFrame.size.width - cornerLength, y: rFrame.size.height - cornerWith / 2))
        // bottom-left
        cornerPath.move(to: CGPoint(x: 0.0, y: rFrame.size.height - cornerWith / 2))
        cornerPath.addLine(to: CGPoint(x: cornerLength, y: rFrame.size.height - cornerWith / 2))
        cornerPath.move(to: CGPoint(x: cornerWith / 2, y: rFrame.size.height))
        cornerPath.addLine(to: CGPoint(x: cornerWith / 2, y: rFrame.size.height - cornerLength))
        
        // Stroke the corner marks.
        cornerLayer.frame = rFrame
        cornerLayer.path = cornerPath.cgPath
        cornerLayer.fillColor = UIColor.clear.cgColor
        // FIX: the original copied UIBezierPath's default lineWidth (1.0); the corner
        // geometry above is offset by cornerWith/2, so the stroke must be cornerWith wide.
        cornerLayer.lineWidth = cornerWith
        cornerLayer.strokeColor = rColor.cgColor
        layer.addSublayer(cornerLayer)
        
        // Dimmed mask with a punched-out hole over the scanning rectangle
        // (even-odd effect achieved by appending the reversed inner path).
        layer.backgroundColor = UIColor.black.cgColor
        let maskPath = UIBezierPath.init(rect: bounds)
        let subPath = UIBezierPath.init(rect: rFrame).reversing()
        maskPath.append(subPath)
        let maskLayer = CAShapeLayer.init()
        maskLayer.fillColor = UIColor.init(white: 0, alpha: 0.6).cgColor
        maskLayer.path = maskPath.cgPath
        layer.addSublayer(maskLayer)
        
        // Glowing scan line, inset 5pt from each side of the rectangle.
        let lineFrame = CGRect(x: rFrame.origin.x + 5.0, y: rFrame.origin.y, width: rFrame.size.width - 5.0 * 2, height: 1.5)
        let linePath = UIBezierPath.init(ovalIn: CGRect(x: 0.0, y: 0.0, width: lineFrame.size.width, height: lineFrame.size.height))
        lineLayer.frame = lineFrame
        lineLayer.path = linePath.cgPath
        lineLayer.fillColor = rColor.cgColor
        lineLayer.shadowColor = rColor.cgColor
        lineLayer.shadowRadius = 5.0
        lineLayer.shadowOffset = CGSize(width: 0.0, height: 0.0)
        lineLayer.shadowOpacity = 1.0
        layer.addSublayer(lineLayer)
        
        // Auto-reversing sweep of the line from top to bottom of the rectangle.
        lineAnimation = CABasicAnimation.init(keyPath: "position")
        lineAnimation.fromValue = NSValue.init(cgPoint:CGPoint(x: lineLayer.frame.origin.x + lineLayer.frame.size.width / 2, y: rFrame.origin.y + lineLayer.frame.size.height))
        lineAnimation.toValue = NSValue.init(cgPoint:CGPoint(x: lineLayer.frame.origin.x + lineLayer.frame.size.width / 2, y: rFrame.origin.y + rFrame.size.height - lineLayer.frame.size.height))
        lineAnimation.repeatCount = Float(CGFloat.greatestFiniteMagnitude)
        lineAnimation.autoreverses = true
        lineAnimation.duration = 2.0
        
        // Torch button (hidden until low light is detected by the manager).
        addSubview(torchSwithButton)
        torchSwithButton.isHidden = true
        torchSwithButton.addTarget(self, action: #selector(torchSwitchClicked(button:)), for: .touchUpInside)
        
        // Hint label, centered 12pt below the scanning rectangle.
        addSubview(tipsLabel)
        tipsLabel.sizeToFit()
        // FIX: the original used bounds.minY (always 0); midY places the label's
        // center one half-height below the rectangle as intended.
        tipsLabel.center = CGPoint(x: rFrame.midX, y: rFrame.maxY + tipsLabel.bounds.midY + 12)
        
        // Loading spinner, centered inside the scanning rectangle.
        addSubview(indicatorView)
        indicatorView.frame = rFrame
        
    }
    
    /// Starts the spinner as soon as it is attached (UIView hook, not called directly).
    public override func didAddSubview(_ subview: UIView) {
        if subview == indicatorView {
            indicatorView.startAnimating()
        }
    }
    
    /// Shows the scan line and (re)starts its sweep animation.
    public func startScaning(){
        lineLayer.isHidden = false
        lineLayer.add(lineAnimation, forKey: "lineAnimation")
    }
    
    /// Hides the scan line and removes its animation.
    public func stopScaning(){
        lineLayer.isHidden = true
        lineLayer.removeAnimation(forKey: "lineAnimation")
    }
    
    public func startIndicating(){
        // FIX: the original called stopAnimating() here (copy-paste from stopIndicating),
        // so the spinner could never be started through this API.
        indicatorView.startAnimating()
    }
    
    public func stopIndicating(){
        indicatorView.stopAnimating()
    }
    
    public func showTorchSwitch(){
        // FIX: the original body was empty, so the torch button added in settingUI
        // (with isHidden = true) could never appear even though the manager calls this.
        torchSwithButton.isHidden = false
    }
    
    public func hideTorchSwitch(){
        // FIX: originally empty; see showTorchSwitch().
        torchSwithButton.isHidden = true
    }
    
    /// Forwards torch-button taps to the delegate, which owns the torch state.
    @objc func torchSwitchClicked(button: UIButton){
        delegate?.codeScanningViewdidClickedTorchSwitch(scanningView: self, switchBtn: button)
    }
    
    //MARK: DEINIT ---- NMQRCodePreviewView
    deinit {
        
    }
}








// QR error-correction level identifiers (the values CIQRCodeGenerator accepts for
// "inputCorrectionLevel": L/M/Q/H, increasing redundancy).
// NOTE(review): not referenced anywhere in this file — presumably used by callers
// that generate QR images; confirm before removing.
let QRCodeInputCorrectionLevelL = "L"
let QRCodeInputCorrectionLevelM = "M"
let QRCodeInputCorrectionLevelQ = "Q"
let QRCodeInputCorrectionLevelH = "H"

/// Callback delivering a decoded code string.
public typealias CALLBACK = (_: String) -> Void
/// Light-status observer; arguments are (environment is dim, torch is currently on).
public typealias LIGHTOBSERVER = (_: Bool, _: Bool) -> Void

//MARK: Scanner controller
/// Drives an AVCaptureSession for QR / Code128 / EAN13 scanning, renders into an
/// `NMQRCodePreviewView`, reports ambient-light changes, handles pinch-to-zoom,
/// the torch, and decoding codes from photo-library images.
public final class NMQRCodeManager: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureMetadataOutputObjectsDelegate, UIImagePickerControllerDelegate, UINavigationControllerDelegate, UIGestureRecognizerDelegate, NMQRCodePreviewViewDelegate{
    
    var session: AVCaptureSession?
    var previewView: NMQRCodePreviewView?
    /// True once `lightObserver` has fired; reset from the torch button so the next
    /// brightness sample is reported again.
    var lightObserverHasCalled: Bool = false
    /// When true, scanning stops automatically after the first decoded code.
    var autoStop: Bool = false
    
    /// Receives each decoded code string.
    var callback: CALLBACK?
    /// Receives (dimmed, torchOn) when the ambient-light state changes.
    var lightObserver: LIGHTOBSERVER?
    
    /// Configures the capture session on a background queue, then attaches the preview
    /// layer to `previewview` on the main queue; `completion` fires once the UI is ready.
    /// If no camera is available the setup silently aborts and `completion` never runs.
    public convenience init(previewview: NMQRCodePreviewView, completion: (() -> Void)? ) {
        self.init()
        addNotifications()
        
        self.previewView = previewview
        previewview.delegate = self
        
        DispatchQueue.global().async {
            let session = AVCaptureSession.init()
            self.session = session
            guard let device = AVCaptureDevice.default(for: .video) else {
                return
            }
            let output = AVCaptureMetadataOutput.init()
            output.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
            do {
                let input = try AVCaptureDeviceInput.init(device: device)
                session.sessionPreset = .high
                if session.canAddInput(input){
                    session.addInput(input)
                }
            } catch{}
            if session.canAddOutput(output){
                session.addOutput(output)
                // metadataObjectTypes may only list types the output reports as available.
                if output.availableMetadataObjectTypes.contains(.qr),
                   output.availableMetadataObjectTypes.contains(.code128),
                   output.availableMetadataObjectTypes.contains(.ean13){
                    output.metadataObjectTypes = [.qr,.code128,.ean13]
                }
            }
            // FIX: only mutate focusMode after lockForConfiguration() succeeds —
            // configuring an unlocked device raises an Objective-C exception.
            do {
                try device.lockForConfiguration()
                if device.isFocusPointOfInterestSupported,
                   device.isFocusModeSupported(.continuousAutoFocus){
                    device.focusMode = .continuousAutoFocus
                }
                device.unlockForConfiguration()
            } catch{}
            DispatchQueue.main.async {
                let previewLayer = AVCaptureVideoPreviewLayer.init(session: session)
                previewLayer.frame = previewview.layer.bounds
                previewLayer.videoGravity = .resizeAspectFill
                previewview.layer.insertSublayer(previewLayer, at: 0)
                
                // FIX: the original read previewView?.frame (the whole view, not the
                // scanning rect) and computed rectOfInterest only when that frame WAS
                // zero — i.e. never meaningfully. Use the preview view's scanning rect
                // and map it when non-empty. rectOfInterest is normalized and rotated
                // 90° relative to a portrait view, hence width/height swap below
                // (`w` now correctly divides the rect's height by the view height).
                let scanRect = previewview.rectFrame
                if !scanRect.equalTo(CGRect.zero) {
                    let x: CGFloat = scanRect.origin.y / previewview.bounds.size.height
                    let y: CGFloat = (previewview.bounds.size.width - scanRect.origin.x - scanRect.size.width) / previewview.bounds.size.width
                    let w: CGFloat = scanRect.size.height / previewview.bounds.size.height
                    let h: CGFloat = scanRect.size.width / previewview.bounds.size.width
                    output.rectOfInterest = CGRect(x: x, y: y, width: w, height: h)
                }
                
                // Pinch anywhere on the preview to zoom the camera.
                let pinchGesture = UIPinchGestureRecognizer.init(target: self, action: #selector(self.pinchAction(gesture:)))
                previewview.addGestureRecognizer(pinchGesture)
                
                self.previewView?.stopIndicating()
                
                completion?()
            }
        }
    }
    
    /// Pause/resume scanning around backgrounding. Observers are removed in deinit.
    func addNotifications() {
        NotificationCenter.default.addObserver(self, selector: #selector(applicationDidEnterBackground(notification:)), name: UIApplication.didEnterBackgroundNotification, object: nil)
        NotificationCenter.default.addObserver(self, selector: #selector(applicationWillEnterForeground(notification:)), name: UIApplication.willEnterForegroundNotification, object: nil)
    }
    
    /// App moved to background: stop the session (also turns the torch off and resets zoom).
    @objc func applicationDidEnterBackground(notification: Notification){
        self.stopScaning()
    }
    
    /// App returning to foreground: resume scanning.
    @objc func applicationWillEnterForeground(notification: Notification){
        self.startScaning()
    }
    
    /// Zoom factor captured at the start of the current pinch gesture.
    static var lastZoomFactor: CGFloat = 1.0
    /// Pinch-to-zoom: scales the camera's videoZoomFactor, clamped to the device range.
    @objc func pinchAction(gesture: UIPinchGestureRecognizer){
        
        guard let device = AVCaptureDevice.default(for: .video) else {
            return
        }
  
        var minZoomFactor: CGFloat = 1.0
        var maxZoomFactor: CGFloat = device.activeFormat.videoMaxZoomFactor
        
        if #available(iOS 11.0, *) {
            minZoomFactor = device.minAvailableVideoZoomFactor
            maxZoomFactor = device.maxAvailableVideoZoomFactor
        }
        
        if gesture.state == .began {
            NMQRCodeManager.lastZoomFactor = device.videoZoomFactor
        }
        else if gesture.state == .changed {
            let zoomFactor = min(max(NMQRCodeManager.lastZoomFactor * gesture.scale, minZoomFactor), maxZoomFactor)
            // FIX: only mutate videoZoomFactor after the configuration lock succeeds.
            do {
                try device.lockForConfiguration()
                device.videoZoomFactor = zoomFactor
                device.unlockForConfiguration()
            } catch{}
        }
    }
    
    /// Starts scanning; `callback` receives every decoded code until stopped.
    public func startScanningWithCallBack(callback: CALLBACK?){
        self.startScanningWithCallBack(callback: callback, autoStop: false)
    }
    
    /// Starts scanning; when `autoStop` is true the session stops after the first code.
    public func startScanningWithCallBack(callback: CALLBACK?, autoStop: Bool){
        
        self.callback = callback
        self.autoStop = autoStop
        self.startScaning()
    }
    
    /// Starts the capture session (if configured and not already running) and begins
    /// observing ambient light to drive the torch-button visibility.
    public func startScaning(){
        if let session = session, !session.isRunning {
            session.startRunning()
            previewView?.startScaning()
        }
        
        self.observeLightStatus { [weak self] dimmed , torchon  in
            // In the dark (or with the torch already on) pause the scan-line animation
            // and surface the torch switch; otherwise resume and hide it.
            if dimmed || torchon {
                self?.previewView?.stopScaning()
                self?.previewView?.showTorchSwitch()
            }else{
                self?.previewView?.startScaning()
                self?.previewView?.hideTorchSwitch()
            }
        }
    }
    
    /// Stops the capture session, turns the torch off and resets the zoom to 1x.
    public func stopScaning(){
        if let session = session, session.isRunning {
            session.stopRunning()
            previewView?.stopScaning()
        }
        
        NMQRCodeManager.switchTorch(false)
        NMQRCodeManager.resetZoomFactor()
    }
    
    /// Delivers a decoded code to the callback, stopping first when autoStop is set.
    func handleCodeString(codeString: String){
        if autoStop {
            self.stopScaning()
        }
        callback?(codeString)
    }
    
    /// Presents the photo library so the user can pick an image to decode;
    /// `callback` receives the decoded string, if any. ("Rooter" kept for API compatibility.)
    public func presentPhotoLibraryWithRooter(rooter: UIViewController, callback: CALLBACK?){
        self.callback = callback
        
        let imagePicker = UIImagePickerController.init()
        imagePicker.sourceType = .photoLibrary
        imagePicker.delegate = self
        rooter.present(imagePicker, animated: true, completion: nil)
    }
    
    
    //MARK: UINavigationControllerDelegate
    //MARK: UIImagePickerControllerDelegate
    /// Detects a QR code in the picked image (edited version preferred) and, after
    /// dismissing the picker, forwards the decoded string through handleCodeString.
    public func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) {
        
        var feature: CIQRCodeFeature?
        if let pickedImage = (info[UIImagePickerController.InfoKey.editedImage] ?? info[UIImagePickerController.InfoKey.originalImage]) as? UIImage,
              let pickedImageData = pickedImage.pngData(),
              let detectImage = CIImage.init(data: pickedImageData){
            let detector = CIDetector.init(ofType: CIDetectorTypeQRCode, context: nil, options: [CIDetectorAccuracy: CIDetectorAccuracyLow])
            feature = detector?.features(in: detectImage, options: nil).first as? CIQRCodeFeature
        }
        
        picker.dismiss(animated: true) { [weak self] in
            if let code = feature?.messageString {
                self?.handleCodeString(codeString: code)
            }
        }
    }
    
    
    //MARK: AVCaptureMetadataOutputObjectsDelegate
    /// Delivers the first machine-readable code found in the frame.
    public func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        guard let code = metadataObjects.first as? AVMetadataMachineReadableCodeObject, let codeString = code.stringValue else {
            return
        }
        self.handleCodeString(codeString: codeString)
    }
    
    //MARK: AVCaptureVideoDataOutputSampleBufferDelegate
    /// Last reported "dimmed" state, used to notify only on transitions.
    static var lastDimmed: Bool = false
    /// Samples the EXIF brightness of each frame and notifies `lightObserver` on the
    /// first sample and on every dim/bright transition.
    public func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        // FIX: the original force-cast (`as!`) the attachment dictionary, the EXIF
        // sub-dictionary and the brightness value; a frame lacking EXIF brightness
        // metadata would crash. Bail out quietly instead.
        guard let attachments = CMCopyDictionaryOfAttachments(allocator: nil, target: sampleBuffer, attachmentMode: kCMAttachmentMode_ShouldPropagate) as? [AnyHashable : Any],
              let exifMetadata = attachments[kCGImagePropertyExifDictionary as String] as? [AnyHashable : Any],
              let brightnessValue = exifMetadata[kCGImagePropertyExifBrightnessValue as String] as? Double else {
            return
        }

        let device = AVCaptureDevice.default(for: .video)
        let torchOn = device?.torchMode == .on
        // Threshold 1.0 preserved from the original; EXIF BrightnessValue is in APEX units.
        let dimmed = brightnessValue < 1.0
        
        if lightObserver != nil {
            if !lightObserverHasCalled {
                lightObserver?(dimmed, torchOn)
                lightObserverHasCalled = true
                NMQRCodeManager.lastDimmed = dimmed
            }else if dimmed != NMQRCodeManager.lastDimmed {
                lightObserver?(dimmed, torchOn)
                NMQRCodeManager.lastDimmed = dimmed
            }
        }
    }

    
    //MARK: NMQRCodePreviewViewDelegate
    /// Torch button tapped in the preview view: toggle selection and the hardware torch.
    func codeScanningViewdidClickedTorchSwitch(scanningView: NMQRCodePreviewView, switchBtn: UIButton) {
        switchBtn.isSelected = !switchBtn.isSelected
        
        NMQRCodeManager.switchTorch(switchBtn.isSelected)
        // While the torch is on, suppress further light notifications; re-arm when it goes off.
        lightObserverHasCalled = switchBtn.isSelected
        
    }
    
    /// Installs `lightObserver` and attaches a video-data output (once) so
    /// captureOutput can sample frame brightness.
    func observeLightStatus(lightObserver: LIGHTOBSERVER?){
        self.lightObserver = lightObserver
        
        let lightOutput = AVCaptureVideoDataOutput.init()
        lightOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
        
        // canAddOutput guards against attaching a second video-data output on repeat calls.
        if session?.canAddOutput(lightOutput) ?? false {
            session?.addOutput(lightOutput)
        }
    }
    
    /// Turns the torch on/off, locking the device configuration around the change.
    static func switchTorch(_ on: Bool){
        guard let device = AVCaptureDevice.default(for: .video) else {
            return
        }
        let torchMode: AVCaptureDevice.TorchMode = on ? .on : .off
        
        if device.hasFlash, device.hasTorch, torchMode != device.torchMode {
            // FIX: only mutate torchMode after the configuration lock succeeds.
            do {
                try device.lockForConfiguration()
                device.torchMode = torchMode
                device.unlockForConfiguration()
            } catch{}
        }
    }
    
    /// Resets the camera zoom to 1x (used when scanning stops).
    private static func resetZoomFactor() {
        guard let device = AVCaptureDevice.default(for: .video) else {
            return
        }
        // FIX: only mutate videoZoomFactor after the configuration lock succeeds.
        do {
            try device.lockForConfiguration()
            device.videoZoomFactor = 1.0
            device.unlockForConfiguration()
        } catch{}
    }
    
    //MARK: DEINIT ---- NMQRCodeManager
    deinit {
        NotificationCenter.default.removeObserver(self)
    }
}
