import Flutter
import UIKit
import AVFoundation
import Vision

public class SwiftNDScannerPlugin: NSObject {
    
    /// Routes Flutter method-channel calls to the native scanner.
    ///
    /// Supported methods:
    ///  - "start" / "resume": restart the camera session and scan animation.
    ///  - "stop" / "pause":   pause the camera session.
    ///  - "dispose":          stop and release camera resources.
    ///  - "ScanImage":        decode barcodes from image files at the paths in
    ///                        `arguments["path"]`, replying with the payload strings.
    ///  - anything else:      replies with the iOS system version.
    public func handle(call: FlutterMethodCall, result: @escaping FlutterResult) {
        if(call.method == "start" || call.method == "resume") {
            debugPrint("begin scanner ---- call")
            SwiftPlatformServicePlugin.factory?.scanView?.start()
            SwiftPlatformServicePlugin.factory?.result = result
            result(nil)
        } else if(call.method == "dispose") {
            debugPrint("dispose ---- call")
            SwiftPlatformServicePlugin.factory?.scanView?.stop()
            SwiftPlatformServicePlugin.factory?.scanView?.disposeResource()
            result(nil)
        } else if(call.method == "stop" || call.method == "pause") {
            SwiftPlatformServicePlugin.factory?.scanView?.stop()
            SwiftPlatformServicePlugin.factory?.result = result
            result(nil)
        } else if(call.method == "ScanImage") {
            let arguments = call.arguments as? Dictionary<String, Any>
            // Guard against a missing/mistyped "path" argument instead of force-unwrapping.
            let paths = arguments?["path"] as? [String] ?? []
            var res = [String]()
            // Vision fallback: collects every decodable barcode payload in an image.
            // `perform` below runs synchronously, so `res` is filled before we reply.
            let barcodeRequest = VNDetectBarcodesRequest(completionHandler: { request, _ in
                guard let observations = request.results else { return }
                for observation in observations {
                    if let barcode = observation as? VNBarcodeObservation,
                       let payload = barcode.payloadStringValue {
                        res.append(payload)
                    }
                }
            })
            for path in paths {
                // BUGFIX: the images arrive from Flutter as file-system paths, so load
                // them with `contentsOfFile:` — `imageLiteralResourceName:` looks up a
                // bundled asset by name and traps when it does not exist. Also avoid
                // force-unwrapping `CIImage(image:)`, which returns nil for CGImage-less
                // UIImages; unreadable files are skipped instead of crashing.
                guard let uiImage = UIImage(contentsOfFile: path),
                      let ciImage = CIImage(image: uiImage) else {
                    continue
                }
                // First try CoreImage's QR detector; fall back to Vision for other symbologies.
                let context = CIContext(options: nil)
                let detector: CIDetector? = CIDetector(ofType: CIDetectorTypeQRCode, context: context, options: [CIDetectorAccuracy : CIDetectorAccuracyHigh])
                let features = detector?.features(in: ciImage) ?? []
                var finalres: String = ""
                for feature in features {
                    // Safe per-element cast instead of a force cast of the whole array.
                    if let qrFeature = feature as? CIQRCodeFeature {
                        finalres = qrFeature.messageString ?? ""
                    }
                }
                if finalres == "" {
                    let handler = VNImageRequestHandler(ciImage: ciImage, options: [:])
                    let _ = try? handler.perform([barcodeRequest])
                } else {
                    res.append(finalres)
                }
            }
            result(res)
        } else {
            result("iOS " + UIDevice.current.systemVersion)
        }
    }
}


class NativeViewFactory: NSObject, FlutterPlatformViewFactory {
    
    /// The currently active scan view, if one has been created.
    var scanView:NativeScanView?
    
    /// Channel the scan view uses to push scan results back to Flutter.
    var channel:FlutterMethodChannel
    
    init(_ channel:FlutterMethodChannel) {
        self.channel = channel
        super.init()
    }
    
    /// Pending Flutter reply; forwarded to the live scan view whenever set.
    public var result: FlutterResult? {
        didSet{
            if let view = scanView {
                view.result = self.result
            }
        }
    }
    
    /// Creates the platform view, releasing any previous scan view first so
    /// camera resources are not held twice.
    func create(withFrame frame: CGRect, viewIdentifier viewId: Int64, arguments args: Any?) -> FlutterPlatformView {
        debugPrint("create ---- call    scanView=\(String(describing: scanView))")
        if(scanView != nil){
            scanView?.disposeResource()
            scanView = nil
            debugPrint("release last scanView resource")
        }
        // BUGFIX: conditional cast — the previous `as!` crashed when Flutter sent
        // arguments of an unexpected type; now we just fall back to the defaults.
        let argst = args as? Dictionary<String, Any>
        let width = argst?["width"] as? CGFloat ?? UIScreen.main.bounds.width
        let height = argst?["height"] as? CGFloat ?? UIScreen.main.bounds.height
        let continueScan = argst?["continueScan"] as? Bool ?? false
        debugPrint("flutter input width = \(width), height = \(height)")
        // Keep a strong local so the optional property is never force-unwrapped.
        let view = NativeScanView(boundWidth: width, boundHeight: height, continueScan:continueScan,channel)
        view.result = result
        scanView = view
        return view
    }
    
    // Providing a codec is required for `args` to arrive non-nil.
    func createArgsCodec() -> FlutterMessageCodec & NSObjectProtocol {
        return FlutterStandardMessageCodec.sharedInstance()
    }
    
}

/// Platform view that hosts an `AVCaptureSession` camera preview and decodes
/// barcodes via `AVCaptureMetadataOutput`, pushing results to Flutter over
/// `channel` as "onScanResult" events.
class NativeScanView: NSObject, FlutterPlatformView, AVCaptureMetadataOutputObjectsDelegate {
    /// View width / view height (points) plus the derived scan-window geometry.
    let boundWidth, boundHeight, scanStartY, scanWidth: CGFloat
    let bounds: CGRect
    let scale: CGFloat
    /// When true, the session keeps running after a successful scan.
    let continueScan: Bool
    var scanBorderView: ScanBorderView
    var channel:FlutterMethodChannel
    /// Computes the scan-window geometry: views taller than 400pt get a fixed
    /// 257pt square centered vertically; shorter views scale everything by
    /// `boundHeight / 400` and shift the window up by 100 scaled points.
    init(boundWidth: CGFloat, boundHeight: CGFloat, continueScan: Bool,_ channel:FlutterMethodChannel) {
        self.boundWidth = boundWidth
        self.boundHeight = boundHeight
        self.continueScan = continueScan
        self.channel = channel
        self.bounds = CGRect(
            x: 0,
            y: 0,
            width: boundWidth,
            height: boundHeight
        )
        //        self.scanStartY = 215 * boundHeight / UIScreen.main.bounds.height
        if (boundHeight > 400) {
            self.scanWidth = 257
            self.scanStartY = (boundHeight - scanWidth) / 2
            self.scale = 1
        } else {
            self.scale = boundHeight / 400
            self.scanWidth = 257 * scale
            self.scanStartY = (boundHeight - 100 * scale - scanWidth) / 2
        }
        debugPrint("NativeScanView init  scanStartY  = \(scanStartY), scanWidth  = \(scanWidth), scale  = \(scale), ")
        self.scanBorderView = ScanBorderView(frame: bounds, scanStartY: scanStartY, scanWidth: scanWidth, scale: scale)
    }
    /// Starts the capture session (no-op if already running) and the scan-line animation.
    func start() {
        guard !session.isRunning else {
            return;
        }
        session.startRunning()
        debugPrint("start --- call startAnimation")
        scanBorderView.startAnimation()
    }
    
    /// Stops the capture session (no-op if not running) and the scan-line animation.
    func stop(){
        debugPrint("stop --- call")
        if(session.isRunning){
            debugPrint("stop running --- call")
            session.stopRunning()
            self.scanBorderView.stopAnimation()
        }
    }
    
    /// Pending Flutter reply; set by the factory. Currently unused at scan time
    /// (results go through `channel` instead — see `metadataOutput`).
    public var result: FlutterResult?
    
    private let session = AVCaptureSession();
    
    /// Cached view — see the note in `view()` about duplicate invocations.
    var createdView:UIView? = nil;
    
    var device: AVCaptureDevice?
    var deviceInput: AVCaptureDeviceInput?
    
    /// Video output used only to sample ambient brightness (see the
    /// AVCaptureVideoDataOutputSampleBufferDelegate extension).
    var mLightOutput:AVCaptureVideoDataOutput?
    /// Throttle flag so brightness is sampled at most once per 0.5s.
    var isLightDetecting = false
    
    func view() -> UIView {
        debugPrint("flutter platform view ------- call")
        /// On dual-camera iPhone X devices `view()` is triggered twice; initializing
        /// the camera input twice would crash. `createdView` caches the view so we
        /// never create it — or set up the camera input — a second time.
        if createdView != nil {
            return createdView!;
        }
        
        createdView = UIView();
        createdView!.frame = bounds
        createdView!.addSubview(scanBorderView)
        scanBorderView.frame = bounds;
        setupScanDevice(view: createdView!)
        // Start scanning immediately.
        start()
        return createdView!;
    }
    
    /// Drops the camera references so they can be rebuilt on the next `create`.
    func disposeResource(){
        device = nil;
        deviceInput = nil;
        mLightOutput = nil;
    }
    
    /// Configures the capture session: camera input, metadata (barcode) output
    /// with a restricted rect of interest, brightness-sampling output, and the
    /// preview layer inserted beneath the overlay.
    private func setupScanDevice(view: UIView) {
        guard let videoDevice = AVCaptureDevice.default(for: .video) else {
            return
        }
        device = videoDevice
        guard let input = try? AVCaptureDeviceInput(device: videoDevice) else {
            // NOTE(review): this UIAlertView is created but never shown (no `.show()`),
            // so the user gets no message when camera permission is denied; UIAlertView
            // is also deprecated — consider UIAlertController. Left as-is here.
            UIAlertView(title: nil, message: "请在\"设置\"中打开\"新核云\"的相机权限", delegate: nil, cancelButtonTitle: "OK")
            return
        }
        deviceInput = input;
        let output = AVCaptureMetadataOutput()
        output.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
        if boundHeight < 500 {
            session.sessionPreset = .vga640x480
        } else {
            session.sessionPreset = .high
        }
        session.addInput(input)
        session.addOutput(output)
        output.metadataObjectTypes = [.qr, .ean13, .ean8, .code128, .code39,
                                      .code93, .code39Mod43, .pdf417, .aztec, .upce,
                                      .interleaved2of5, .itf14, .dataMatrix]
        
        /// Scan window in view coordinates: a `scanWidth` square at `scanStartY`.
        /// NOTE(review): x is centered against the full screen width while the
        /// normalization below divides by boundWidth/boundHeight — this assumes the
        /// view spans the screen width; confirm if a narrower view is ever used.
        var scanRect = CGRect(
            x: (UIScreen.main.bounds.size.width - scanWidth) / 2,
            y: scanStartY,
            width: scanWidth,
            height: scanWidth
        )
        // Convert to the metadata output's normalized rect of interest.
        // Note: x/y (and width/height) swap axes because the camera's coordinate
        // space is rotated relative to the view.
        scanRect = CGRect(
            x: scanRect.origin.y / boundHeight,
            y: (boundWidth - (scanRect.width + scanRect.origin.x)) / boundWidth,
            width: scanRect.height / boundHeight,
            height: scanRect.width / boundWidth
        )
        // Restrict barcode detection to the visible scan window.
        output.rectOfInterest = scanRect
        
        // Light-intensity output (ambient brightness sampling for the torch button).
        let lightOutput = AVCaptureVideoDataOutput()
        if session.canAddOutput(lightOutput) {
            session.addOutput(lightOutput)
            lightOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
            mLightOutput = lightOutput
            debugPrint("GTMBarcodeScanner --> add AVCaptureVideoDataOutput")
        } else {
            debugPrint("GTMBarcodeScanner --> Can not add AVCaptureVideoDataOutput")
        }
        
        /// Camera preview layer, inserted below the scan-border overlay.
        let previewLayer = AVCaptureVideoPreviewLayer(session: session)
        previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
        previewLayer.frame = bounds
        view.layer.insertSublayer(previewLayer, at: 0)
        // Reveal the overlay; the animation is started by `start()` in `view()`.
        scanBorderView.isHidden = false
        //        scanBorderView.startAnimation()
        //        session.startRunning()
    }
    
    /// Metadata delegate: forwards the first readable code's string value to
    /// Flutter, then (unless in continuous mode) stops the session.
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        /// First scanned machine-readable object, if any.
        guard let data = metadataObjects.first as? AVMetadataMachineReadableCodeObject else {
            return
        }
        /// Its decoded string payload.
        guard let strValue = data.stringValue else {
            return
        }
        debugPrint("strValue = \(strValue)")
        //        if let r = result {
        //            debugPrint("result ----- call")
        //            r(strValue);
        //        }
        channel.invokeMethod("onScanResult", arguments: ["result":strValue])
        /// In continuous-scan mode don't stop the session — stopping would also
        /// turn off the torch.
        if (!continueScan) {
            session.stopRunning()
            scanBorderView.stopAnimation()
        }
    }
    /// Shows/hides the torch toggle on the overlay.
    func showFlashlightSwitch(needFlashButton: Bool){
        scanBorderView.showFlashlightSwitch(needFlashButton: needFlashButton, scanView: self)
    }
}

// MARK: - Light-intensity output delegate
extension NativeScanView: AVCaptureVideoDataOutputSampleBufferDelegate {
    
    /// Samples the EXIF brightness of incoming frames (throttled via
    /// `isLightDetecting`, re-armed after 0.5s) and tells the overlay whether
    /// the flashlight toggle should be shown.
    public func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        if isLightDetecting {
            return
        }
        isLightDetecting = true
        
        let attachments = CMCopyDictionaryOfAttachments(allocator: nil, target: sampleBuffer, attachmentMode: kCMAttachmentMode_ShouldPropagate) as? [String: Any]
        let exif = attachments?[kCGImagePropertyExifDictionary as String] as? [String: Any]
        if let brightness = exif?[kCGImagePropertyExifBrightnessValue as String] as? Double {
            // Keep the button when it is dark, or whenever the torch is already lit.
            let needFlashButton = isFlashOn || brightness < 0
            debugPrint("NativeScanView --> 光强度 brightless value = \(brightness)")
            // Notify the overlay of the light change.
            scanBorderView.showFlashlightSwitch(needFlashButton: needFlashButton, scanView: self)
        }
        
        // Re-arm the brightness sampler after half a second.
        DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + 0.5) { [weak self] in
            self?.isLightDetecting = false
        }
    }
}

// MARK: - Torch (flashlight) management
extension NativeScanView {
    
    /// Whether the torch is currently lit. `false` when no camera input exists.
    public var isFlashOn: Bool {
        guard let input = deviceInput else {
            return false
        }
        return input.device.torchMode == .on
    }
    
    /// Flips the torch between `.on` and `.off`; `.auto` (and a missing input)
    /// are left untouched, matching the previous behavior.
    public func toggleFlashLight() {
        guard let input = deviceInput else {
            return
        }
        switch input.device.torchMode {
        case .on:
            closeFlashLight()
        case .off:
            openFlashLight()
        default:
            break
        }
    }
    
    public func openFlashLight() {
        setTorchMode(.on)
    }
    
    public func closeFlashLight() {
        setTorchMode(.off)
    }
    
    /// Shared implementation for open/close: locks the device for configuration
    /// and applies `mode`. No-op when the device lacks flash/torch hardware.
    /// (Previously `openFlashLight`/`closeFlashLight` duplicated this verbatim.)
    private func setTorchMode(_ mode: AVCaptureDevice.TorchMode) {
        guard let input = deviceInput, let vdevice = device, vdevice.hasFlash, vdevice.hasTorch else {
            return
        }
        do {
            try input.device.lockForConfiguration()
            input.device.torchMode = mode
            input.device.unlockForConfiguration()
        } catch let error as NSError {
            debugPrint("NativeScanView --> device.lockForConfiguration(): \(error)")
        }
    }
}

/// Overlay drawn above the camera preview: scan-window border, animated scan
/// line, dimming masks, and the flashlight toggle UI.
class ScanBorderView: UIView {
    // MARK: - Subviews
    /// Corner border framing the scan window.
    let borderImageView = UIImageView(image: UIImage(named: "sw_scan_border"))
    /// The animated scan line.
    let lineImageView = UIImageView(image: UIImage(named: "sw_scan_line"))
    // Translucent masks covering the area outside the scan window.
    let topView = UIView()
    let leadingView = UIView()
    let bottomView = UIView()
    let trailingView = UIView()
    /// "Tap to light / tap to turn off" hint under the torch button.
    let tipLabel = UILabel()
    let flashLightOnImageView = UIImageView(image: UIImage(named: "sw_scan_flashlight_on"))
    let flashLightOffImageView = UIImageView(image: UIImage(named: "sw_scan_flashlight_off"))
    
    // MARK: - Variables
    /// Timer that moves the scan line.
    var timer: Timer?
    /// Current Y coordinate of the scan line.
    var currentY: CGFloat
    let scanStartY, scanWidth, scale: CGFloat
    /// Back-reference used to query/toggle the torch; set by `showFlashlightSwitch`.
    var nativeScanView: NativeScanView?
    
    // MARK: - View
    init(frame: CGRect, scanStartY: CGFloat, scanWidth: CGFloat, scale: CGFloat) {
        self.scanStartY = scanStartY
        self.scanWidth = scanWidth
        self.scale = scale
        self.currentY = scanStartY
        super.init(frame: frame)
        setupUI()
    }
    
    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
    
    override func layoutSubviews() {
        super.layoutSubviews()
        // Scan window: a square of side `scanWidth`, horizontally centered.
        borderImageView.frame = CGRect(
            x: (bounds.size.width - scanWidth) / 2,
            y: scanStartY,
            width: scanWidth,
            height: scanWidth
        )
        // Torch buttons and hint label sit below the scan window.
        flashLightOnImageView.frame = CGRect(x: (bounds.size.width - 31 * scale) / 2, y: scanStartY + scanWidth + 15 * scale, width: 31 * scale, height: 64 * scale)
        flashLightOffImageView.frame = CGRect(x: (bounds.size.width - 26 * scale) / 2, y: scanStartY + scanWidth + 27 * scale, width: 26 * scale, height: 49 * scale)
        
        tipLabel.frame = CGRect(x: (bounds.size.width - 180 * scale) / 2, y: scanStartY + scanWidth + 94 * scale, width: 180 * scale, height: 20 * scale)
        // The dimming masks overlap the border by 2pt (scaled) on each edge.
        topView.frame = CGRect(x: 0, y: 0, width: UIScreen.main.bounds.size.width, height: scanStartY + 2 * scale)
        leadingView.frame = CGRect(x: 0, y: scanStartY + 2 * scale, width: borderImageView.frame.minX  + 2 * scale, height: borderImageView.frame.height - 4 * scale)
        bottomView.frame = CGRect(x: 0, y: borderImageView.frame.maxY - 2 * scale, width: UIScreen.main.bounds.size.width, height: UIScreen.main.bounds.size.height - borderImageView.frame.maxY + 2 * scale)
        trailingView.frame = CGRect(x: borderImageView.frame.maxX - 2 * scale, y: borderImageView.frame.minY + 2 * scale, width: UIScreen.main.bounds.size.width - borderImageView.frame.maxX - 2 * scale, height: borderImageView.frame.height - 4 * scale)
        lineImageView.frame = CGRect(
            x: (bounds.size.width - scanWidth) / 2,
            y: scanStartY,
            width: scanWidth,
            height: 20 * scale
        )
    }
    
    // MARK: - Setup
    func setupUI() {
        addSubview(borderImageView)
        addSubview(lineImageView)
        addSubview(topView)
        addSubview(leadingView)
        addSubview(bottomView)
        addSubview(trailingView)
        addSubview(flashLightOnImageView)
        addSubview(flashLightOffImageView)
        addSubview(tipLabel)
        tipLabel.textColor = .white
        tipLabel.font = UIFont.systemFont(ofSize: 14 * scale)
        tipLabel.textAlignment  = .center
        topView.backgroundColor = UIColor(white: 0, alpha: 0.3)
        leadingView.backgroundColor = UIColor(white: 0, alpha: 0.3)
        bottomView.backgroundColor = UIColor(white: 0, alpha: 0.3)
        trailingView.backgroundColor = UIColor(white: 0, alpha: 0.3)
        // Image views ignore touches by default; enable so taps toggle the torch.
        tipLabel.isUserInteractionEnabled = true
        flashLightOnImageView.isUserInteractionEnabled = true
        flashLightOffImageView.isUserInteractionEnabled = true
        tipLabel.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(touchSelect(sender:))))
        flashLightOnImageView.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(touchSelect(sender:))))
        flashLightOffImageView.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(touchSelect(sender:))))
    }
    
    /// Shows or hides the torch toggle UI depending on ambient light.
    func showFlashlightSwitch(needFlashButton: Bool, scanView: NativeScanView){
        nativeScanView = scanView
        if needFlashButton {
            refreshFlashUI()
        } else {
            removeFlashUI()
        }
    }
    
    /// Tap handler: toggles the torch, then refreshes the button/label to match.
    @objc func touchSelect(sender: UITapGestureRecognizer) {
        removeFlashUI()
        nativeScanView?.toggleFlashLight()
        refreshFlashUI()
    }
    
    /// Adds the torch button matching the current torch state plus the hint label.
    /// (Shared by `showFlashlightSwitch` and `touchSelect`, which previously
    /// duplicated this logic.)
    private func refreshFlashUI() {
        if nativeScanView?.isFlashOn == true {
            tipLabel.text = "轻触关闭"
            addSubview(flashLightOnImageView)
        } else {
            tipLabel.text = "轻触照亮"
            addSubview(flashLightOffImageView)
        }
        addSubview(tipLabel)
    }
    
    /// Removes both torch buttons and the hint label.
    private func removeFlashUI() {
        flashLightOnImageView.removeFromSuperview()
        flashLightOffImageView.removeFromSuperview()
        tipLabel.removeFromSuperview()
    }
    
    /// Fired every 10ms: moves the scan line down one point, wrapping to the top.
    @objc func lineAnimation() {
        currentY += 1
        if currentY > scanStartY + scanWidth - 20 * scale {
            currentY = scanStartY
        }
        var frame = lineImageView.frame
        frame.origin.y = currentY
        UIView.animate(withDuration: 0.01) {
            self.lineImageView.frame = frame
        }
    }
    
    func startAnimation() {
        // Don't stack timers if start is called twice in a row.
        stopAnimation()
        // BUGFIX: the previous target/selector timer retained `self`, so a view
        // discarded while scanning could never deinit (the repeating timer kept
        // it — and the capture overlay — alive forever). A block-based timer with
        // [weak self] breaks that retain cycle.
        let animationTimer = Timer(timeInterval: 0.01, repeats: true) { [weak self] _ in
            self?.lineAnimation()
        }
        timer = animationTimer
        // .common mode keeps the animation running during scroll tracking.
        RunLoop.main.add(animationTimer, forMode: .common)
        animationTimer.fire()
    }
    
    func stopAnimation() {
        guard let tm = timer else {
            return
        }
        if tm.isValid {
            tm.invalidate()
        }
        timer = nil
    }
    
    deinit {
        // Defensive: a still-running timer must not keep firing after the view dies.
        timer?.invalidate()
        debugPrint(("ScanBorderView - deinit"))
    }
}

