//
//  ScanView.swift
//  scan
//
//  Created by chaves on 2020/10/9.
//

import UIKit
import AVFoundation
import Flutter



/// A native platform view that previews the camera feed and reports scanned
/// QR codes back to Flutter over a per-instance method channel
/// ("chavesgu/scan/method_<viewId>", event "onCaptured").
public class ScanView: UIView, AVCaptureMetadataOutputObjectsDelegate, FlutterPlugin {
    public static func register(with registrar: FlutterPluginRegistrar) {
        // Registration happens per instance in init; nothing to do here.
    }

    var loaded: Bool = false
    /// Background queue for session start/stop: `AVCaptureSession.startRunning()`
    /// and `stopRunning()` are blocking calls and must not run on the main thread.
    var queue: DispatchQueue?
    var session: AVCaptureSession?
    var isSessionRun: Bool = false
    var captureLayer: AVCaptureVideoPreviewLayer?
    var metadataOutput: AVCaptureMetadataOutput?
    var channel: FlutterMethodChannel?
    var _bounds: CGRect = CGRect()
    // Scan-area geometry supplied by the Dart side (logical points).
    var areaWidth: CGFloat = 0
    var areaHeight: CGFloat = 0
    var scale: CGFloat = 0.7
    var bottomOffset: CGFloat = 0

    /// Creates the view, wires up the method channel and preview layer, and
    /// reads the optional creation params (`bottomOffset`, `areaWidth`,
    /// `areaHeight`) sent from Flutter.
    init(_ frame: CGRect, viewId: Int64, args: Any?, registrar: FlutterPluginRegistrar) {
        super.init(frame: frame)
        self.queue = DispatchQueue.init(label: "com.chavesgu.scan", attributes: .concurrent)
        self.session = AVCaptureSession()
        self.channel = FlutterMethodChannel(name: "chavesgu/scan/method_\(viewId)", binaryMessenger: registrar.messenger())
        registrar.addMethodCallDelegate(self, channel: self.channel!)
        //    registrar.addApplicationDelegate(self);

        // Creation params arrive as an NSDictionary of NSNumbers; use safe
        // casts so a malformed payload cannot crash the embedder.
        let params = (args as? [String: Any]) ?? [:]
        if let offset = params["bottomOffset"] as? NSNumber {
            self.bottomOffset = CGFloat(truncating: offset)
        }
        if let w = params["areaWidth"] as? NSNumber, let h = params["areaHeight"] as? NSNumber {
            self.areaWidth = CGFloat(truncating: w)
            self.areaHeight = CGFloat(truncating: h)
        }

        let layer = AVCaptureVideoPreviewLayer(session: self.session!)
        self.captureLayer = layer
        layer.name = "capture"
        layer.backgroundColor = UIColor.black.cgColor
        layer.videoGravity = .resizeAspectFill
        self.layer.addSublayer(layer)

        // Track actual session state so resume()/pause() stay idempotent.
        NotificationCenter.default.addObserver(self, selector: #selector(sessionDidStart), name: .AVCaptureSessionDidStartRunning, object: nil)
        NotificationCenter.default.addObserver(self, selector: #selector(sessionDidStop), name: .AVCaptureSessionDidStopRunning, object: nil)
    }

    /// One-time setup: request camera permission, then configure the session.
    private func load() {
        self.loaded = true
        // Request camera permission.
        AVCaptureDevice.requestAccess(for: .video) { granted in
            if granted {
                self.configSession()
            }
        }
    }

    /// Adds the default video input and a QR metadata output, then starts the
    /// session on the background queue.
    private func configSession() {
        do {
            var defaultVideoDevice: AVCaptureDevice?
            if let cameraDevice = AVCaptureDevice.default(for: .video) {
                defaultVideoDevice = cameraDevice
            }
            guard let videoDevice = defaultVideoDevice else {
                print("Default video device is unavailable.")
                return
            }
            let videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice)
            if self.session!.canAddInput(videoDeviceInput) {
                self.session!.addInput(videoDeviceInput)
            }

            // Add the metadata output; delegate callbacks are delivered on main.
            let metadataOutput = AVCaptureMetadataOutput()
            self.metadataOutput = metadataOutput
            if self.session!.canAddOutput(metadataOutput) {
                self.session!.addOutput(metadataOutput)
                metadataOutput.setMetadataObjectsDelegate(self, queue: .main)
                metadataOutput.metadataObjectTypes = [.qr]
            } else {
                print("Could not add metadata output to the session")
                return
            }

            self.session!.sessionPreset = AVCaptureSession.Preset.high
            self.setScanArea()
            // startRunning() blocks until capture starts; keep it off main.
            self.queue?.async {
                self.session?.startRunning()
            }
        } catch {
            print("Couldn't create video device input: \(error)")
        }
    }

    /// Restricts metadata detection to the configured scan rectangle,
    /// centered in the view and shifted up by `bottomOffset`.
    private func setScanArea() {
        let vw = self.bounds.width
        let vh = self.bounds.height
        let x = (vw - areaWidth) / 2
        let y = (vh - areaHeight) / 2
        if let output = self.metadataOutput, let captureLayer = self.captureLayer {
            // Fix: height previously used `areaWidth`, forcing a square region
            // and ignoring the `areaHeight` creation param.
            let originRect = CGRect(x: x, y: y - bottomOffset, width: areaWidth, height: areaHeight)
            // Convert from layer coordinates to the output's normalized,
            // rotated coordinate space.
            let rect = captureLayer.metadataOutputRectConverted(fromLayerRect: originRect)
            output.rectOfInterest = rect
        }
    }

    @objc func sessionDidStart() {
        self.isSessionRun = true
    }

    @objc func sessionDidStop() {
        self.isSessionRun = false
    }

    public override func layoutSubviews() {
        super.layoutSubviews()
        self.captureLayer?.frame = self.bounds
        self._bounds = self.bounds
        // Defer camera setup until we have real bounds to size the scan area.
        if !self.loaded {
            self.load()
        }
    }

    public override func removeFromSuperview() {
        // Tear down: stop capture off-main and drop references.
        let session = self.session
        self.queue?.async {
            session?.stopRunning()
        }
        NotificationCenter.default.removeObserver(self)
        self.loaded = false
        self.session = nil
        self.queue = nil
        super.removeFromSuperview()
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    /// Scan result callback (delivered on main): stop the session and forward
    /// the first readable code's string payload to Flutter.
    public func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        self.queue?.async {
            self.session?.stopRunning()
        }
        if let metadataObject = metadataObjects.first {
            guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
            guard let stringValue = readableObject.stringValue else { return }
            self.channel?.invokeMethod("onCaptured", arguments: stringValue)
        }
    }

    /// Method-channel dispatcher. Always answers `result` so the Dart-side
    /// future completes (the original never called it, leaving callers hanging).
    public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
        switch call.method {
        case "resume":
            self.resume()
            result(nil)
        case "pause":
            self.pause()
            result(nil)
        case "toggleTorchMode":
            self.toggleTorchMode()
            result(nil)
        default:
            result(FlutterMethodNotImplemented)
        }
    }

    /// Restarts capture if it is not already running (off the main thread).
    private func resume() {
        if !self.isSessionRun {
            self.queue?.async {
                self.session?.startRunning()
            }
        }
    }

    /// Stops capture if it is running (off the main thread).
    private func pause() {
        if self.isSessionRun {
            self.queue?.async {
                self.session?.stopRunning()
            }
        }
    }

    /// Toggles the torch between off and full brightness, if available.
    private func toggleTorchMode() {
        guard let device = AVCaptureDevice.default(for: .video) else { return }
        guard device.hasTorch else { return }
        do {
            // lockForConfiguration is required before touching torch state.
            try device.lockForConfiguration()
            defer { device.unlockForConfiguration() }

            if device.torchMode == .on {
                device.torchMode = .off
            } else {
                do {
                    try device.setTorchModeOn(level: 1.0)
                } catch {
                    print(error)
                }
            }
        } catch {
            print(error)
        }
    }
}
