//
//  ScanCodeVC.swift
//  CardBag
//
//  Created by hl on 2019/5/21.
//  Copyright © 2019 hlznj. All rights reserved.
//

import UIKit
import AVFoundation

/// Callback for delivering a successfully scanned code back to the presenter.
/// Class-bound (`AnyObject`) so conformers can be referenced weakly by the
/// scanner, avoiding a retain cycle with the presenting view controller.
protocol ScanCodeVCDelegate: AnyObject {
    /// Called once when a machine-readable code has been decoded.
    /// - Parameters:
    ///   - code: The decoded string value of the scanned code.
    ///   - codeType: Whether the code was a QR code or a 1D barcode.
    func getCodeString(code: String, codeType: CodeType)
}

/// Full-screen scanner for QR codes and common 1D barcodes.
/// Shows a live camera preview, reports the first recognized code to
/// `codeDelegate`, then dismisses itself.
class ScanCodeVC: BaseViewController, AVCaptureMetadataOutputObjectsDelegate {

    /// "Cancel" button pinned to the bottom of the screen.
    var backBtn: UIButton?

    /// Receives the decoded string once a code is recognized.
    /// NOTE(review): this should be `weak` (the protocol would need to be
    /// class-bound) to avoid a potential retain cycle with the presenter.
    var codeDelegate: ScanCodeVCDelegate?

    /// Created lazily in `showScanCode()`; reused across appearances.
    var captureSession: AVCaptureSession?
    var videoPreviewLayer: AVCaptureVideoPreviewLayer?

    override func viewDidLoad() {
        super.viewDidLoad()

        self.showScanCode()

        // Cancel button on top of the preview layer.
        let button = UIButton()
        button.setTitle("取消", for: .normal)
        button.backgroundColor = MainColor1
        button.layer.masksToBounds = true
        button.layer.cornerRadius = 4
        button.addTarget(self, action: #selector(backBtnAction), for: .touchUpInside)
        self.view.addSubview(button)
        self.backBtn = button

        button.snp.makeConstraints { (make) in
            make.size.equalTo(CGSize(width: kScreenW / 2, height: 40))
            make.bottom.equalTo(-kSafeAreaBottomHeight - 15)
            make.centerX.equalTo(self.view.snp.centerX)
        }
    }

    /// Dismisses the scanner without reporting a code.
    @objc override func backBtnAction() {
        self.dismiss(animated: true, completion: nil)
    }

    /// Builds the capture session on first call, then (re)starts it.
    func showScanCode() {
        if self.captureSession == nil {
            // The camera can be unavailable (simulator, restricted device):
            // bail out instead of force-unwrapping and crashing.
            guard let captureDevice = AVCaptureDevice.default(for: .video) else { return }
            let input: AVCaptureDeviceInput
            do {
                input = try AVCaptureDeviceInput(device: captureDevice)
            } catch {
                return
            }

            let session = AVCaptureSession()
            guard session.canAddInput(input) else { return }
            session.addInput(input)

            let metadataOutput = AVCaptureMetadataOutput()
            guard session.canAddOutput(metadataOutput) else { return }
            session.addOutput(metadataOutput)
            metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
            // metadataObjectTypes is only valid after the output joins the session.
            metadataOutput.metadataObjectTypes = [.ean13,
                                                  .code39,
                                                  .code93,
                                                  .code128,
                                                  .code39Mod43,
                                                  .qr]
            self.captureSession = session

            let previewLayer = AVCaptureVideoPreviewLayer(session: session)
            previewLayer.videoGravity = .resizeAspectFill
            previewLayer.frame = self.view.bounds
            // Only visible for the instant before the first video frame renders.
            previewLayer.backgroundColor = UIColor.red.cgColor
            self.view.layer.addSublayer(previewLayer)
            self.videoPreviewLayer = previewLayer

            // Restrict detection to the centered scan area (normalized coords).
            metadataOutput.rectOfInterest = self.calculateScanRect()

            // Zoom in slightly so small codes are easier to pick up.
            // Zoom is best-effort: a failed configuration lock must not
            // prevent the session from starting (the original returned here,
            // silently disabling scanning altogether).
            do {
                try captureDevice.lockForConfiguration()
                let maxZoomFactor = captureDevice.activeFormat.videoMaxZoomFactor
                captureDevice.videoZoomFactor = min(maxZoomFactor, 2.0)
                captureDevice.unlockForConfiguration()
            } catch {
                // Scanning still works without zoom; ignore.
            }
        }
        // startRunning() blocks until the session starts; Apple documents it
        // should be called off the main thread.
        DispatchQueue.global(qos: .userInitiated).async { [weak self] in
            self?.captureSession?.startRunning()
        }
    }

    /// Converts the centered on-screen scan square (3/4 of the preview in each
    /// dimension) into `AVCaptureMetadataOutput.rectOfInterest` space:
    /// normalized 0...1 coordinates with x/y swapped, because the capture
    /// output is landscape while the screen is portrait.
    func calculateScanRect() -> CGRect {
        guard let previewSize = self.videoPreviewLayer?.frame.size,
              previewSize.width > 0, previewSize.height > 0 else {
            // Preview not laid out yet: fall back to the whole frame.
            return CGRect(x: 0, y: 0, width: 1, height: 1)
        }
        let scanSize = CGSize(width: previewSize.width * 3 / 4,
                              height: previewSize.height * 3 / 4)
        let screenRect = CGRect(x: (previewSize.width - scanSize.width) / 2.0,
                                y: (previewSize.height - scanSize.height) / 2.0,
                                width: scanSize.width,
                                height: scanSize.height)
        // Rotate 90°: swap axes and normalize each against the opposite
        // dimension. Bug fix: the original derived BOTH the normalized width
        // and height from the scan rect's height; the normalized height must
        // come from the scan rect's WIDTH.
        return CGRect(x: screenRect.origin.y / previewSize.height,
                      y: screenRect.origin.x / previewSize.width,
                      width: screenRect.size.height / previewSize.height,
                      height: screenRect.size.width / previewSize.width)
    }

    /// AVCaptureMetadataOutputObjectsDelegate — delivered on the main queue
    /// (configured in `showScanCode()`); reports the first readable code,
    /// stops the session, and dismisses.
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        guard let readableObject = metadataObjects.first as? AVMetadataMachineReadableCodeObject,
              let stringValue = readableObject.stringValue else { return }
        // Stop first so the delegate cannot be notified more than once.
        self.captureSession?.stopRunning()
        self.codeDelegate?.getCodeString(code: stringValue, codeType: readableObject.type == .qr ? .Qr : .bar)
        self.dismiss(animated: true, completion: nil)
    }

}
