//
//  ScanUnit.swift
//  AESOLAR_IOS
//
//  Created by Mr.wang on 2019/1/2.
//  Copyright © 2019 Mr.wang. All rights reserved.
//

import UIKit
import AVFoundation
import CoreAudioKit
import RxSwift
import RxCocoa

class ScanUnit: NSObject {

    /// Capture session driving the camera; nil when no camera/input was available.
    var session: AVCaptureSession?
    /// The video capture device in use; nil when none was found.
    var device: AVCaptureDevice?
    /// Preview layer inserted at the bottom of the host view's layer hierarchy.
    var preViewLayer: AVCaptureVideoPreviewLayer!
    
    /// Emits `true` whenever the torch switches on (fed from `device.rx.torchMode`).
    let flushOn = PublishSubject<Bool>()
    /// Emits whether the flash button should be hidden (driven by scene brightness —
    /// see the video-data delegate extension).
    let flushButtonHide = PublishSubject<Bool>()
    
    /// Invoked on the main queue with the decoded barcode string after a scan.
    var completeHandler: (String) -> () = {_ in }
    
    /// Builds the capture pipeline and attaches a preview layer to `view`.
    /// - Parameters:
    ///   - scanRect: Region of `view` (layer coordinates) to restrict decoding to.
    ///   - view: Host view; the preview layer is inserted at sublayer index 0.
    init(scanRect: CGRect, on view: UIView) {
        super.init()
        
        guard let device = AVCaptureDevice.devices(for: .video).first,
            let input = try? AVCaptureDeviceInput.init(device: device) else {
                return
        }
        self.device = device
        
        let session = AVCaptureSession()
        self.session = session
        
        // BUG FIX: the original called canSetSessionPreset(.high) a second time
        // inside the `if` and never assigned the preset; actually set it here.
        if session.canSetSessionPreset(.high) {
            session.sessionPreset = .high
        }
        
        let videoDataOut = AVCaptureVideoDataOutput()
        videoDataOut.setSampleBufferDelegate(self, queue: DispatchQueue.init(label: "videoDataOut"))
        if session.canAddOutput(videoDataOut) {
            session.addOutput(videoDataOut)
        }
        
        let output = AVCaptureMetadataOutput()
        output.setMetadataObjectsDelegate(self, queue: DispatchQueue.init(label: "outputQueue"))
        
        if session.canAddInput(input) {
            session.addInput(input)
        }
        if session.canAddOutput(output) {
            session.addOutput(output)
        }
        
        preViewLayer = AVCaptureVideoPreviewLayer.init(session: session)
        preViewLayer.videoGravity = .resizeAspectFill
        preViewLayer.frame = view.bounds
        view.layer.insertSublayer(preViewLayer, at: 0)
        
        // Must be assigned after the output is attached to the session.
        output.metadataObjectTypes = [.ean13, .code128]
        
        DispatchQueue.global(qos: .default).async {
            // startRunning() blocks, so keep it off the main thread; the
            // rect conversion is done afterwards so the layer has a valid
            // capture connection to convert against.
            session.startRunning()
            output.rectOfInterest = self.preViewLayer.metadataOutputRectConverted(fromLayerRect: scanRect)
        }
        
        // One configuration lock for focus, exposure and zoom (the original
        // locked/unlocked twice in a row for no benefit).
        if (try? device.lockForConfiguration()) != nil {
            if device.isFocusModeSupported(.continuousAutoFocus) {
                device.focusMode = .continuousAutoFocus
            }
            if device.isExposureModeSupported(.continuousAutoExposure) {
                device.exposureMode = .continuousAutoExposure
            }
            // Clamp: assigning a factor above videoMaxZoomFactor raises an
            // exception on devices that don't support 1.5x.
            device.videoZoomFactor = min(1.5, device.activeFormat.videoMaxZoomFactor)
            device.unlockForConfiguration()
        }
        
        device.rx.torchMode
            .map{ $0 == .on }
            .bind(to: flushOn)
            .disposed(by: rx.disposeBag)
    }
    
    /// Starts the capture session on a background queue (startRunning blocks).
    func startRunning() {
        DispatchQueue.global(qos: .default).async { [weak self] in
            self?.session?.startRunning()
        }
    }
    
    /// Stops the capture session on a background queue.
    func stopRunning() {
        DispatchQueue.global(qos: .default).async { [weak self] in
            self?.session?.stopRunning()
        }
    }
    
    /// Toggles the torch on/off. No-ops when there is no device or no torch —
    /// setting torchMode on a torch-less device would raise an exception.
    func changeFlush() {
        guard let device = device, device.hasTorch else {
            return
        }
        guard (try? device.lockForConfiguration()) != nil else {
            return
        }
        device.torchMode = (device.torchMode == .on) ? .off : .on
        device.unlockForConfiguration()
    }
    
}

// MARK: - AVCaptureMetadataOutputObjectsDelegate
extension ScanUnit: AVCaptureMetadataOutputObjectsDelegate {
    
    /// Called on the "outputQueue" queue when barcodes are detected.
    /// Stops the session, plays a feedback sound, and forwards the decoded
    /// string to `completeHandler` on the main queue.
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        guard let metadataObject = metadataObjects.first,
            let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject,
            let stringValue = readableObject.stringValue else { return }
        self.session?.stopRunning()
        
        if let sound = Bundle.main.path(forResource: "scan", ofType: "wav") {
            let soundUrl = URL.init(fileURLWithPath: sound)
            var soundId: SystemSoundID = 0
            AudioServicesCreateSystemSoundID(soundUrl as CFURL, &soundId)
            // BUG FIX: the original leaked one SystemSoundID per scan.
            // Dispose it once playback completes.
            AudioServicesPlaySystemSoundWithCompletion(soundId) {
                AudioServicesDisposeSystemSoundID(soundId)
            }
        }
        
        DispatchQueue.main.async {
            self.completeHandler(stringValue)
        }
    }
    
}

// MARK: - AVCaptureVideoDataOutputSampleBufferDelegate
extension ScanUnit: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
    
    /// Called for every video frame on the "videoDataOut" queue; reads the
    /// EXIF brightness value and decides whether the flash button should hide.
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        
        // BUG FIX: the original force-unwrapped self.device!, which crashes if
        // init's guard returned early and left `device` nil; bind it safely.
        guard let device = self.device,
            let metadata = CMCopyDictionaryOfAttachments(allocator: nil, target: sampleBuffer, attachmentMode: kCMAttachmentMode_ShouldPropagate),
            let exifMetadata = (metadata as NSDictionary).object(forKey: kCGImagePropertyExifDictionary) as? NSDictionary,
            let brightnessValue = exifMetadata.object(forKey: kCGImagePropertyExifBrightnessValue) as? Double else {
                return
        }
        // Emit `true` (hide) unless the scene is dark (brightness < -1) or the
        // torch is already on — presumably `true` hides the button; confirm
        // against the subscriber of flushButtonHide.
        self.flushButtonHide.onNext(!(brightnessValue < -1 || device.torchMode == .on))
    }
    
}
