//
//  ViewController.swift
//  eHealth
//
//  Created by QFX on 2023/10/24.
//

import UIKit
import AVFoundation
import MHLoadingButton
import SnapKit
import SwiftyJSON
import TagListView

class ViewController: UIViewController {

    /// Capture session coordinating the real-time camera data flow.
    var captureSession = AVCaptureSession()
    /// Back-facing camera device (controls lens, exposure, flash, ...).
    var backCamera: AVCaptureDevice?
    /// Front-facing camera device.
    var frontCamera: AVCaptureDevice?
    /// Camera currently used for capture (defaults to the back camera).
    var currentCamera: AVCaptureDevice?
    /// Full-screen button that triggers a capture and shows a loading animation.
    var loadingButton: LoadingButton!
    /// Output object producing still photos from the session.
    var photoOutput: AVCapturePhotoOutput?
    /// Label annotations returned by the Google Vision API.
    var dataList: [JSON] = []
    /// Food/calorie records loaded from the local database.
    var healthList: [eHealth] = []
    /// Database record matched against the Vision API result.
    var fullHealth: eHealth?

    @IBOutlet weak var biggestTagListView: TagListView!
    @IBOutlet weak var resultLabel: UILabel!

    override func viewDidLoad() {
        super.viewDidLoad()
        // Configure the capture pipeline, then start streaming.
        setupCaptureSession()
        setupDevice()
        setupInputOutput()
        setupPreviewLayer()
        startRunningCaptureSession()
        // Preload the calorie database so Vision results can be matched later.
        if let list = eHealth.getHealths() {
            healthList.append(contentsOf: list)
        }
    }

    /// Uses the photo-quality preset so captures come out at full resolution.
    func setupCaptureSession() {
        captureSession.sessionPreset = .photo
    }

    /// Discovers the built-in wide-angle cameras and selects the back one.
    func setupDevice() {
        let discoverySession = AVCaptureDevice.DiscoverySession(
            deviceTypes: [.builtInWideAngleCamera],
            mediaType: .video,
            position: .unspecified)
        for device in discoverySession.devices {
            switch device.position {
            case .back:
                backCamera = device
            case .front:
                frontCamera = device
            default:
                break
            }
        }
        // FIX: assign once after the loop — the original re-assigned this on
        // every iteration, which was redundant and misleading.
        currentCamera = backCamera
    }

    /// Wires the selected camera into the session and attaches a photo output.
    func setupInputOutput() {
        // FIX: guard instead of force-unwrapping `currentCamera` — the original
        // crashed on devices with no usable camera (e.g. Simulator).
        guard let camera = currentCamera else {
            print("No capture device available")
            return
        }
        do {
            let captureDeviceInput = try AVCaptureDeviceInput(device: camera)
            // FIX: check canAddInput/canAddOutput before adding — adding an
            // unsupported input or output raises a runtime exception.
            if captureSession.canAddInput(captureDeviceInput) {
                captureSession.addInput(captureDeviceInput)
            }
            let output = AVCapturePhotoOutput()
            output.setPreparedPhotoSettingsArray(
                [AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])],
                completionHandler: nil)
            if captureSession.canAddOutput(output) {
                captureSession.addOutput(output)
            }
            photoOutput = output
        } catch {
            print(error.localizedDescription)
        }
    }

    /// Builds the on-screen UI: camera preview layer, full-screen capture
    /// button, and the result label / tag list kept above the preview.
    func setupPreviewLayer() {
        self.isAccessibilityElement = true
        self.accessibilityTraits = UIAccessibilityTraits(rawValue: super.accessibilityTraits.rawValue | UIAccessibilityTraits.adjustable.rawValue)

        // Camera preview layer fills the view behind every other subview.
        let cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        cameraPreviewLayer.videoGravity = .resizeAspectFill
        cameraPreviewLayer.connection?.videoOrientation = .portrait
        cameraPreviewLayer.frame = self.view.frame
        self.view.layer.insertSublayer(cameraPreviewLayer, at: 0)

        // Full-screen loading button that doubles as the shutter.
        loadingButton = LoadingButton(text: "「 Tap Me Snap 」", font: UIFont.boldSystemFont(ofSize: 30), cornerRadius: 0, withShadow: false, buttonStyle: .fill)
        loadingButton.backgroundColor = .clear
        loadingButton.isHighlighted = false
        loadingButton.setTitleColor(.white, for: .normal)
        loadingButton.addTarget(self, action: #selector(camera(_:)), for: .touchUpInside)
        loadingButton.indicator = LineScaleIndicator(color: .white)
        self.view.addSubview(loadingButton)
        loadingButton.snp.makeConstraints { make in
            make.edges.equalToSuperview()
        }
        // Keep the tag list and result label above the preview/button.
        self.view.bringSubviewToFront(self.biggestTagListView)
        self.view.bringSubviewToFront(self.resultLabel)

        biggestTagListView.isHidden = true
        biggestTagListView.delegate = self
        biggestTagListView.textFont = .systemFont(ofSize: 24)
        biggestTagListView.alignment = .center
    }

    /// Starts streaming from the camera.
    func startRunningCaptureSession() {
        // FIX: startRunning() is a blocking call; Apple recommends invoking it
        // off the main thread so the UI does not stall while the session spins up.
        DispatchQueue.global(qos: .userInitiated).async { [weak self] in
            self?.captureSession.startRunning()
        }
    }

    /// Shutter action: clears previous results, requests a still photo, and
    /// starts the loading animation while recognition runs.
    @objc func camera(_ sender: Any) {
        let settings = AVCapturePhotoSettings()
        print("camera button pressed")
        // Reset the previous recognition results.
        resultLabel.text = ""
        dataList.removeAll()
        biggestTagListView.removeAllTags()
        // The capture delegate callback forwards the photo to the Vision API.
        photoOutput?.capturePhoto(with: settings, delegate: self)
        // Block further taps until recognition completes.
        loadingButton.showLoaderWithImage(userInteraction: false)
    }

    /// No segue data to pass; intentionally empty.
    override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
    }
}
extension ViewController: TagListViewDelegate {
    // MARK: TagListViewDelegate

    /// Flips the tag's selected state; newly selected tags are spoken aloud.
    func tagPressed(_ title: String, tagView: TagView, sender: TagListView) {
        print("Tag pressed: \(title), \(sender)")
        tagView.isSelected.toggle()
        guard tagView.isSelected else { return }
        // Pronounce the tag that was just selected.
        SpeechSynthesizer.Shared.speak(title)
    }

    /// Removes the tag whose delete button was tapped.
    func tagRemoveButtonPressed(_ title: String, tagView: TagView, sender: TagListView) {
        print("Tag Remove pressed: \(title), \(sender)")
        sender.removeTagView(tagView)
    }
}

extension ViewController: AVCapturePhotoCaptureDelegate {

    /// Called when a still photo has finished processing; forwards the image
    /// to the Google Vision API for label detection.
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        // FIX: surface the capture error instead of silently ignoring it.
        if let error = error {
            print(error.localizedDescription)
            return
        }
        guard let imageData = photo.fileDataRepresentation(),
              let image = UIImage(data: imageData) else {
            return
        }
        print(imageData)
        applyOCR(image: image) { error, _ in
            // FIX: the original bound `er` but printed the unwrapped-less
            // optional `error` (unused-binding warning, "Optional(...)" output).
            if let error = error {
                print(error)
            }
        }
    }

    /// Uploads `image` to the Google Vision API (LABEL_DETECTION, English
    /// hints), shows the returned labels as tags, matches them against the
    /// local calorie database, and speaks the best match.
    ///
    /// - Parameters:
    ///   - image: The captured photo to analyze.
    ///   - completionHandler: Called with the recognized full text on success,
    ///     or an error otherwise. May be invoked on a background queue.
    func applyOCR(image: UIImage, completionHandler: @escaping (_ error: Error?, _ data: String?) -> Void) {
        // FIX: no force-unwrap — JPEG encoding can fail for some images.
        guard let imageData = image.jpegData(compressionQuality: 0.2) else {
            completionHandler(NSError(domain: "", code: 400, userInfo: [NSLocalizedDescriptionKey: "Could not encode image"]), nil)
            return
        }
        // FIX: encode without line breaks. The original used
        // `.endLineWithCarriageReturn`, which injects raw CR characters into
        // the JSON string below — that is invalid JSON per RFC 8259.
        let base64 = imageData.base64EncodedString()
        // Request body: label detection with English language hints.
        let body = "{ 'requests': [ { 'image': { 'content': '\(base64)' }, 'features': [ { 'type': 'LABEL_DETECTION' } ],  'imageContext': {'languageHints': ['en']} } ] }"
        // SECURITY: the API key is hard-coded in source and committed to the
        // repo; it should be moved to untracked configuration and rotated.
        guard let url = URL(string: "https://vision.googleapis.com/v1/images:annotate?key=AIzaSyC8Jpl77-7SBBw_YDd3voyjYzb3JLYe6Yo") else {
            completionHandler(NSError(domain: "", code: 400, userInfo: [NSLocalizedDescriptionKey: "Invalid endpoint URL"]), nil)
            return
        }
        var request = URLRequest(url: url,
                                 cachePolicy: .reloadIgnoringLocalAndRemoteCacheData,
                                 timeoutInterval: 30.0)
        request.httpMethod = "POST"
        request.setValue("application/json", forHTTPHeaderField: "Content-Type")
        request.httpBody = body.data(using: .utf8)

        let task = URLSession.shared.dataTask(with: request) { [weak self] data, _, error in
            guard let self = self else { return }
            // FIX: return after reporting a transport error — the original fell
            // through and could invoke completionHandler a second time.
            if let error = error {
                print(error.localizedDescription)
                completionHandler(error, "")
                return
            }
            // FIX: UIKit is main-thread-only — the original called hideLoader()
            // directly on the URLSession completion queue.
            DispatchQueue.main.async {
                self.loadingButton.hideLoader()
            }
            guard let data = data else {
                completionHandler(NSError(domain: "", code: 500, userInfo: [NSLocalizedDescriptionKey: "Empty response"]), nil)
                return
            }
            do {
                let rawText = String(data: data, encoding: .utf8) ?? "Data could not be printed"
                print(rawText)
                // FIX: conditional cast instead of `as!` — a non-dictionary
                // response (e.g. an HTML error page) crashed the app.
                guard let json = try JSONSerialization.jsonObject(with: data, options: .allowFragments) as? [String: Any] else {
                    completionHandler(NSError(domain: "", code: 500, userInfo: [NSLocalizedDescriptionKey: "Unexpected response format"]), nil)
                    return
                }
                let parsedJson = JSON(json)
                if let labels = parsedJson["responses"][0]["labelAnnotations"].array, !labels.isEmpty {
                    // FIX: mutate dataList and the UI together on the main
                    // queue (the original appended on the background queue).
                    DispatchQueue.main.async {
                        self.presentLabels(labels)
                    }
                }
                // FIX: bounds-check `responses` — indexing an empty NSArray
                // with [0] crashed; a guard chain flattens the nesting too.
                if let responses = json["responses"] as? NSArray,
                   responses.count > 0,
                   let first = responses[0] as? [String: Any],
                   let annotation = first["fullTextAnnotation"] as? [String: Any],
                   let text = annotation["text"] as? String {
                    completionHandler(nil, text)
                    return
                }
                // NOTE(review): message text kept verbatim (including the
                // "Invaild" typo) in case callers match on it; also reported
                // when the response simply has no fullTextAnnotation, which is
                // arguably not a 401 — confirm intended semantics.
                let error = NSError(domain: "", code: 401, userInfo: [NSLocalizedDescriptionKey: "Invaild access token"])
                completionHandler(error, nil)
            } catch {
                print("error parsing \(error)")
                completionHandler(error, nil)
            }
        }
        task.resume()
    }

    /// Shows the returned labels as tags, matches them against the local
    /// calorie database, and displays/speaks the first matched food.
    /// Must be called on the main thread.
    private func presentLabels(_ labels: [JSON]) {
        dataList.append(contentsOf: labels)
        biggestTagListView.isHidden = false
        var matchedFoods: [JSON] = []
        // Add every non-empty label as a tag and collect database matches.
        for item in dataList {
            let content = item["description"].stringValue
            guard !content.isEmpty else { continue }
            biggestTagListView.addTag(content)
            fullHealth = healthList.first { $0.Food == content }
            if fullHealth != nil {
                matchedFoods.append(item)
            }
        }
        // Display and speak the first high-confidence match.
        guard let item = matchedFoods.first else { return }
        let content = item["description"].stringValue
        guard !content.isEmpty,
              let health = healthList.first(where: { $0.Food?.range(of: content) != nil })
        else { return }
        // FIX: nil-coalesce instead of force-unwrapping Serving/Calories —
        // an incomplete database row crashed the original.
        resultLabel.text = String(format: "%@\n%@\n%@", health.Food ?? "", health.Serving ?? "", health.Calories ?? "")
        if let txt = resultLabel.text {
            SpeechSynthesizer.Shared.speak(txt)
        }
    }
}


