//
//  SpeakEmojiViewController.swift
//  SwiftStudy
//
//  Created by chang on 2018/3/2.
//  Copyright © 2018年 chang. All rights reserved.
//

import UIKit
import Speech

@available(iOS 10.0, *)
class SpeakEmojiViewController: UIViewController {

    fileprivate var talkBtn : UIButton!           // bottom start/stop button
    fileprivate var talkLabel : UILabel!          // displays the recognized text
    fileprivate var talkImage : UIImageView!      // random emoji image shown per result
    fileprivate var recordRequest: SFSpeechAudioBufferRecognitionRequest? // live-audio recognition request
    fileprivate var recordTask: SFSpeechRecognitionTask?                  // in-flight recognition task
    fileprivate let audioEngine = AVAudioEngine() // recording engine feeding the request
    fileprivate lazy var recognizer: SFSpeechRecognizer = { // zh-CN recognizer
        // NOTE(review): force-unwrap assumes the zh-CN locale is always supported
        // on iOS 10+ devices — confirm before shipping to other locales.
        let recognize = SFSpeechRecognizer(locale: Locale(identifier: "zh-CN"))
        recognize?.delegate = self
        return recognize!
    }()
    
    override func viewDidLoad() {
        super.viewDidLoad()
        self.view.backgroundColor = UIColorFromRGB(rgbValue: 0xf0f8ff)
        self.navigationItem.title = "说话表情"
        
        // Speech framework requires iOS 10+; bail out with a toast on older systems.
        if SystemVersionString.doubleValue < 10.0 {
            self.view.makeToast("语音识别需要支持系统iOS10及以后的版本", duration: 6.6, position: .center, title: "提示")
            return;
        }
        
        // Start/stop button pinned to the bottom of the screen.
        talkBtn = UIButton.init(type: .custom)
        talkBtn.frame = CGRect.init(x: 0, y: MAINSCREEN_HEIGHT-50, width: MAINSCREEN_WIDTH, height: 50)
        talkBtn.setTitleColor(UIColor.white, for: .normal)
        talkBtn.titleLabel?.font = UIFont.boldSystemFont(ofSize: 18)
        talkBtn.addTarget(self, action: #selector(clickBottomBtn(sender:)), for: .touchUpInside)
        self.view.addSubview(talkBtn)
        
        // Multiline label under the navigation bar for the transcription.
        talkLabel = UILabel.init(frame: CGRect.init(x: 0, y: self.navgationBarBottom, width: MAINSCREEN_WIDTH, height: 200))
        talkLabel.backgroundColor = UIColorFromRGB(rgbValue: 0xfaebd7)
        talkLabel.font = UIFont.systemFont(ofSize: 15)
        talkLabel.textColor = UIColorFromRGB(rgbValue: 0x20b2aa)
        talkLabel.numberOfLines = 0;
        self.view.addSubview(talkLabel)
        
        // Centered 116x116 image view for the random emoji.
        talkImage = UIImageView.init(frame: CGRect.init(x: (MAINSCREEN_WIDTH-116)/2, y: talkLabel.bottom, width: 116, height: 116))
        self.view.addSubview(talkImage)
        
        //status
        self.speechAuthorizationStatusAction()
    }
    
    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        // Stop any in-flight recognition so the mic/engine don't outlive the screen.
        stopRecognize()
    }
    
    /// Requests speech-recognition authorization and configures the button
    /// to reflect the resulting status.
    ///
    /// The `requestAuthorization` completion handler is NOT guaranteed to run
    /// on the main thread, so all UIKit updates are dispatched to the main
    /// queue. (The previous code synced on a freshly-created private queue,
    /// which still mutated UI off the main thread.)
    func speechAuthorizationStatusAction() {
        self.view.makeToastActivity(.center)
        SFSpeechRecognizer.requestAuthorization {
            [weak self] (status) in
            DispatchQueue.main.async {
                self?.view.hideToastActivity()
                switch status {
                case .notDetermined :
                    self?.talkBtn.isEnabled = false
                    self?.talkBtn.backgroundColor = UIColor.lightGray
                    self?.talkBtn.setTitle("语音识别未授权", for: .normal)
                case .denied :
                    self?.talkBtn.isEnabled = false
                    self?.talkBtn.backgroundColor = UIColor.lightGray
                    self?.talkBtn.setTitle("用户未授权使用语音识别", for: .normal)
                case .restricted :
                    self?.talkBtn.isEnabled = false
                    self?.talkBtn.backgroundColor = UIColor.lightGray
                    self?.talkBtn.setTitle("语音识别在这台设备上受到限制", for: .normal)
                case .authorized :
                    self?.talkBtn.isEnabled = true
                    self?.talkBtn.backgroundColor = UIColor.defaultRedColor
                    self?.talkBtn.setTitle("开始语音识别", for: .normal)
                }
            }
        }
    }
    
    /// Toggles recognition: a title starting with "开始" means idle, so start;
    /// otherwise stop. Avoids the previous force-unwrap of `currentTitle`.
    @objc func clickBottomBtn(sender: UIButton) {
        let isStart = sender.currentTitle?.contains("开始") ?? false
        talkBtn.setTitle(isStart ? "停止录音" : "开始录音", for: .normal)
        DispatchQueue.main.async {
            isStart ? self.startRecognize() : self.stopRecognize()
        }
    }
    
    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}


//MARK: 录音识别
@available(iOS 10.0, *)
extension SpeakEmojiViewController {
    
    /// Starts live speech recognition from the microphone.
    ///
    /// Pipeline: cancel any previous task → configure the audio session →
    /// create a buffer-based recognition request → start the recognition task →
    /// tap the input node to feed audio buffers → start the engine.
    fileprivate func startRecognize() {
        //1. 停止当前任务 — ensure a clean slate before starting again.
        stopRecognize()
        //2. 创建音频会话
        let session = AVAudioSession.sharedInstance()
        do {
            try session.setCategory(AVAudioSessionCategoryRecord)
            try session.setMode(AVAudioSessionModeMeasurement)
            //激活Session
            try session.setActive(true, with: .notifyOthersOnDeactivation)
        }catch{
            print("ThrowsError：\(error)")
            // Without a working audio session, recording cannot proceed;
            // previously the engine was started anyway.
            return
        }
        //3. 创建识别请求
        recordRequest = SFSpeechAudioBufferRecognitionRequest()
        
        let inputNode = audioEngine.inputNode
        
        //开始识别获取文字 — the result handler runs on a private queue,
        // so every UIKit touch below hops to the main queue first.
        recordTask = recognizer.recognitionTask(with: recordRequest!, resultHandler: {
            [weak self] (result, error) in
            guard let strongSelf = self else { return }
            if let result = result {
                DispatchQueue.main.async {
                    // Show the latest transcription candidate (the original
                    // loop assigned each one in turn, netting to the last).
                    strongSelf.talkLabel.text = result.transcriptions.last?.formattedString
                    //1-40随机，包括1,40 — pick an image "xxy001"..."xxy040".
                    let imgStr = String.init(format: "xxy"+"%03d", Int(arc4random()%40)+1)
                    strongSelf.talkImage.image = UIImage.init(named: imgStr)
                }
                if result.isFinal {
                    strongSelf.audioEngine.stop()
                    inputNode.removeTap(onBus: 0)
                    strongSelf.recordRequest = nil
                    strongSelf.recordTask = nil
                    DispatchQueue.main.async {
                        strongSelf.talkBtn.isEnabled = true
                    }
                }
            }
            if error != nil {
                // Previously errors were swallowed and the engine/tap leaked;
                // tear everything down so the mic is released.
                DispatchQueue.main.async {
                    strongSelf.removeTask()
                }
            }
        })
        // Feed microphone buffers into the recognition request.
        // [weak self] breaks the engine → tap → self retain cycle.
        let recordFormat = inputNode.outputFormat(forBus: 0)
        inputNode.installTap(onBus: 0, bufferSize: 1024, format: recordFormat, block: { [weak self] (buffer, time) in
            self?.recordRequest?.append(buffer)
        })
        audioEngine.prepare()
        do {
            try audioEngine.start()
        } catch {
            print("ThrowsError：\(error)")
        }
    }
    /// Cancels the current recognition task (if any) and releases recording
    /// resources. Safe to call when nothing is running.
    fileprivate func stopRecognize() {
        if recordTask != nil {
            recordTask?.cancel()
            recordTask = nil
        }
        removeTask()
    }
    /// Stops the engine, removes the input tap, clears request/task state,
    /// and re-enables the talk button. Must be called on the main thread
    /// (it mutates UIKit state).
    fileprivate func removeTask() {
        self.audioEngine.stop()
        audioEngine.inputNode.removeTap(onBus: 0)
        self.recordRequest = nil
        self.recordTask = nil
        self.talkBtn.isEnabled = true
    }
}

//MARK: Delegate
@available(iOS 10.0, *)
extension SpeakEmojiViewController: SFSpeechRecognizerDelegate {
    /// Tracks recognizer availability (e.g. after network changes) and
    /// enables/disables the talk button accordingly. This delegate callback
    /// is not guaranteed to arrive on the main thread, so the UIKit update
    /// is dispatched to the main queue.
    public func speechRecognizer(_ speechRecognizer: SFSpeechRecognizer, availabilityDidChange available: Bool) {
        DispatchQueue.main.async { [weak self] in
            self?.talkBtn.isEnabled = available
        }
    }
}


