//
//  TranslationAudioController.swift
//  GLUZSwift
//
//  Created by xyanl on 2025/5/10.
//

import AVFoundation
import UIKit

import Reusable
import SnapKit

// Conformance lets the Reusable library instantiate this controller from its XIB.
extension TranslationAudioController: NibLoadable {}

/// Speech-translation screen: the user long-presses a language button to
/// record, the recognized text is translated (zh ⇄ ru) and appended to a
/// chat-style table. Two recognition backends exist: iFlytek when
/// `isUseIfly == true` (current default), otherwise the NeoNui SDK fed by
/// Zego raw audio capture.
class TranslationAudioController: BaseTableViewController {
    
    // Press-and-hold area for speaking Chinese (tag 0, set in viewDidLoad).
    @IBOutlet weak var zhView: UIView!
    // Press-and-hold area for speaking Russian (tag 1, set in viewDidLoad).
    @IBOutlet weak var ruView: UIView!
    
    @IBOutlet weak var zhTitleLabel: UILabel!
    @IBOutlet weak var ruTitleLabel: UILabel!
    
    /// Amplitude waveform shown only while recording.
    private lazy var waveformView: WaveformView = {
        let view = WaveformView()
        view.backgroundColor = .clear
        view.isHidden = true
        return view
    }()
    
    /// Text-to-speech player; created lazily in `setupSynthesizer`.
    private var synthesizer: AVSpeechSynthesizer?
    /// Source language of the recording currently in progress.
    private var language: LanguageTpye = .zh
    /// Conversation history backing the table view.
    /// NOTE(review): persisted loading is disabled (commented out) — confirm intent.
    private var list: [TranslationAudioModel] = []//TranslationAudioModel.loadSavedAccounts()
    
    /// Optional callback receiving "original\ntranslation" for each result.
    var resultCallBack: ((String) -> Void)?
    
    // MARK: - NeoNui recognizers (one SDK instance per language endpoint)
    private lazy var ruNeoNui = {
        let neoNui = NeoNui()
        neoNui.delegate = self
        return neoNui
    }()
    
    private lazy var zhNeoNui = {
        let neoNui = NeoNui()
        neoNui.delegate = self
        return neoNui
    }()
    
    
    // Consecutive empty reads in onNuiNeedAudioData (diagnostic counter).
    private var emptyCount = 0
    /// Raw audio captured from the microphone, consumed by the NeoNui SDK.
    private var recordedVoiceData = Data()
    /// Lock object guarding `recordedVoiceData` (used with objc_sync_enter/exit).
    private let recordedVoiceDataLock = NSObject()
    
    /// Final recognition result for the current utterance.
    var resultText: String = ""
    /// Intermediate (partial) recognition result.
    var middleText: String = ""
    
    // Haptic feedback fired when recording starts/stops.
    private var feedBack = UIImpactFeedbackGenerator()
    
    // iFlytek speech recognizer (shared manager).
    private let iflyRec = IFlySpeechRecognizerManager.shared
    
    // Backend switch: true = iFlytek, false = NeoNui + Zego capture.
    private let isUseIfly = true
    
    override func viewDidLoad() {
        super.viewDidLoad()
        setupNav()
        setupUI()
        feedBack.prepare()
        if (isUseIfly) {
            
            iflyRec.delegate = self
        } else {
            initNeoNui()
        }
        
        
        setupSynthesizer()
    
        // Tags identify which view triggered the shared long-press handler.
        zhView.tag = 0
        ruView.tag = 1
        zhView.sg.addLongGesture(self, #selector(handleLongPress))
        ruView.sg.addLongGesture(self, #selector(handleLongPress))
        
        RequestPermission.requestMicrophonePermission()
    }
    
    override func viewDidDisappear(_ animated: Bool) {
        super.viewDidDisappear(animated)
        xLog("[NeoNui] viewDidDisappear 被调用")
        
        // Immediately release recording/recognition/speech resources.
        stopAllResources()
    }
    
    /// Refreshes localized UI text (called on language change and from setupNav).
    override func updateLanguageAction() {
        navigationItem.title = "语音翻译".localized(.tran)
        // zhTitleLabel.text = "说中文".localized(.tran)
        // ruTitleLabel.text = "说俄语".localized(.tran)
    }
    
    deinit {
        xLog("[NeoNui] TranslationAudioController deinit 开始")
        stopAllResources()
        xLog("[NeoNui] TranslationAudioController deinit 完成")
    }
    
    /// Stops recognition, audio capture and speech synthesis for the active backend.
    private func stopAllResources() {
        xLog("[NeoNui] 开始停止所有资源")
        if (isUseIfly) {
            iflyRec.stopListening()
        } else {
            // 1. Stop raw audio capture.
            stopAudioDataObserver()
            
            // 2. Release the NeoNui SDK instances.
            deinitNeoNui()
        }
        
        // 3. Stop text-to-speech playback.
        stopSpeaking()
        
        xLog("[NeoNui] 所有资源停止完成")
    }
    
}

// MARK: - private
// MARK: - private
private extension TranslationAudioController {
    
    /// Transparent navigation bar plus localized title.
    func setupNav() {
        vhl_navBarBackgroundAlpha = 0
        updateLanguageAction()
    }
    
    /// Configures the table view, waveform overlay and static labels.
    func setupUI() {
        tableView.backgroundColor = .clear
        tableView.delegate = self
        tableView.dataSource = self
        tableView.separatorStyle = .none
        tableView.rowHeight = 36
        tableView.register(cellType: TranslationAudioZHCell.self)
        tableView.register(cellType: TranslationAudioRUCell.self)
        tableView.snp.remakeConstraints {
            $0.left.right.equalToSuperview()
            $0.top.equalToSuperview().offset(xIphoneNavStatusBarHeight)
            $0.bottom.equalTo(-(xIphoneBottomSafeHeight + 98 + 10))
        }
        scrollToBottom()
        
        view.addSubview(waveformView)
        waveformView.snp.makeConstraints {
            $0.center.equalToSuperview()
            $0.size.equalTo(CGSize(width: 200, height: 100))
        }
        
        zhTitleLabel.text = "说中文"
        ruTitleLabel.text = "На русском"
    }
    
    /// Appends one conversation entry, refreshes the table and persists the list.
    func addItem(type: LanguageTpye, text: String, translation: String) {
        let entry = TranslationAudioModel(type: type == .zh ? 0: 1, text: text, translation: translation)
        list.append(entry)
        tableView.reloadData()
        
        TranslationAudioModel.saveAccounts(list)
    }
    
    /// Scrolls to the last row after a 1-second delay, unless the user is
    /// actively interacting with the table.
    func scrollToBottom() {
        
        // A zero-duration animation with a 1s delay acts as a delayed callback.
        UIView.animate(withDuration: 0, delay: 1.0, options: .allowUserInteraction, animations: {}) { _ in
            
            let table = self.tableView
            guard !(table.isTracking || table.isDragging || table.isDecelerating) else {
                return
            }
            
            let lastRow = self.list.count - 1
            guard lastRow >= 0 else { return }
            table.scrollToRow(at: IndexPath(row: lastRow, section: 0), at: .bottom, animated: true)
        }
    }
    
    /// Translates `text` (direction derived from `language`) and appends the
    /// result to the conversation on success.
    func translation(_ text: String) {
        guard !text.isEmpty else {
            YLHUD.showMessage("说话时间太短".localized(.tran))
            return
        }
        
        // Direction: the recorded language is the source, the other is the target.
        let (from, to) = language == .zh ? ("zh", "ru") : ("ru", "zh")
//        YLHUD.showLottie()
        NeoNuiTranslation.translation(from: from, to: to, text: text) { [weak self] _, result in
            guard let self = self else { return }
            DispatchQueue.main.async {
                guard let dst = result else {
                    YLHUD.showMessage("翻译失败")
                    return
                }
//                YLHUD.hideHUD()
                // Show the original text together with its translation.
                self.addItem(type: self.language, text: text, translation: dst)
                self.resultCallBack?("\(text)\n\(dst)")
                self.scrollToBottom()
            }
        }
    }
}

// MARK: - Action
// MARK: - Action
extension TranslationAudioController {
    /// Long-press handler shared by both language views (tag 0 = zh, 1 = ru).
    @objc func handleLongPress(_ gesture: UILongPressGestureRecognizer) {
        guard let tag = gesture.view?.tag else { return }
        
        switch gesture.state {
        case .began:
            xLog("开始长按")
            startTrans(tag: tag)
        case .ended:
            xLog("结束手势")
            endTrans()
        default:
            break
        }
    }
    
    /// Begins a recording session for the language identified by `tag`.
    /// - Parameters:
    ///   - tag: 0 for Chinese, anything else for Russian.
    ///   - result: Optional override for `resultCallBack`.
    func startTrans(tag: Int, result: ((String)->(Void))? = nil) {
        resultCallBack = result
        waveformView.isHidden = false
        language = tag == 0 ? .zh: .ru
        feedBack.impactOccurred()
        waveformView.updateAmplitude(0.2)
        
        if isUseIfly {
            let spoken: IFlySpeechLanguage = language == .zh ? .chinese : .russian
            iflyRec.setLanguage(spoken)
            // startListening can block; keep it off the main thread.
            DispatchQueue.global().async {
                self.iflyRec.startListening()
            }
        } else {
            startNeoNui(tag == 0 ? .zh: .ru)
            startAudioDataObserver()
        }
    }
    
    /// Ends the current recording session and hides the waveform.
    func endTrans() {
        feedBack.impactOccurred()
        isUseIfly ? iflyRec.stopListening() : stopAudioDataObserver()
        waveformView.isHidden = true
    }
    
}

// MARK: - UITableViewDelegate, UITableViewDataSource
// MARK: - UITableViewDelegate, UITableViewDataSource
extension TranslationAudioController: UITableViewDelegate, UITableViewDataSource {
    func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
        return list.count
    }
    
    func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
        let item = list[indexPath.row]
        // type 0 = spoken Chinese, everything else = spoken Russian.
        switch item.type {
        case 0:
            let cell = tableView.dequeueReusableCell(for: indexPath, cellType: TranslationAudioZHCell.self)
            cell.model = item
            cell.delegate = self
            return cell
        default:
            let cell = tableView.dequeueReusableCell(for: indexPath, cellType: TranslationAudioRUCell.self)
            cell.model = item
            cell.delegate = self
            return cell
        }
    }
    
    func tableView(_ tableView: UITableView, heightForRowAt indexPath: IndexPath) -> CGFloat {
        // Heights are pre-computed in the model's initializer.
        return list[indexPath.row].cellHeight
    }
    
}

// MARK: - TranslationAudioCellDelegate
extension TranslationAudioController: TranslationAudioCellDelegate {
    /// Replays a row's translation via text-to-speech. The row's `type` is the
    /// SOURCE language; `speakTextWithDelay` picks the opposite voice.
    func playContent(model: TranslationAudioModel) {
        let sourceLanguage: LanguageTpye = model.type == 0 ? .zh : .ru
        speakTextWithDelay(text: model.translation, type: sourceLanguage)
    }
}

// MARK: - 语音识别器
extension TranslationAudioController {
    /// Lazily creates the shared `AVSpeechSynthesizer` and wires its delegate.
    func setupSynthesizer() {
        guard synthesizer == nil else { return }
        synthesizer = AVSpeechSynthesizer()
        synthesizer?.delegate = self
    }
    
    /// Immediately stops any in-progress speech and releases the synthesizer.
    func stopSpeaking() {
        synthesizer?.stopSpeaking(at: .immediate)
        synthesizer = nil
    }
    
    /// Reads `text` aloud, refusing to overlap an utterance already playing.
    /// - Parameters:
    ///   - text: The TRANSLATED text to speak.
    ///   - type: The source language of the original item. The voice is
    ///     deliberately inverted (`.zh` → "ru-RU") because `text` is the
    ///     translation, which is in the opposite language.
    func speakTextWithDelay(text: String, type: LanguageTpye) {
        
        setupSynthesizer()
        
        // Safe unwrap (setupSynthesizer guarantees non-nil, but avoid `!`).
        guard let synthesizer = synthesizer else { return }
        
        if synthesizer.isSpeaking {
            YLHUD.showMessage("正在朗读中".localized(.tran))
            return
        }
        let utterance = AVSpeechUtterance(string: text)
        // Inverted on purpose: a zh-source item carries a ru translation and
        // vice versa. ("zh-Hant-HK" would select Cantonese.)
        let language = type == .zh ? "ru-RU": "zh-CN"
        utterance.voice = AVSpeechSynthesisVoice(language: language)
        // Speech rate (platform default is 0.5).
        utterance.rate = AVSpeechUtteranceDefaultSpeechRate
        utterance.pitchMultiplier = 1.0
        utterance.volume = 1.0
        synthesizer.speak(utterance)
    }
    
}

// MARK: - AVSpeechSynthesizerDelegate
// MARK: - AVSpeechSynthesizerDelegate
extension TranslationAudioController: AVSpeechSynthesizerDelegate {
    /// Logs completion; defensively stops the synthesizer if it still
    /// reports speaking after finishing an utterance.
    func speechSynthesizer(_ synthesizer: AVSpeechSynthesizer, didFinish utterance: AVSpeechUtterance) {
        xLog("didFinish 播放完")
        xLog("\(synthesizer.isSpeaking ? "正在播放": "结束")")
        guard synthesizer.isSpeaking else { return }
        let stopped = synthesizer.stopSpeaking(at: .immediate)
        xLog("停止 : \(stopped ? "ok": "no")")
    }
    
    func speechSynthesizer(_ synthesizer: AVSpeechSynthesizer, didStart utterance: AVSpeechUtterance) {
        xLog("didStart \(utterance)")
    }
    
    func speechSynthesizer(_ synthesizer: AVSpeechSynthesizer, didCancel utterance: AVSpeechUtterance) {
        xLog("didCancel \(utterance)")
    }
}

// MARK: - 翻译
extension TranslationAudioController {
    
    /// Initializes both NeoNui SDK instances — one websocket endpoint per
    /// language. NOTE(review): account fields in `genParams` are placeholders
    /// ("default") and must be configured before the service is reachable.
    func initNeoNui() {
        xLog("[NeoNui] 初始化")
        ruNeoNui.nui_initialize(genParams(.ru), logLevel: NUI_LOG_LEVEL_NONE, saveLog: false)
        ruNeoNui.nui_set_params(configParams(.ru))
        
        zhNeoNui.nui_initialize(genParams(.zh), logLevel: NUI_LOG_LEVEL_NONE, saveLog: false)
        zhNeoNui.nui_set_params(configParams(.zh))
    }
    
    /// Releases both SDK instances.
    func deinitNeoNui() {
        xLog("[NeoNui] 开始销毁NeoNui")
        ruNeoNui.nui_release()
        zhNeoNui.nui_release()
    }
    
    /// Starts a push-to-talk (`MODE_P2T`) dialog on the recognizer matching
    /// `language`, resetting the capture buffer first.
    ///
    /// Fixes two issues in the original: the ru/zh branches were duplicated
    /// verbatim, and the `recordedVoiceData` reset ran without the lock that
    /// guards it in `onNuiNeedAudioData`/`onCapturedAudioData`.
    func startNeoNui(_ language: NeoNuiType) {
        let isRussian = language == .ru
        xLog(isRussian ? "[NeoNui] 启动俄语" : "[NeoNui] 启动中文")
        DispatchQueue.global().async { [weak self] in
            guard let self = self else { return }
            self.language = isRussian ? .ru : .zh
            // Reset under the shared lock to avoid racing the audio callbacks.
            objc_sync_enter(self.recordedVoiceDataLock)
            self.recordedVoiceData = Data()
            objc_sync_exit(self.recordedVoiceDataLock)
            let recognizer = isRussian ? self.ruNeoNui : self.zhNeoNui
            recognizer.nui_dialog_start(MODE_P2T, dialogParam: "".cString(using: .utf8))
        }
    }
    
    /// Builds the NUL-terminated JSON init parameters for one language.
    /// Only the websocket endpoint and the ITN flag differ between languages.
    /// - Returns: `[CChar]` JSON (NUL terminator added by `cString(using:)`),
    ///   or nil when serialization fails.
    private func genParams(_ language: NeoNuiType) -> [CChar]? {
        var params: [String: Any] = [
            "app_key": "default",
            "token": "default",
            "save_wav": "false",
            "device_id": "123",
            "sample_rate": "16000",
            "format": "pcm",
            "sr_format": "pcm",
            "service_mode": "4",
            // Inverse text normalization is only enabled for Chinese.
            "enable_inverse_text_normalization": language == .zh,
        ]
        
        // Each language talks to its own endpoint.
        params["url"] = language == .zh
            ? "ws://multimodal.wshengtu.com:10000/ws/v1"
            : "ws://multimodal.wshengtu.com:10005/ws/v1"
        
        xLog("[NeoNui] genParams \((language == .zh ? "zh": "ru")): \(params as NSDictionary)")
        
        return params.toJSONString()?.cString(using: .utf8)
    }
    
    /// Builds the NUL-terminated JSON runtime configuration: 16 kHz PCM,
    /// intermediate results and punctuation on, VAD off with generous
    /// (10 s) silence limits. `language` is currently unused but kept for
    /// interface stability.
    private func configParams(_ language: NeoNuiType) -> [CChar]? {
        let config: [String: Any] = [
            "enable_intermediate_result": true,
            "enable_punctuation_prediction": true,
            "sample_rate": 16000,
            "sr_format": "pcm",
            "enable_voice_detection": false,
            "max_start_silence": 10000,
            "max_end_silence": 10000,
            "max_sentence_silence": 10000,
        ]
        
        let params: [String: Any] = [
            "nls_config": config,
            "service_type": 0
        ]
        
        return params.toJSONString()?.cString(using: .utf8)
    }
}

extension TranslationAudioController: NeoNuiSdkDelegate {
    
    /// Safely extracts `payload.result` from an ASR callback JSON string.
    /// Returns nil when the pointer is nil or the JSON is malformed — the
    /// original force-unwrapped `toDictionary()!`, which could crash on a
    /// malformed callback payload.
    private func asrResultText(_ asr_result: UnsafePointer<CChar>?) -> String? {
        guard let asr_result = asr_result,
              let dic = String(cString: asr_result).toDictionary(),
              let payloadDic = dic["payload"] as? [String: Any] else {
            return nil
        }
        return payloadDic["result"] as? String
    }
    
    /// Dispatches NeoNui recognition events: partial results update
    /// `middleText`, final results are accumulated into `resultText` and sent
    /// to translation on the main queue, and fatal socket errors stop capture.
    func onNuiEventCallback(_ nuiEvent: NuiCallbackEvent, dialog: Int, kwsResult wuw: UnsafePointer<CChar>!, asrResult asr_result: UnsafePointer<CChar>!, ifFinish finish: Bool, retCode code: Int32) {
        xLog("[NeoNui] 收到回调事件: \(nuiEvent), 错误码: \(code), finish: \(finish)")
        switch nuiEvent {
        case EVENT_ASR_PARTIAL_RESULT:
            xLog("[NeoNui] EVENT_ASR_PARTIAL_RESULT 被触发")
            // Intermediate (in-progress) recognition result.
            if let res = asrResultText(asr_result) {
                xLog("[NeoNui] 中间结果 : \(res)");
                middleText = res
            }
            
        case EVENT_ASR_RESULT:
            xLog("[NeoNui] EVENT_ASR_RESULT 被触发")
            // End of a sentence: the complete recognized text is returned.
            if let res = asrResultText(asr_result) {
                xLog("[NeoNui] EVENT_SENTENCE_END 结果 : \(res)");
                middleText = ""
                resultText.append(res)
                DispatchQueue.main.async { [weak self] in
                    guard let self = self else { return }
                    self.translation(self.resultText)
                    self.resultText = ""
                    self.middleText = ""
                }
            }
            
        case EVENT_ASR_ERROR:
            xLog("[NeoNui] 错误 code:\(code)")
            // On error, asr_result holds the error message; log the SDK's full
            // responses (with task_id) to ease troubleshooting.
            let ruStr = String(cString: ruNeoNui.nui_get_all_response())
            xLog("[NeoNui] ru all_response: \(ruStr)")
            
            let zhStr = String(cString: zhNeoNui.nui_get_all_response())
            xLog("[NeoNui] zh all_response: \(zhStr)")
            
            // 52010001 / 240069: internal service errors — the socket is
            // closed and the client should stop and retry.
            if code == 240069 || code == 52010001 {
                xLog("[NeoNui] code:\(code) socket已关闭。")
#if DEBUG
                DispatchQueue.main.async {
                    YLHUD.showMessage("code:\(code) socket已关闭。停止音频采集")
                }
#endif
                stopAudioDataObserver()
            }
            
        default:
            break
        }
        
        // finish == true (error or success) marks the end of one task
        // lifecycle; a new recognition may be started afterwards.
        if finish {
            xLog("[NeoNui] 表示一次任务生命周期结束 code:\(code)")
        }
    }
    
    /// SDK pull callback: copies up to `len` bytes of captured audio into
    /// `audioData` and removes them from the buffer.
    /// - Returns: The number of bytes written (0 when the buffer is empty).
    func onNuiNeedAudioData(_ audioData: UnsafeMutablePointer<CChar>!, length len: Int32) -> Int32 {
        autoreleasepool {
            objc_sync_enter(recordedVoiceDataLock)
            defer { objc_sync_exit(recordedVoiceDataLock) }
            guard !recordedVoiceData.isEmpty else {
                // Log (then reset the counter) after 50 consecutive empty reads.
                emptyCount += 1
                if emptyCount >= 50 {
                    xLog("_recordedVoiceData length = \(recordedVoiceData.count)! empty 50 times.")
                    emptyCount = 0
                }
                return 0
            }
            
            let recorderLen = min(recordedVoiceData.count, Int(len))
            let range = 0..<recorderLen
            
            // Copy the front of the buffer into the SDK-provided pointer.
            recordedVoiceData.copyBytes(
                to: UnsafeMutableRawBufferPointer(start: audioData, count: recorderLen),
                from: range
            )
            
            // Drop the bytes just handed to the SDK.
            recordedVoiceData.removeSubrange(range)
            emptyCount = 0
            
            return Int32(recorderLen)
        }
    }
    
    /// Audio-state transitions; clears the capture buffer when recording opens.
    func onNuiAudioStateChanged(_ state: NuiAudioState) {
        switch state {
        case STATE_OPEN:
            // Reset under the shared lock — the original reset raced the
            // concurrent reads/writes in the audio callbacks.
            objc_sync_enter(recordedVoiceDataLock)
            recordedVoiceData = Data()
            objc_sync_exit(recordedVoiceDataLock)
            xLog("[NeoNui] onNuiAudioStateChanged STATE_OPEN \(state)")
            
        case STATE_PAUSE:
            xLog("[NeoNui] onNuiAudioStateChanged STATE_PAUSE \(state)")
            
        case STATE_CLOSE:
            xLog("[NeoNui] onNuiAudioStateChanged STATE_CLOSE \(state)")
            
        default: break
        }
    }
    
    /// Maps the SDK volume (roughly -160 dB … 0 dB) to a 0…1 amplitude and
    /// forwards it to the waveform on the main queue (matches Android logic).
    func onNuiRmsChanged(_ rms: Float) {
        xLog("即构音量: \(rms)")
        var normalizedAmplitude: CGFloat = 0.0
        if rms > -160.0 {
            // Map [-160, 0] linearly onto [0, 1].
            normalizedAmplitude = (Double(rms) + 160.0) / 160.0
            normalizedAmplitude = max(0.0, min(1.0, normalizedAmplitude))
        }
        
        DispatchQueue.main.async {
            self.waveformView.updateAmplitude(normalizedAmplitude)
        }
    }
    
}

// MARK: - 音频采集
extension TranslationAudioController: ZegoAudioDataHandler {
    
    /// Starts Zego raw-audio capture (16 kHz mono) and routes frames to this
    /// controller, with gain control and noise/echo suppression enabled.
    func startAudioDataObserver() {
        let frameParam = ZegoAudioFrameParam()
        frameParam.channel = .mono
        frameParam.sampleRate = .rate16K
        // Request both the captured and the playback audio callbacks.
        let mask: ZegoAudioDataCallbackBitMask = [.captured, .player]
        
        let engine = ZegoExpressEngine.shared()
        engine.startAudioDataObserver(mask, param: frameParam)
        engine.setAudioDataHandler(self)
        engine.enableAGC(true)          // automatic gain control
        engine.enableAEC(true)          // acoustic echo cancellation
        engine.enableANS(true)          // noise suppression
        engine.enableTransientANS(true) // transient noise suppression
    }
    
    /// Cancels any in-flight NeoNui dialogs and stops raw-audio capture.
    func stopAudioDataObserver() {
        xLog("[NeoNui] 停止音频采集")
        ruNeoNui.nui_dialog_cancel(false)
        zhNeoNui.nui_dialog_cancel(false)
        let engine = ZegoExpressEngine.shared()
        engine.stopAudioDataObserver()
        engine.setAudioDataHandler(nil)
    }
    
    // MARK: ZegoAudioDataHandler
    
    /// Appends each locally captured audio frame to the shared buffer that
    /// `onNuiNeedAudioData` drains, under the same lock.
    func onCapturedAudioData(_ data: UnsafePointer<UInt8>, dataLength: UInt32, param: ZegoAudioFrameParam) {
        objc_sync_enter(recordedVoiceDataLock)
        defer { objc_sync_exit(recordedVoiceDataLock) }
        recordedVoiceData.append(data, count: Int(dataLength))
    }
    
}


extension TranslationAudioController: MyIFlySpeechRecognizerDelegate {
    /// Accumulates recognition chunks into `resultText`.
    /// NOTE(review): every chunk is appended regardless of `isLast` — confirm
    /// the recognizer delivers incremental (not cumulative) results.
    func speechRecognizerDidReceiveResult(_ result: String, isLast: Bool) {
        resultText.append(result)
        xLog("结果讯飞: \(result) isLast: \(isLast)")
    }
    
    /// Recognition session finished: hide the waveform, translate the
    /// accumulated text and reset the buffers.
    /// Fix: `waveformView.isHidden` was mutated outside the main-queue
    /// dispatch even though this delegate otherwise hops to main for UI work;
    /// all UI mutation now happens on the main queue.
    func speechRecognizerDidComplete(_ error: (any Error)?) {
        xLog("完成讯飞")
        DispatchQueue.main.async { [weak self] in
            guard let self = self else { return }
            self.waveformView.isHidden = true
            self.translation(self.resultText)
            self.resultText = ""
            self.middleText = ""
        }
    }
    
    func speechRecognizerDidBeginRecording() {
        xLog("开始讯飞")
    }
    
    func speechRecognizerDidEndRecording() {
        xLog("结束讯飞")
    }
    
    /// Maps the recognizer's volume to a 0…1 amplitude (with a small random
    /// jitter for visual liveliness) and forwards it to the waveform.
    func speechRecognizerDidChangeVolume(_ volume: Int) {
        let clampedVolume = max(volume, 0)
        let randomVolume = CGFloat(clampedVolume) + CGFloat(arc4random_uniform(10))
        xLog("讯飞随机音量: \(randomVolume)")
        let normalizedAmplitude = max(0.0, min(1.0, randomVolume / 40.0))
        DispatchQueue.main.async {
            self.waveformView.updateAmplitude(normalizedAmplitude)
        }
    }
    
    func speechRecognizerDidCancel() {
        xLog("取消讯飞")
    }
}

/// One conversation entry: the recognized original text plus its translation,
/// persisted per-user to UserDefaults as JSON.
struct TranslationAudioModel: Codable {
    
    // Source language of the spoken input (0 = Chinese, 1 = Russian).
    var type: Int
    /// Recognized original text.
    var text: String
    /// Translated text.
    var translation: String
    /// Creation timestamp.
    var createtime: Int = 0
    /// Pre-computed table-view row height.
    var cellHeight: CGFloat = 0
    
    init(type: Int, text: String, translation: String) {
        self.type = type
        self.text = text
        self.translation = translation
        self.cellHeight = calculateCellHeight()
        self.createtime = Date.sg.timestamp()
    }
    
    /// Derives the row height from the rendered sizes of both texts plus the
    /// fixed paddings and the play-button area.
    func calculateCellHeight() -> CGFloat {
        
        let contentWidth = xScreenW*0.6 - 16*2
        
        let originalHeight = text.sg.size(width: contentWidth, font: .systemFont(ofSize: 14, weight: .semibold)).height
        let translatedHeight = translation.sg.size(width: contentWidth, font: .systemFont(ofSize: 14, weight: .regular)).height
        
        // Four 16-pt paddings + 32-pt play button + 20-pt trailing space.
        let fixedPadding: CGFloat = 16 + 16 + 16 + 32 + 16 + 20
        
        return originalHeight + translatedHeight + fixedPadding
    }
    
    // MARK: - Persistence
    /// Per-user UserDefaults key for the saved conversation.
    static let kTranslationAudioDefaultsKey = "kTranslationAudioDefaultsKey-\(UserProfile.userId)"
    
    /// Loads the saved conversation for the current user (empty on any failure).
    static func loadSavedAccounts() -> [TranslationAudioModel] {
        guard let stored = UserDefaults.standard.data(forKey: kTranslationAudioDefaultsKey) else { return [] }
        do {
            return try JSONDecoder().decode([TranslationAudioModel].self, from: stored)
        } catch {
            xLog("账号加载失败: \(error)")
            return []
        }
    }
    
    /// Persists the conversation list for the current user.
    static func saveAccounts(_ accounts: [TranslationAudioModel]) {
        do {
            let encoded = try JSONEncoder().encode(accounts)
            UserDefaults.standard.set(encoded, forKey: kTranslationAudioDefaultsKey)
        } catch {
            xLog("账号保存失败: \(error)")
        }
    }
    
}
