// src/utils/asr-xf.ts
// 科大讯飞 ASR（语音识别）前端直连工具（流式识别版）
// 纯前端实现：科大讯飞语音听写 WebSocket API
// 风险提示：在前端保存 apiSecret 会泄露，请仅在测试/内网使用。生产必须后端签名！

import { buildSignedWsUrl, b64encode, b64encodeBytes } from './tts-xf';

// ========== Build one ASR request frame ==========
// Produces a single JSON frame for the Xfyun "iat" WebSocket protocol.
// status: 0 = first frame, 1 = intermediate frame, 2 = last frame.
// When status is omitted it is derived from whether audio data is present.
function buildAsrParams(appId: string, audioBase64?: string, status?: number) {
  const frameStatus = status ?? (audioBase64 ? 1 : 0);
  return {
    common: {
      app_id: appId, // Xfyun APPID
    },
    business: {
      language: "zh_cn", // Chinese
      domain: "iat", // general-purpose dictation
      accent: "mandarin", // Mandarin accent
      vinfo: 1, // request VAD/endpoint info in results — confirm against iat docs
      vad_eos: 10000, // end-of-speech silence timeout (ms)
    },
    data: {
      status: frameStatus,
      format: "audio/L16;rate=16000", // raw PCM, 16 kHz sample rate, 16-bit
      encoding: "raw", // uncompressed PCM
      audio: audioBase64 ?? "", // base64-encoded audio payload
    },
  };
}

// ========== Xfyun ASR streaming recognition ==========
export interface AsrOptions {
  apiKey: string; // Xfyun API Key
  apiSecret: string; // Xfyun API Secret — leaks if shipped to browsers; sign on a backend in production
  appId: string; // Xfyun APPID
  onResult?: (text: string, isFinal: boolean) => void; // recognition callback: text = recognized text, isFinal = whether this is a final result
  onError?: (error: Error | string) => void; // error callback
  onStart?: () => void; // invoked when recognition starts (WebSocket opened)
  onEnd?: () => void; // invoked when recognition ends
}

/**
 * Streaming speech recognition against the Xfyun "iat" WebSocket API.
 * Captures the microphone, converts samples to 16 kHz / 16-bit PCM, and
 * streams base64-encoded frames; recognition results arrive via callbacks.
 *
 * Fixes relative to the previous revision:
 * - `status` parsing uses `??` so a valid status of 0 is not discarded.
 * - `isFinal` uses the unified `status` (the old code re-read
 *   `result.header.status`, which is undefined for the flat envelope).
 * - `onEnd` fires exactly once (previously fired twice per session).
 * - The final `onResult` is no longer duplicated by the fallback check.
 * - `stop()` always releases resources, so a failure part-way through
 *   `start()` can no longer leak the microphone.
 */
export class XfAsrRecognition {
  // WebSocket connection to the Xfyun recognition service
  private ws: WebSocket | null = null;
  // Live microphone stream from getUserMedia
  private mediaStream: MediaStream | null = null;
  private audioContext: AudioContext | null = null;
  // NOTE(review): ScriptProcessorNode is deprecated; consider AudioWorklet.
  private processor: ScriptProcessorNode | null = null;
  // True between a successful socket creation and teardown
  private isActive = false;
  private options: AsrOptions;
  private currentText = ""; // accumulated recognized text across result frames

  constructor(options: AsrOptions) {
    this.options = options;
  }

  /**
   * Start recognition: request the microphone, build the signed WebSocket
   * URL, connect, and stream audio frames until stop() or the server
   * signals completion (status === 2).
   * Rejects (and cleans up) if any setup step fails.
   */
  async start(): Promise<void> {
    if (this.isActive) {
      console.warn("[XF-ASR] Already started");
      return;
    }

    try {
      // 1. Request microphone access and create the audio stream.
      console.log("[XF-ASR] Requesting microphone permission...");
      this.mediaStream = await navigator.mediaDevices.getUserMedia({
        audio: {
          channelCount: 1,
          echoCancellation: true,
          noiseSuppression: true,
          sampleRate: 16000,
          autoGainControl: true,
        },
        video: false,
      });

      // Log track details to help diagnose silent-capture issues.
      const audioTracks = this.mediaStream.getAudioTracks();
      console.log("[XF-ASR] Microphone permission granted!");
      console.log("[XF-ASR] Audio tracks:", audioTracks.length);
      if (audioTracks.length > 0) {
        const track = audioTracks[0];
        console.log("[XF-ASR] Audio track info:", {
          label: track.label,
          enabled: track.enabled,
          muted: track.muted,
          readyState: track.readyState,
          settings: track.getSettings()
        });
      }

      // 2. Create an AudioContext for raw sample processing.
      const AudioContextClass = window.AudioContext || (window as any).webkitAudioContext;
      this.audioContext = new AudioContextClass({
        sampleRate: 16000, // browsers may treat this as a hint — TODO confirm actual rate
      });

      console.log("[XF-ASR] AudioContext created:", {
        sampleRate: this.audioContext.sampleRate,
        state: this.audioContext.state
      });

      // 3. Create the audio processing node.
      const source = this.audioContext.createMediaStreamSource(this.mediaStream);
      const bufferSize = 4096; // samples per onaudioprocess callback
      this.processor = this.audioContext.createScriptProcessor(bufferSize, 1, 1);

      console.log("[XF-ASR] Audio processor created, buffer size:", bufferSize);

      // 4. Build the signed WebSocket URL.
      const wsUrl = await buildSignedWsUrl({
        apiKey: this.options.apiKey,
        apiSecret: this.options.apiSecret,
        host: "iat-api.xfyun.cn", // ASR service host
        path: "/v2/iat", // ASR WebSocket path
      });

      // 5. Open the WebSocket connection.
      this.ws = new WebSocket(wsUrl);
      this.isActive = true;
      this.currentText = "";

      // 6. WebSocket event handlers.
      let isFirstFrame = true;

      this.ws.onopen = () => {
        console.log("[XF-ASR] WebSocket connected");
        this.options.onStart?.();

        // Send an empty handshake frame first (status 0, no audio).
        // NOTE(review): the first *audio* frame below is also sent with
        // status 0 — confirm the server tolerates two status-0 frames.
        const handshakeFrame = buildAsrParams(this.options.appId, "", 0);
        console.log("[XF-ASR] Sending handshake frame:", handshakeFrame);
        this.ws?.send(JSON.stringify(handshakeFrame));

        // Start streaming the audio.
        let frameCount = 0;
        let totalSamples = 0;
        let maxAmplitude = 0;
        let samplesWithSound = 0;

        this.processor!.onaudioprocess = (e) => {
          if (!this.isActive || !this.ws || this.ws.readyState !== WebSocket.OPEN) {
            return;
          }

          const inputData = e.inputBuffer.getChannelData(0); // mono channel
          const inputLength = inputData.length;

          // ========== Audio level analysis (diagnostics only) ==========
          let sum = 0;
          let absMax = 0;
          let samplesAboveThreshold = 0;
          const threshold = 0.01; // "has sound" amplitude threshold (1%)

          for (let i = 0; i < inputLength; i++) {
            const sample = Math.abs(inputData[i]);
            sum += sample;
            absMax = Math.max(absMax, sample);
            if (sample > threshold) {
              samplesAboveThreshold++;
            }
          }

          const average = sum / inputLength;
          maxAmplitude = Math.max(maxAmplitude, absMax);
          totalSamples += inputLength;

          if (samplesAboveThreshold > 0) {
            samplesWithSound += inputLength;
          }

          // Print aggregate stats every 50 frames (roughly every 2-3 s).
          frameCount++;
          if (frameCount % 50 === 0) {
            const soundPercentage = (samplesWithSound / totalSamples) * 100;
            console.log("[XF-ASR] Audio Statistics (last 50 frames):", {
              frameCount,
              totalSamples,
              averageAmplitude: average.toFixed(6),
              maxAmplitude: maxAmplitude.toFixed(6),
              samplesWithSound: samplesWithSound,
              soundPercentage: soundPercentage.toFixed(2) + "%",
              hasAudio: absMax > 0.001 ? "YES" : "NO",
              audioLevel: absMax > 0.1 ? "LOUD" : absMax > 0.01 ? "NORMAL" : absMax > 0.001 ? "QUIET" : "SILENT"
            });
            // Reset the rolling statistics.
            maxAmplitude = 0;
            totalSamples = 0;
            samplesWithSound = 0;
          }

          // Detailed dump of the first few frames.
          if (frameCount <= 5) {
            console.log("[XF-ASR] Frame", frameCount, "audio data:", {
              sampleCount: inputLength,
              first5Samples: Array.from(inputData.slice(0, 5)).map(v => v.toFixed(6)),
              maxAmplitude: absMax.toFixed(6),
              averageAmplitude: average.toFixed(6),
              hasSound: absMax > 0.001
            });
          }

          // ========== Encode and send ==========
          const pcmData = this.floatTo16BitPCM(inputData); // to 16-bit PCM
          const base64Audio = b64encodeBytes(pcmData.buffer); // to base64

          // Quiet audio is still sent (let the server do VAD); only skip
          // frames that produced no bytes at all.
          if (!base64Audio || base64Audio.length === 0) {
            if (frameCount % 50 === 0) {
              console.warn("[XF-ASR] Empty audio data in frame", frameCount);
            }
            return;
          }

          // Send the audio frame (0 = first frame, 1 = continuation).
          const frameStatus = isFirstFrame ? 0 : 1;
          const frame = buildAsrParams(this.options.appId, base64Audio, frameStatus);

          if (isFirstFrame) {
            isFirstFrame = false;
            console.log("[XF-ASR] Sending first audio frame:", {
              base64Length: base64Audio.length,
              pcmLength: pcmData.length,
              sampleCount: inputLength,
              maxAmplitude: absMax.toFixed(6)
            });
          }

          try {
            this.ws.send(JSON.stringify(frame));
          } catch (sendError) {
            console.error("[XF-ASR] Error sending audio frame:", sendError);
          }
        };

        source.connect(this.processor!);
        this.processor!.connect(this.audioContext!.destination);

        console.log("[XF-ASR] Audio processing pipeline connected, ready to capture audio!");
      };

      this.ws.onmessage = (event) => {
        try {
          const result = JSON.parse(event.data);
          console.log("[XF-ASR] Received message:", result);

          // Two envelope shapes have been observed:
          // Format 1: {header: {code: 0}, payload: {...}}
          // Format 2: {code: 0, message: "success", data: {...}}

          let code: number | undefined;
          let message: string | undefined;
          let payload: any;
          let status: number | undefined;

          if (result?.header) {
            // Format 1: standard envelope
            code = result.header.code;
            message = result.header.message;
            payload = result.payload;
            status = result.header.status;
          } else {
            // Format 2: flat envelope (seen in practice per console logs)
            code = result.code;
            message = result.message;
            payload = result.data || result.payload;
            // FIX: `??` instead of `||` so a legitimate status of 0
            // (first frame) is not discarded.
            status = result.status ?? payload?.status;
          }

          console.log("[XF-ASR] Parsed:", { code, message, hasPayload: !!payload, status });

          // Non-zero code means a server-side error.
          if (code !== undefined && code !== 0) {
            const errorMsg = message || "ASR识别错误";
            console.error("[XF-ASR] Error:", errorMsg, result);
            this.options.onError?.(errorMsg);
            return;
          }

          // A success response without payload is likely a handshake ack
          // or heartbeat; ignore it.
          if (!payload) {
            console.log("[XF-ASR] No payload, ignoring (might be handshake/heartbeat)");
            return;
          }

          console.log("[XF-ASR] Payload:", payload);

          if (payload) {
            // Observed result locations:
            // 1. payload.result.ws  — standard
            // 2. payload.data.result — alternate
            // 3. payload.result     — simplified

            let resultObj = payload.result;

            // Fall back to payload.data.result when payload.result is empty.
            if (!resultObj || Object.keys(resultObj).length === 0) {
              resultObj = payload.data?.result;
            }

            // Last resort: use payload.data directly.
            if ((!resultObj || Object.keys(resultObj).length === 0) && payload.data && typeof payload.data === 'object') {
              resultObj = payload.data;
            }

            console.log("[XF-ASR] Result object:", resultObj, "keys:", resultObj ? Object.keys(resultObj) : []);

            if (resultObj && Object.keys(resultObj).length > 0) {
              // Extract the recognized text from the `ws` word list.
              const ws = resultObj.ws;
              let resultText = "";

              if (Array.isArray(ws) && ws.length > 0) {
                resultText = ws
                  .map((item: any) => {
                    if (item.cw && Array.isArray(item.cw)) {
                      return item.cw.map((word: any) => word.w || "").join("");
                    }
                    // If cw is missing, try item.text or item.w directly.
                    if (item.text) return item.text;
                    if (item.w) return item.w;
                    return "";
                  })
                  .filter((text: string) => text.length > 0)
                  .join("");
              } else if (resultObj.text) {
                // Direct text field.
                resultText = resultObj.text;
              } else if (typeof resultObj === 'string') {
                // Result is a bare string.
                resultText = resultObj;
              }

              console.log("[XF-ASR] Extracted text:", resultText, "Current text:", this.currentText);

              // Final-result detection. pgs "rpl" = replace, "apd" = append.
              // FIX: use the unified `status` parsed above; the old code
              // re-read result.header.status, undefined for format 2.
              const pgs = resultObj.pgs;
              const isFinal = pgs === "rpl" || pgs === "apd" || status === 2;

              if (resultText) {
                if (isFinal) {
                  // Final result: replace or append to the accumulated text.
                  if (pgs === "rpl") {
                    // Replace mode: overwrite the previous result.
                    this.currentText = resultText;
                  } else if (pgs === "apd") {
                    // Append mode: add to the accumulated text.
                    this.currentText += resultText;
                  } else {
                    // Otherwise: seed or append depending on prior text.
                    if (!this.currentText) {
                      this.currentText = resultText;
                    } else {
                      this.currentText += resultText;
                    }
                  }
                  console.log("[XF-ASR] Final result:", this.currentText);
                  this.options.onResult?.(this.currentText, true);
                } else {
                  // Interim result: report accumulated + in-flight text.
                  const tempText = this.currentText + resultText;
                  console.log("[XF-ASR] Interim result:", tempText);
                  this.options.onResult?.(tempText, false);
                }
              } else if (status === 2 && this.currentText) {
                // Session ended with no new text: emit the accumulated text.
                console.log("[XF-ASR] End with accumulated text:", this.currentText);
                this.options.onResult?.(this.currentText, true);
              }
            } else {
                // resultObj exists but is empty — still processing, silence,
                // or the recognition result has not arrived yet.
                if (status === 0 || status === 1) {
                  console.log("[XF-ASR] Result object exists but is empty, waiting for recognition result... (status:", status, ")");
                } else if (status === 2) {
                  // Session ended with an empty result: fall back to any
                  // accumulated text.
                  console.log("[XF-ASR] Recognition ended with empty result, checking accumulated text:", this.currentText);
                  if (this.currentText && this.currentText.trim().length > 0) {
                    console.log("[XF-ASR] Using accumulated text:", this.currentText);
                    this.options.onResult?.(this.currentText, true);
                  } else {
                    console.log("[XF-ASR] No text accumulated, might be silence or no speech detected");
                  }
                  // FIX: stop() fires onEnd exactly once (previously onEnd
                  // was invoked here AND inside stop()).
                  this.stop();
                }
              }
          } else {
            // No result field at all — log the raw payload for debugging.
            console.warn("[XF-ASR] No result field found in payload:", payload);

            // Still honor an end-of-session marker.
            if (status === 2) {
              console.log("[XF-ASR] Recognition ended without result field");
              if (this.currentText && this.currentText.trim().length > 0) {
                console.log("[XF-ASR] Using accumulated text from no result field:", this.currentText);
                this.options.onResult?.(this.currentText, true);
              }
              this.stop(); // fires onEnd once
            }
          }

          // Fallback teardown: if status === 2 and no branch above already
          // stopped the session, stop now. Any recognized text has already
          // been delivered via onResult above, so it is NOT re-emitted here
          // (the previous revision double-fired the final onResult).
          if (status === 2 && this.isActive) {
            console.log("[XF-ASR] Recognition ended (final check), status:", status, "current text:", this.currentText || "(empty)");
            this.stop(); // fires onEnd exactly once
          }
        } catch (error) {
          console.error("[XF-ASR] Parse error:", error, "Raw data:", event.data);
          this.options.onError?.(error as Error);
        }
      };

      this.ws.onerror = (error) => {
        console.error("[XF-ASR] WebSocket error:", error);
        this.options.onError?.("WebSocket连接错误");
        this.stop();
      };

      this.ws.onclose = () => {
        console.log("[XF-ASR] WebSocket closed");
        this.stop();
      };
    } catch (error) {
      console.error("[XF-ASR] Start error:", error);
      this.options.onError?.(error as Error);
      this.stop();
      throw error;
    }
  }

  /**
   * Stop recognition: send the final (status=2) frame if the socket is
   * still open, release all audio/socket resources, and fire onEnd once.
   * Safe to call repeatedly. Resource cleanup runs unconditionally, so a
   * failure part-way through start() cannot leak the microphone.
   */
  stop(): void {
    const wasActive = this.isActive;
    this.isActive = false;

    // Tell the server the stream is over (last frame, empty audio).
    // NOTE(review): the socket is closed immediately below, so a final
    // server response to this frame may be lost — confirm acceptable.
    if (wasActive && this.ws && this.ws.readyState === WebSocket.OPEN) {
      const params = buildAsrParams(this.options.appId);
      params.data.status = 2; // last frame
      params.data.audio = "";
      try {
        this.ws.send(JSON.stringify(params));
      } catch (e) {
        console.error("[XF-ASR] Error sending final frame:", e);
      }
    }

    // Release resources (always, even if never fully started).
    if (this.processor) {
      this.processor.disconnect();
      this.processor = null;
    }

    if (this.audioContext) {
      this.audioContext.close().catch(console.error);
      this.audioContext = null;
    }

    if (this.mediaStream) {
      this.mediaStream.getTracks().forEach((track) => track.stop());
      this.mediaStream = null;
    }

    if (this.ws) {
      try {
        this.ws.close();
      } catch (e) {
        // ignore
      }
      this.ws = null;
    }

    // Notify exactly once per active session.
    if (wasActive) {
      this.options.onEnd?.();
    }
  }

  /**
   * Convert Float32 samples ([-1, 1]) to 16-bit signed PCM.
   * Values are clamped, then scaled asymmetrically (0x8000 for negative,
   * 0x7fff for positive) to cover the full int16 range.
   */
  private floatTo16BitPCM(float32Array: Float32Array): Int16Array {
    const int16Array = new Int16Array(float32Array.length);
    for (let i = 0; i < float32Array.length; i++) {
      const s = Math.max(-1, Math.min(1, float32Array[i])); // clamp to [-1, 1]
      int16Array[i] = s < 0 ? s * 0x8000 : s * 0x7fff; // scale to int16
    }
    return int16Array;
  }

  // Whether a recognition session is currently active.
  get isListening(): boolean {
    return this.isActive;
  }
}

