import React, { useState, useRef, useCallback, useEffect } from 'react';
import { useTranslation } from 'react-i18next';

// Props for the speech-recognition control widget.
interface SRControlProps {
  // Invoked with the final recognized text (FIN_TEXT results only; interim
  // MID_TEXT hypotheses are displayed locally but never forwarded).
  onResult: (text: string) => void;
  // Reports the recognizer lifecycle state to the parent.
  onStatusChange: (status: 'idle' | 'listening' | 'processing' | 'error') => void;
  // When true, renders as a compact inline button instead of a fixed overlay.
  inline?: boolean;
  // Visual theme; currently only affects the inline button's text color.
  theme?: 'dark' | 'red';
}

// Push-to-talk speech-recognition control backed by Baidu's realtime ASR
// WebSocket API: captures microphone audio, streams 16kHz PCM chunks to the
// server, and surfaces interim/final transcripts.
const SRControl: React.FC<SRControlProps> = ({ onResult, onStatusChange, inline, theme = 'dark' }) => {
  const { t } = useTranslation();
  // Gate for all console logging in this component.
  const DEBUG = false;
  const [isListening, setIsListening] = useState(false);
  // Latest transcript (interim or final) shown in the overlay.
  const [recognizedText, setRecognizedText] = useState('');
  const [error, setError] = useState<string | null>(null);
  
  // Live session handles kept in refs so the audio callback always sees the
  // current values without re-subscribing.
  const wsRef = useRef<WebSocket | null>(null);
  const mediaStreamRef = useRef<MediaStream | null>(null);
  const audioContextRef = useRef<AudioContext | null>(null);
  const processorRef = useRef<ScriptProcessorNode | null>(null);
  // True while the socket is open (set in onopen, cleared on close/stop);
  // gates audio sending in the processor callback.
  const isConnectedRef = useRef(false);
  const heartbeatTimerRef = useRef<number | null>(null);
  // Timestamp of the last frame sent; used to decide when a heartbeat is due.
  const lastSendTsRef = useRef<number>(0);
  // Captured samples not yet large enough to fill a full chunk.
  const pendingSamplesRef = useRef<Float32Array | null>(null);
  const CHUNK_MS = 160; // recommended frame duration: 160ms per frame
  const TARGET_RATE = 16000;
  const SAMPLES_PER_CHUNK = Math.floor(TARGET_RATE * (CHUNK_MS / 1000)); // 2560 samples

  // Baidu speech-recognition connection parameters.
  // NOTE(review): appId/apiKey are credentials hard-coded into a client-side
  // bundle — anyone can extract them from shipped JS. They should come from a
  // backend-issued token instead; confirm and rotate this key.
  const config = {
    appId: 119914870,
    apiKey: 'bce-v3/ALTAK-MAS47IcC3jJbtwLNtaUuK/530fefebe5e2132d6b7abaa18fe7e695c1a38c31',
    devPid: 15372, // Mandarin Chinese model with enhanced punctuation
    cuid: 'speech-recognition-' + Date.now(),
    format: 'pcm',
    sample: 16000
  };

  // Build a random version-4 UUID (used as the per-session `sn` parameter).
  // Each 'x' becomes a random hex nibble; the 'y' slot is forced into
  // [8, 9, a, b] per the UUIDv4 variant bits.
  const generateUUID = (): string =>
    'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, (slot) => {
      const nibble = (Math.random() * 16) | 0;
      const value = slot === 'x' ? nibble : (nibble & 0x3) | 0x8;
      return value.toString(16);
    });

  // Acquire the microphone and build the capture pipeline:
  // getUserMedia -> MediaStreamSource -> ScriptProcessor, whose callback
  // buffers samples and ships fixed-size PCM chunks over the open WebSocket.
  // Returns true on success, false (with a user-facing error) on failure.
  const initAudioContext = useCallback(async () => {
    try {
      // Request a mono 16kHz stream; browsers may ignore sampleRate and
      // deliver their native rate, which is handled by downsampling below.
      const stream = await navigator.mediaDevices.getUserMedia({ 
        audio: {
          sampleRate: 16000,
          channelCount: 1,
          echoCancellation: true,
          noiseSuppression: true
        } 
      });
      
      mediaStreamRef.current = stream;
      // webkitAudioContext fallback covers older Safari.
      audioContextRef.current = new (window.AudioContext || (window as any).webkitAudioContext)({
        sampleRate: 16000
      });
      
      const source = audioContextRef.current.createMediaStreamSource(stream);
      // NOTE(review): ScriptProcessorNode is deprecated in favor of
      // AudioWorklet; it still works but its callback runs on the main thread.
      const processor = audioContextRef.current.createScriptProcessor(2048, 1, 1);
      
      processor.onaudioprocess = (event) => {
        // Only forward audio while the recognition socket is live.
        if (isConnectedRef.current && wsRef.current?.readyState === WebSocket.OPEN) {
          const inputData = event.inputBuffer.getChannelData(0);
          // Some browsers capture at 48000Hz regardless of the requested rate;
          // do a simple linear-interpolation downsample to 16000Hz.
          const sourceRate = audioContextRef.current!.sampleRate;
          const downsampled = sourceRate === TARGET_RATE ? inputData : downsampleBuffer(inputData, sourceRate, TARGET_RATE);

          // Append to the pending buffer; audio is emitted in 160ms slices.
          const pending = pendingSamplesRef.current;
          if (!pending) {
            // slice() copies: when no resampling occurred, `downsampled`
            // aliases the node's own inputBuffer storage.
            pendingSamplesRef.current = downsampled.slice();
          } else {
            const merged = new Float32Array(pending.length + downsampled.length);
            merged.set(pending, 0);
            merged.set(downsampled, pending.length);
            pendingSamplesRef.current = merged;
          }

          // Send as many full chunks as are buffered, keeping the remainder.
          while (pendingSamplesRef.current && pendingSamplesRef.current.length >= SAMPLES_PER_CHUNK) {
            const chunk = pendingSamplesRef.current.slice(0, SAMPLES_PER_CHUNK);
            pendingSamplesRef.current = pendingSamplesRef.current.slice(SAMPLES_PER_CHUNK);
            const pcmData = convertFloat32ToPCM16(chunk);
            wsRef.current.send(pcmData);
            lastSendTsRef.current = Date.now();
          }
        }
      };
      
      source.connect(processor);
      // Connected to destination so the processor keeps firing — presumably
      // required by some browsers; TODO confirm this is still needed.
      processor.connect(audioContextRef.current.destination);
      processorRef.current = processor;
      
      return true;
    } catch (err) {
      if (DEBUG) console.error('音频初始化失败:', err);
      setError('无法访问麦克风，请检查权限设置');
      return false;
    }
  }, []);

  // Encode float samples in [-1, 1] as little-endian signed 16-bit PCM.
  // Samples are clamped first; negatives scale by 0x8000 and positives by
  // 0x7FFF so both ends map onto the full int16 range.
  const convertFloat32ToPCM16 = (samples: Float32Array): ArrayBuffer => {
    const view = new DataView(new ArrayBuffer(samples.length * 2));
    samples.forEach((sample, index) => {
      const clamped = Math.min(1, Math.max(-1, sample));
      view.setInt16(index * 2, clamped < 0 ? clamped * 0x8000 : clamped * 0x7FFF, true);
    });
    return view.buffer;
  };

  // Simple linear-interpolation downsampler: reduces an arbitrary capture
  // rate to targetRate. Returns the input buffer untouched when the rates
  // already match.
  const downsampleBuffer = (buffer: Float32Array, sampleRate: number, targetRate: number): Float32Array => {
    if (sampleRate === targetRate) return buffer;
    const step = sampleRate / targetRate;
    const outLength = Math.floor(buffer.length / step);
    const out = new Float32Array(outLength);
    for (let i = 0; i < outLength; i++) {
      // Fractional source position for output sample i.
      const exact = i * step;
      const lower = Math.floor(exact);
      const weight = exact - lower;
      const left = buffer[lower] || 0;
      const right = buffer[lower + 1] || left; // clamp at the buffer's end
      out[i] = left + (right - left) * weight;
    }
    return out;
  };

  // Open the Baidu realtime-ASR WebSocket, send the START parameter frame,
  // and wire up result/heartbeat handling. Resolves true once the socket is
  // open; never rejects (connection errors resolve false instead).
  const connectWebSocket = useCallback(() => {
    return new Promise<boolean>((resolve) => {
      // Each session is identified by a fresh `sn` UUID in the URL.
      const sn = generateUUID();
      const wsUrl = `wss://vop.baidu.com/realtime_asr?sn=${sn}`;
      
      const ws = new WebSocket(wsUrl);
      wsRef.current = ws;

      ws.onopen = () => {
        if (DEBUG) console.log('WebSocket连接已建立');
        isConnectedRef.current = true;
        
        // START frame: declares credentials and audio format before any
        // binary audio frames are sent.
        const startFrame = {
          type: "START",
          data: {
            appid: config.appId,
            appkey: config.apiKey,
            dev_pid: config.devPid,
            cuid: config.cuid,
            format: config.format,
            sample: config.sample
          }
        };
        
        ws.send(JSON.stringify(startFrame));
        lastSendTsRef.current = Date.now();
        // Heartbeat: keep the connection alive through silence or network
        // jitter so the server's 5s idle timeout does not drop us.
        if (heartbeatTimerRef.current) window.clearInterval(heartbeatTimerRef.current);
        heartbeatTimerRef.current = window.setInterval(() => {
          if (!isConnectedRef.current || wsRef.current?.readyState !== WebSocket.OPEN) return;
          // Only ping when no audio frame has gone out recently.
          // NOTE(review): assumes the server accepts a {type:'HEARTBEAT'}
          // text frame — confirm against the Baidu realtime ASR protocol.
          if (Date.now() - lastSendTsRef.current > 1800) {
            try { wsRef.current?.send(JSON.stringify({ type: 'HEARTBEAT' })); } catch {}
          }
        }, 1500);
        resolve(true);
      };

      ws.onmessage = (event) => {
        try {
          const data = JSON.parse(event.data);
          if (DEBUG) console.log('收到识别结果:', data);
          
          if (data.type === 'MID_TEXT') {
            // Interim hypothesis: display only, never forwarded to onResult.
            if (data.err_no === 0 && data.result) setRecognizedText(data.result);
          } else if (data.type === 'FIN_TEXT') {
            // Final result: the only place the external callback fires.
            if (data.err_no === 0 && data.result) {
              setRecognizedText(data.result);
              onResult(data.result);
            } else if (data.err_no !== 0) {
              if (DEBUG) console.error('识别错误:', data.err_msg);
              setError(`识别错误: ${data.err_msg}`);
            }
          }
        } catch (err) {
          // Non-JSON frames are ignored silently (best effort).
          if (DEBUG) console.error('解析识别结果失败:', err);
        }
      };

      ws.onerror = (error) => {
        if (DEBUG) console.error('WebSocket错误:', error);
        setError('语音识别服务连接失败');
        // If onopen already resolved the promise, this resolve is a no-op.
        resolve(false);
      };

      ws.onclose = () => {
        if (DEBUG) console.log('WebSocket连接已关闭');
        isConnectedRef.current = false;
        if (heartbeatTimerRef.current) {
          window.clearInterval(heartbeatTimerRef.current);
          heartbeatTimerRef.current = null;
        }
      };
    });
  }, [onResult]);

  // Stop a capture session: flush buffered audio, send the FINISH frame, and
  // release mic/audio resources. The socket is left open for a grace period
  // so the server can still deliver the final FIN_TEXT result — the previous
  // implementation closed it immediately while scheduling FINISH 180ms later,
  // so the FINISH frame was never delivered and the final transcript was lost.
  const stopListening = useCallback(() => {
    try {
      setIsListening(false);
      onStatusChange('processing');

      // Stop the audio callback from sending any further chunks.
      isConnectedRef.current = false;

      const ws = wsRef.current;
      wsRef.current = null;
      if (ws) {
        // This session's heartbeat is cleared below; detach onclose so a
        // late close event cannot clobber a newly started session's timer.
        ws.onclose = null;
        if (ws.readyState === WebSocket.OPEN) {
          // Flush residual samples that never filled a whole 160ms chunk.
          if (pendingSamplesRef.current && pendingSamplesRef.current.length > 0) {
            try { ws.send(convertFloat32ToPCM16(pendingSamplesRef.current)); } catch {}
          }
          // Tell the server the utterance is complete.
          try { ws.send(JSON.stringify({ type: "FINISH" })); } catch {}
          // Grace period: the server normally closes after FIN_TEXT; this
          // timeout only guards against a hung connection.
          window.setTimeout(() => { try { ws.close(); } catch {} }, 3000);
        } else {
          try { ws.close(); } catch {}
        }
      }
      pendingSamplesRef.current = null;

      if (mediaStreamRef.current) {
        mediaStreamRef.current.getTracks().forEach(track => track.stop());
        mediaStreamRef.current = null;
      }

      if (processorRef.current) {
        processorRef.current.disconnect();
        processorRef.current = null;
      }

      if (audioContextRef.current) {
        audioContextRef.current.close();
        audioContextRef.current = null;
      }

      if (heartbeatTimerRef.current) {
        window.clearInterval(heartbeatTimerRef.current);
        heartbeatTimerRef.current = null;
      }
      onStatusChange('idle');
    } catch (err) {
      if (DEBUG) console.error('停止录音失败:', err);
      setError('停止录音失败');
      onStatusChange('error');
    }
  }, [onStatusChange]);

  // 开始录音（放在 stopListening 之后，避免初始化阶段的 TDZ 报错）
  const startListening = useCallback(async () => {
    try {
      setError(null);
      setRecognizedText('');
      onStatusChange('listening');
      pendingSamplesRef.current = null;
      lastSendTsRef.current = 0;
      
      const audioInitialized = await initAudioContext();
      if (!audioInitialized) {
        onStatusChange('error');
        return;
      }

      const connected = await connectWebSocket();
      if (!connected) {
        onStatusChange('error');
        return;
      }

      setIsListening(true);
    } catch (err) {
      if (DEBUG) console.error('开始录音失败:', err);
      setError('开始录音失败');
      onStatusChange('error');
    }
  }, [initAudioContext, connectWebSocket, onStatusChange]);

  // Release all audio/network resources if the component unmounts while a
  // session is active.
  // NOTE(review): because isListening/stopListening are dependencies, this
  // cleanup also runs on re-renders where they change, so stopListening can
  // fire a second time after a normal stop. The teardown is null-guarded per
  // ref, but it re-emits the 'processing'/'idle' status callbacks — confirm
  // the double invocation is intended.
  useEffect(() => {
    return () => {
      if (isListening) {
        stopListening();
      }
    };
  }, [isListening, stopListening]);

  // Root layout: a compact flex row when rendered inline, otherwise a fixed
  // bottom-right overlay with its children stacked vertically.
  let containerStyle: React.CSSProperties;
  if (inline) {
    containerStyle = { position: 'static', display: 'flex', gap: '8px', alignItems: 'center' };
  } else {
    containerStyle = {
      position: 'fixed', bottom: '20px', right: '20px', zIndex: 2147483647,
      display: 'flex', flexDirection: 'column', gap: '10px', alignItems: 'center',
      pointerEvents: 'auto', background: 'rgba(0,0,0,0.35)', padding: '8px', borderRadius: 10, border: '2px solid #00ffa0'
    };
  }

  return (
    <div style={containerStyle}>
      {/* Control button (placed first so it can sit next to a dropdown) */}
      <button
        onClick={isListening ? stopListening : startListening}
        style={{
          width: inline ? 74 : 60,
          height: inline ? 36 : 60,
          borderRadius: inline ? 6 : '50%',
          // border: inline ? (theme === 'red' ? '1px solid #333' : '1px solid #666') : 'none',
          background: isListening ? '#ff4444' : '#4CAF50',
          color: inline && theme === 'red' ? '#333' : 'white',
          fontSize: inline ? 14 : 16,
          cursor: 'pointer',
          display: 'flex',
          alignItems: 'center',
          justifyContent: 'center',
          boxShadow: inline ? 'none' : '0 4px 8px rgba(0,0,0,0.3)',
          transition: 'all 0.3s ease'
        }}
      >
        {isListening ? `⏹️ ${t('voice.stop')}` : `🎤 ${t('voice.start')}`}
      </button>

      {/* Interim/final transcript (shown in overlay mode only) */}
      {!inline && recognizedText && (
        <div style={{
          background: 'rgba(0,0,0,0.8)',
          color: 'white',
          padding: '10px 15px',
          borderRadius: '8px',
          maxWidth: '300px',
          wordWrap: 'break-word'
        }}>
          {recognizedText}
        </div>
      )}
      
      {/* Error message (shown in overlay mode only) */}
      {!inline && error && (
        <div style={{
          background: 'rgba(255,0,0,0.8)',
          color: 'white',
          padding: '10px 15px',
          borderRadius: '8px',
          maxWidth: '300px'
        }}>
          {error}
        </div>
      )}
    </div>
  );
};

export default SRControl;
