import 'react-native-get-random-values';
import { Audio } from 'expo-av';
import * as Crypto from 'expo-crypto';
import * as FileSystem from 'expo-file-system';
import { encode as base64Encode, decode as base64Decode } from 'base-64';
import ReconnectingWebSocket from 'react-native-reconnecting-websocket';

const WS_URL = 'wss://openspeech.bytedance.com/api/v3/sauc/bigmodel';

// Protocol constants — each frame starts with a 4-byte header of nibble pairs
// (see generateHeader below for the exact layout).
const PROTOCOL_VERSION = 0x01;      // 0b0001 - protocol version 1
const HEADER_SIZE = 0x01;           // 0b0001 - header size in 4-byte units (1 x 4 = 4 bytes)

// Message types (high nibble of header byte 1)
const FULL_CLIENT_REQUEST = 0x01;   // 0b0001 - carries the JSON request config
const AUDIO_ONLY_REQUEST = 0x02;    // 0b0010 - carries raw audio bytes

// Message flags (low nibble of header byte 1)
const NO_FLAGS = 0x00;              // 0b0000 - ordinary packet
const SEQUENCE_FLAG = 0x01;         // 0b0001 - packet includes a sequence number
const LAST_AUDIO_FLAGS = 0x02;      // 0b0010 - final audio packet
const LAST_WITH_SEQUENCE = 0x03;    // 0b0011 - final packet that also carries a sequence number

// Serialization methods (high nibble of header byte 2)
const NO_SERIALIZATION = 0x00;      // 0b0000 - raw bytes
const JSON_SERIALIZATION = 0x01;    // 0b0001 - JSON payload

// Compression methods (low nibble of header byte 2)
const NO_COMPRESSION = 0x00;        // 0b0000 - uncompressed

// Type definitions
/** Lifecycle state of a recognition session. */
export type RecognitionStatus = 
  | 'idle'
  | 'connecting'
  | 'recording'
  | 'processing'
  | 'error';

/** A completed recognition entry kept in client-side history. */
export interface RecognitionHistoryItem {
  id: number;           // client-assigned identifier
  text: string;         // recognized transcript
  timestamp: number;    // when the entry was recorded
  duration?: number;    // audio duration, when known
  confidence?: number;  // recognition confidence, when reported
}

/**
 * A parsed server response as delivered to the `onResult` callback.
 * `result.text` holds the transcript; `headerDetails`, when present,
 * echoes the binary header of the frame the payload arrived in.
 */
export interface RecognitionResult {
  result?: {
    text: string;
    confidence?: number;
    utterances?: {
      text: string;
      start_time: number;  // segment start — presumably ms; TODO confirm unit against server docs
      end_time: number;    // segment end — same unit as start_time
      definite: boolean;   // presumably marks a finalized segment — verify against API docs
      words?: any[];
    }[];
  };
  audio_info?: {
    duration: number;      // audio duration reported by the server
  };
  headerDetails?: {
    flags: number;
    messageType: number;
    version: number;
    headerSize: number;
    compression: number;
    serialization: number;
    reserved: number;
  };
}

/**
 * Shape of the initial JSON configuration frame sent in the
 * "full client request" right after the socket opens.
 */
interface WSRequestConfig {
  user: {
    uid: string;            // per-session user identifier (a random UUID here)
    did?: string;
    platform?: string;
    sdk_version?: string;
    app_version?: string;
  };
  audio: {
    format: string;         // container/encoding name, e.g. 'pcm'
    rate: number;           // sample rate in Hz
    bits: number;           // bits per sample
    channel: number;        // channel count
    language: string;       // BCP-47-style language tag, e.g. 'zh-CN'
    codec?: string;
  };
  request: {
    model_name: string;
    enable_itn: boolean;    // inverse text normalization (digits etc.)
    enable_punc: boolean;   // automatic punctuation
    enable_ddc: boolean;    // domain classification
    corpus?: {
      boosting_table_id?: string;
      context?: string;     // optional contextual hint for recognition
    };
  };
}

// Helper functions
/**
 * Builds the fixed 4-byte binary protocol header.
 *
 * Layout (one nibble pair per byte):
 *   byte 0: protocol version (high) | header size (low)
 *   byte 1: message type (high)     | message flags (low)
 *   byte 2: serialization (high)    | compression (low)
 *   byte 3: reserved, always 0x00
 */
function generateHeader(
  messageType: number,
  flags: number,
  serialization: number,
  compression: number
): Uint8Array {
  // Pack two 4-bit values into one byte (high nibble | low nibble).
  const packNibbles = (high: number, low: number): number =>
    ((high << 4) | low) & 0xff;

  return Uint8Array.of(
    packNibbles(PROTOCOL_VERSION, HEADER_SIZE),
    packNibbles(messageType, flags),
    packNibbles(serialization, compression),
    0x00 // reserved
  );
}


// Audio recording configuration: 16 kHz, mono, 16-bit PCM to match the
// `audio` section of WSRequestConfig.
const RECORDING_OPTIONS: Audio.RecordingOptions = {
  android: {
    extension: '.pcm',
    sampleRate: 16000,
    numberOfChannels: 1,
    bitRate: 16000 * 16,
    outputFormat: 2, // PCM_16BIT per original note — TODO confirm against Android MediaRecorder constants
    audioEncoder: 2, // PCM_16BIT per original note — TODO confirm against Android MediaRecorder constants
  },
  ios: {
    extension: '.pcm',
    sampleRate: 16000,
    numberOfChannels: 1,
    bitRate: 16000 * 16,
    linearPCMBitDepth: 16,
    linearPCMIsBigEndian: false, // little-endian samples, as the sender assumes
    linearPCMIsFloat: false,
    audioQuality: 127, // HIGH
  },
  web: {
    mimeType: 'audio/webm',
    bitsPerSecond: 16000 * 16,
  },
};

// Server error code → user-facing message. The messages are runtime strings
// surfaced to users and are deliberately kept in Chinese.
const ERROR_CODES = {
  10000: '系统内部错误',
  10001: '参数错误',
  10002: '鉴权失败',
  10003: '配额超限',
  10004: '并发超限',
  10005: '音频格式错误',
  10006: '音频过长',
  10007: '音频为空',
  10008: '音频采样率错误',
  10009: '音频比特率错误',
  10010: '音频声道数错误',
  10011: '音频编码格式错误',
} as const;

// Class implementation
/**
 * Streams microphone audio to the ByteDance "sauc" bigmodel ASR service over
 * a binary WebSocket protocol and reports recognition results.
 *
 * Flow: startRecording() opens the socket, sends the JSON config frame,
 * starts the expo-av recorder and streams ~100 ms PCM chunks;
 * stopSendingAudio() sends the end-of-audio marker and awaits the final
 * result; stopRecording() releases the recorder.
 */
export class SpeechRecognitionManager {
  private ws: ReconnectingWebSocket | null = null;
  private recording: Audio.Recording | null = null;
  private isConnected = false;
  private retryCount = 0;   // consecutive failed connection attempts
  private maxRetries = 3;

  /**
   * @param appKey    value sent as the X-Api-App-Key header
   * @param accessKey value sent as the X-Api-Access-Key header
   * @param onResult  called with every parsed server response
   * @param onError   called on connection, protocol or recording failures
   */
  constructor(
    private appKey: string,
    private accessKey: string,
    private onResult: (result: RecognitionResult) => void,
    private onError: (error: Error) => void
  ) {}

  /** Builds the JSON payload for the initial "full client request" frame. */
  private getWSConfig(): WSRequestConfig {
    return {
      user: {
        uid: Crypto.randomUUID(),
        platform: 'iOS',
        sdk_version: '1.0.0',
      },
      audio: {
        format: 'pcm',
        rate: 16000,
        bits: 16,
        channel: 1,
        language: 'zh-CN',
        codec: 'pcm'
      },
      request: {
        model_name: 'bigmodel',
        enable_itn: true,  // inverse text normalization (e.g. digits)
        enable_punc: true, // automatic punctuation
        enable_ddc: true,  // domain classification
        corpus: {
          context: ''  // optional contextual hint
        }
      }
    };
  }

  /** Requests mic permission, configures the audio session and prepares the recorder. */
  private async setupRecording() {
    console.log('[Audio] Requesting permissions...');
    await Audio.requestPermissionsAsync();

    console.log('[Audio] Setting audio mode...');
    await Audio.setAudioModeAsync({
      allowsRecordingIOS: true,
      playsInSilentModeIOS: true,
    });

    console.log('[Audio] Creating recording instance...');
    const recording = new Audio.Recording();

    console.log('[Audio] Preparing to record with options:', RECORDING_OPTIONS);
    await recording.prepareToRecordAsync(RECORDING_OPTIONS);

    this.recording = recording;
    console.log('[Audio] Recording setup complete');
  }

  /**
   * Frames a message: header (4 bytes) + payload size (4 bytes, big-endian)
   * + payload.
   */
  private createMessage(header: Uint8Array, payload: Uint8Array): Uint8Array {
    const buffer = new ArrayBuffer(4 + 4 + payload.length);
    const message = new Uint8Array(buffer);

    // 1. Header (4 bytes).
    message.set(header, 0);

    // 2. Payload size (4 bytes, big-endian).
    const payloadSizeView = new DataView(buffer, 4, 4);
    payloadSizeView.setUint32(0, payload.length, false); // false = big endian

    // 3. Payload.
    message.set(payload, 8);

    // Debug trace of the assembled frame.
    console.log('[WebSocket] Message construction:', {
      header: Array.from(header),
      headerHex: Array.from(header).map(b => '0x' + b.toString(16).padStart(2, '0')),
      headerDetails: {
        version: (header[0] >> 4) & 0x0F,        // high nibble
        headerSize: header[0] & 0x0F,            // low nibble
        messageType: (header[1] >> 4) & 0x0F,    // high nibble
        flags: header[1] & 0x0F,                 // low nibble
        serialization: (header[2] >> 4) & 0x0F,  // high nibble
        compression: header[2] & 0x0F,           // low nibble
        reserved: header[3]
      },
      payloadSize: payload.length,
      payloadSizeBytes: Array.from(new Uint8Array(buffer, 4, 4)),
      totalLength: message.length,
      payloadPreview: payload.length > 32 ?
        new TextDecoder().decode(payload.slice(0, 32)) + '...' :
        new TextDecoder().decode(payload)
    });

    return message;
  }

  /**
   * Parses a binary server frame. Returns the (JSON-decoded when applicable)
   * payload of a full server response, or throws for server error frames and
   * unknown message types.
   */
  private parseMessage(data: ArrayBuffer): any {
    const view = new DataView(data);

    // 1. Decode the 4-byte header.
    const header = new Uint8Array(data, 0, 4);
    const headerDetails = {
      version: (header[0] >> 4) & 0x0F,
      headerSize: header[0] & 0x0F,
      messageType: (header[1] >> 4) & 0x0F,
      flags: header[1] & 0x0F,
      serialization: (header[2] >> 4) & 0x0F,
      compression: header[2] & 0x0F,
      reserved: header[3]
    };

    let offset = 4; // read position just past the header

    // 2. Dispatch on message type.
    if (headerDetails.messageType === 0x0F) { // error frame
      const errorCode = view.getUint32(offset, false);
      offset += 4;

      const errorSize = view.getUint32(offset, false);
      offset += 4;

      const errorMessage = new TextDecoder().decode(
        new Uint8Array(data, offset, errorSize)
      );

      console.log('[WebSocket] Error message:', {
        headerDetails,
        errorCode,
        errorMessage
      });

      const knownError = ERROR_CODES[errorCode as keyof typeof ERROR_CODES];
      throw new Error(`服务器错误 (${errorCode}): ${knownError || errorMessage}`);
    } else if (headerDetails.messageType === 0x09) { // full server response
      // Skip the 4-byte sequence number when the sequence flag is set.
      if (headerDetails.flags & 0x01) {
        offset += 4;
      }

      const payloadSize = view.getUint32(offset, false);
      offset += 4;

      const payload = new Uint8Array(data, offset, payloadSize);

      // Decode according to the serialization nibble.
      let parsedPayload;
      if (headerDetails.serialization === JSON_SERIALIZATION) {
        parsedPayload = JSON.parse(new TextDecoder().decode(payload));
      } else {
        parsedPayload = payload;
      }

      console.log('[WebSocket] Full server response:', {
        headerDetails,
        payloadSize,
        parsedPayload
      });

      return parsedPayload;
    } else {
      console.error('[WebSocket] Unknown message type:', headerDetails.messageType);
      throw new Error(`Unknown message type: ${headerDetails.messageType}`);
    }
  }

  /**
   * (Re)creates the WebSocket, sends the initial config frame on open and
   * wires up message/error/close handlers.
   */
  private async setupWebSocket() {
    if (this.ws) {
      this.ws.close();
      this.ws = null;
    }

    this.isConnected = false;
    const connectId = Crypto.randomUUID();

    try {
      this.ws = new ReconnectingWebSocket(WS_URL, undefined, {
        headers: {
          'X-Api-App-Key': this.appKey,
          'X-Api-Access-Key': this.accessKey,
          'X-Api-Resource-Id': 'volc.bigasr.sauc.duration',
          'X-Api-Connect-Id': connectId,
          'Content-Type': 'application/json'
        },
        debug: true,
        reconnectInterval: 3000,
        maxReconnectAttempts: this.maxRetries,
      });

      this.ws.addEventListener('open', () => {
        console.log('[WebSocket] Connection opened');
        this.isConnected = true;
        this.retryCount = 0; // successful connect: reset the retry budget

        // Send the initial configuration frame.
        const config = this.getWSConfig();
        console.log('[WebSocket] Sending config:', config);

        // Header for a full client request (JSON payload, no compression).
        const header = generateHeader(
          FULL_CLIENT_REQUEST,   // 0b0001 - full client request
          NO_FLAGS,              // 0b0000 - ordinary packet
          JSON_SERIALIZATION,    // 0b0001 - JSON payload
          NO_COMPRESSION         // 0b0000 - uncompressed
        );

        // UTF-8 encode the JSON config.
        const payload = new TextEncoder().encode(JSON.stringify(config));

        // Frame and send.
        const message = this.createMessage(header, payload);
        this.ws?.send(message);
      });

      // Handle server responses.
      this.ws.addEventListener('message', (event) => {
        try {
          if (event.data instanceof ArrayBuffer) {
            const response = this.parseMessage(event.data);
            if (response.result?.text) {
              console.log('[WebSocket] Recognition result:', response.result.text);
              if (response.result.utterances) {
                console.log('[WebSocket] Utterances:', response.result.utterances);
              }
            }
            this.onResult(response);
          } else {
            console.error('[WebSocket] Unexpected message format:', event.data);
          }
        } catch (error) {
          // parseMessage throws server errors prefixed with `服务器错误`;
          // the previous check for 'Server error' never matched.
          if (error instanceof Error && error.message.startsWith('服务器错误')) {
            console.error('[WebSocket]', error.message);
          } else {
            console.error('[WebSocket] Failed to parse response:', error, 'Raw data:', event.data);
          }
          this.onError(error instanceof Error ? error : new Error('Failed to parse response'));
        }
      });

      this.ws.addEventListener('error', (event) => {
        this.isConnected = false;
        console.error('[WebSocket] Error:', event);

        // NOTE(review): ReconnectingWebSocket already auto-reconnects; this
        // manual retry additionally rebuilds the socket — kept as-is.
        if (this.retryCount < this.maxRetries) {
          this.retryCount++;
          console.log(`[WebSocket] Retrying connection (${this.retryCount}/${this.maxRetries})...`);
          this.setupWebSocket();
        } else {
          const errorMessage = event instanceof Error ? event.message : 'Unknown error';
          this.onError(new Error(`WebSocket error after ${this.maxRetries} retries: ${errorMessage}`));
        }
      });

      this.ws.addEventListener('close', () => {
        console.log('[WebSocket] Connection closed');
        this.isConnected = false;
      });

    } catch (error) {
      console.error('[WebSocket] Failed to setup WebSocket:', error);
      throw error;
    }
  }

  /**
   * Opens the connection, waits for it to become ready (5 s timeout),
   * starts the recorder and begins streaming audio chunks.
   * @throws when the connection or the recorder fails to start
   */
  async startRecording() {
    try {
      console.log('[Audio] Starting recording process...');

      // 1. Set up the WebSocket.
      console.log('[Audio] Setting up WebSocket...');
      await this.setupWebSocket();

      // 2. Wait until the connection is actually open.
      if (!this.isConnected) {
        console.log('[Audio] Waiting for WebSocket connection...');
        await new Promise<void>((resolve, reject) => {
          const timeout = setTimeout(() => {
            reject(new Error('WebSocket connection timeout'));
          }, 5000);

          // Poll every 100 ms until connected or the socket disappears.
          const checkConnection = () => {
            if (this.isConnected) {
              clearTimeout(timeout);
              resolve();
            } else if (this.ws) {
              setTimeout(checkConnection, 100);
            } else {
              clearTimeout(timeout);
              reject(new Error('WebSocket connection failed'));
            }
          };

          checkConnection();
        });
      }

      // 3. Prepare the recorder.
      await this.setupRecording();

      // 4. Start recording.
      console.log('[Audio] Starting actual recording...');
      await this.recording?.startAsync();

      // 5. Start streaming audio.
      console.log('[Audio] Starting audio sending process...');
      this.startSendingAudio();
    } catch (error) {
      console.error('[Audio] Error in startRecording:', error);
      this.stopRecording();
      throw error;
    }
  }

  /**
   * Repeatedly reads the growing recording file in ~100 ms slices and sends
   * each slice as an audio-only frame until the recorder or socket goes away.
   */
  private startSendingAudio() {
    const CHUNK_DURATION = 100; // send interval in ms
    const SAMPLE_RATE = 16000;  // samples per second
    const BYTES_PER_SAMPLE = 2; // 16-bit PCM
    const CHUNK_SIZE = Math.floor(SAMPLE_RATE * CHUNK_DURATION / 1000) * BYTES_PER_SAMPLE; // bytes per chunk

    let isRunning = true;
    let offset = 0;  // byte position already sent from the recording file

    const sendAudioChunk = async () => {
      if (!this.recording || !this.ws || !this.isConnected) {
        console.log('[Audio] Cannot send chunk:', {
          hasRecording: !!this.recording,
          hasWebSocket: !!this.ws,
          isConnected: this.isConnected
        });
        return;
      }

      try {
        const uri = this.recording.getURI();
        if (!uri) {
          console.log('[Audio] No recording URI available');
          return;
        }

        // Read the next slice of the file (base64-encoded).
        const audioData = await FileSystem.readAsStringAsync(uri, {
          encoding: FileSystem.EncodingType.Base64,
          position: offset,
          length: CHUNK_SIZE
        });

        if (!audioData) {
          console.log('[Audio] No audio data available');
          return;
        }

        // Decode base64 into raw little-endian PCM16 bytes and forward them
        // unchanged. (The previous Int16Array round trip was an identity on
        // little-endian hosts and would have byte-swapped on big-endian ones.)
        const binary = base64Decode(audioData);
        const payload = new Uint8Array(binary.length);
        for (let i = 0; i < binary.length; i++) {
          payload[i] = binary.charCodeAt(i);
        }

        // Audio-only frame: raw bytes, no serialization, no compression.
        const header = generateHeader(
          AUDIO_ONLY_REQUEST,   // 0b0010 - audio only request
          NO_FLAGS,             // 0b0000 - ordinary packet
          NO_SERIALIZATION,     // 0b0000 - raw bytes
          NO_COMPRESSION        // 0b0000 - uncompressed
        );

        const message = this.createMessage(header, payload);
        this.ws?.send(message);

        // Advance by the bytes actually read (short reads near the end of
        // the file must not skip unread audio).
        offset += payload.length;

        console.log('[Audio] Sent audio chunk:', {
          chunkSize: payload.length,
          offset,
          samplesCount: payload.length / 2
        });
      } catch (error) {
        console.error('[Audio] Failed to send audio chunk:', error);
        isRunning = false;
      }
    };

    const scheduleNextChunk = () => {
      if (isRunning && this.isConnected && this.recording) {
        setTimeout(async () => {
          await sendAudioChunk();
          scheduleNextChunk();
        }, CHUNK_DURATION);
      } else {
        console.log('[Audio] Stopped sending chunks:', {
          isRunning,
          isConnected: this.isConnected,
          hasRecording: !!this.recording
        });
      }
    };

    console.log('[Audio] Starting chunk scheduler with config:', {
      chunkDuration: CHUNK_DURATION,
      sampleRate: SAMPLE_RATE,
      bytesPerSample: BYTES_PER_SAMPLE,
      chunkSize: CHUNK_SIZE
    });

    scheduleNextChunk();
  }

  /**
   * Sends the end-of-audio marker, waits (up to 10 s) for the final server
   * response (flags == 0b0011), then closes the connection.
   */
  private async sendEndMarker() {
    if (!this.ws || !this.isConnected) return;

    // 1. Send the final (empty) audio frame.
    const finalHeader = generateHeader(
      AUDIO_ONLY_REQUEST,
      LAST_AUDIO_FLAGS,
      NO_SERIALIZATION,
      NO_COMPRESSION
    );

    const finalMessage = this.createMessage(finalHeader, new Uint8Array(0));
    this.ws?.send(finalMessage);
    console.log('[Audio] Sent last audio chunk marker');

    // 2. Wait for the final result.
    await new Promise<void>((resolve, reject) => {
      const timeout = setTimeout(() => {
        // Detach the listener so it cannot fire after we give up
        // (previously it leaked on timeout).
        this.ws?.removeEventListener('message', messageHandler);
        reject(new Error('Timeout waiting for final result'));
      }, 10000);

      const messageHandler = (event: MessageEvent) => {
        if (event.data instanceof ArrayBuffer) {
          const header = new Uint8Array(event.data, 0, 4);
          const flags = header[1] & 0x0F;
          console.log('[Audio] Checking message flags:', flags);

          if (flags === 3) {
            console.log('[Audio] Received final result');
            const message = this.parseMessage(event.data);
            if (message?.result?.text) {
              // Pass the whole parsed response: onResult expects a
              // RecognitionResult, not a bare string.
              this.onResult(message);
            }
            clearTimeout(timeout);
            this.ws?.removeEventListener('message', messageHandler);
            resolve();
          }
        }
      };

      this.ws?.addEventListener('message', messageHandler);
    });

    // 3. Tear down the WebSocket.
    if (this.ws) {
      this.ws.close();
      this.ws = null;
    }
    this.isConnected = false;
  }

  /** Stops and releases the recorder; failures are routed to onError. */
  async stopRecording() {
    try {
      console.log('[Audio] Stopping recording...');
      if (this.recording) {
        // 1. Stop the recorder.
        await this.recording.stopAndUnloadAsync();
        // 2. Drop the instance so the chunk scheduler halts.
        this.recording = null;
      }
    } catch (error) {
      console.error('[Audio] Error stopping recording:', error);
      this.onError(error instanceof Error ? error : new Error('Failed to stop recording'));
    }
  }

  /** Flushes outstanding audio, sends the end marker and awaits the final result. */
  async stopSendingAudio() {
    try {
      // 1. Brief grace period so in-flight chunks finish sending.
      await new Promise(resolve => setTimeout(resolve, 200));
      // 2. Send the end marker and wait for the final result.
      await this.sendEndMarker();
      console.log('[Audio] Cleanup complete');
    } catch (error) {
      console.error('[Audio] Error stopping audio sending:', error);
      this.onError(error instanceof Error ? error : new Error('Failed to stop sending audio'));
    }
  }
}