/**
 * Copyright 2025 Beijing Volcano Engine Technology Co., Ltd. All Rights Reserved.
 * SPDX-license-identifier: BSD-3-Clause
 */

import openAPIs from '@/app/api';
import {finishPractice, getPracticeConfig, startPractice} from '@/app/fetch';
import aigcConfig from '@/config';
import { COMMAND, INTERRUPT_PRIORITY } from '@/utils/handler';
import Utils from '@/utils/utils';
import { Message } from '@arco-design/web-react';
import VERTC, {
  AudioProfileType,
  AutoPlayFailedEvent,
  DeviceInfo,
  IRTCEngine,
  LocalAudioPropertiesInfo,
  LocalStreamStats,
  MediaType,
  MirrorType,
  NetworkQuality,
  PlayerEvent,
  RemoteAudioPropertiesInfo,
  RemoteStreamStats,
  RoomProfileType,
  ScreenEncoderConfig,
  StreamIndex,
  StreamRemoveReason,
  VideoRenderMode,
  onUserJoinedEvent,
  onUserLeaveEvent,
} from '@volcengine/rtc';
import RTCAIAnsExtension from '@volcengine/rtc/extension-ainr';
import {  useSelector } from 'react-redux';
import Config from '@/config';
import { RootState } from '@/store';
import { useNavigate } from "react-router-dom"
import store from '@/store';
import { updateLastMsgEndMs } from '@/store/slices/room';
import type { Msg } from '@/store/slices/room';

/**
 * Callback set for RTC engine events.
 * Each handler is bound to the matching SDK event in RTCClient.addEventListeners().
 */
export interface IEventListener {
  /** Engine error reported. */
  handleError: (e: { errorCode: any }) => void;
  /** A remote user joined the room. */
  handleUserJoin: (e: onUserJoinedEvent) => void;
  /** A remote user left the room. */
  handleUserLeave: (e: onUserLeaveEvent) => void;
  /** A local media track ended. */
  handleTrackEnded: (e: { kind: string; isScreen: boolean }) => void;
  /** A remote user published a stream. */
  handleUserPublishStream: (e: { userId: string; mediaType: MediaType }) => void;
  /** A remote user unpublished a stream. */
  handleUserUnpublishStream: (e: {
    userId: string;
    mediaType: MediaType;
    reason: StreamRemoveReason;
  }) => void;
  /** Periodic statistics for remote streams. */
  handleRemoteStreamStats: (e: RemoteStreamStats) => void;
  /** Periodic statistics for the local stream. */
  handleLocalStreamStats: (e: LocalStreamStats) => void;
  /** Local audio level/properties report. */
  handleLocalAudioPropertiesReport: (e: LocalAudioPropertiesInfo[]) => void;
  /** Remote audio level/properties report. */
  handleRemoteAudioPropertiesReport: (e: RemoteAudioPropertiesInfo[]) => void;
  /** An audio device changed state (plugged/unplugged/switched). */
  handleAudioDeviceStateChanged: (e: DeviceInfo) => void;
  /** Autoplay was blocked by the browser. */
  handleAutoPlayFail: (e: AutoPlayFailedEvent) => void;
  /** Media player state event. */
  handlePlayerEvent: (e: PlayerEvent) => void;
  /** A remote user started audio capture. */
  handleUserStartAudioCapture: (e: { userId: string }) => void;
  /** A remote user stopped audio capture. */
  handleUserStopAudioCapture: (e: { userId: string }) => void;
  /** A room-wide binary message was received. */
  handleRoomBinaryMessageReceived: (e: { userId: string; message: ArrayBuffer }) => void;
  /** Network quality report for uplink and downlink. */
  handleNetworkQuality: (
    uplinkNetworkQuality: NetworkQuality,
    downlinkNetworkQuality: NetworkQuality
  ) => void;
}

/** Options required to initialize the RTC engine. */
interface EngineOptions {
  /** Application id registered with the RTC service. */
  appId: string;
  /** Local user id. */
  uid: string;
  /** Target room id. */
  roomId: string;
  /** Authentication token used when joining the room. */
  token: string;
}

/** Identifiers of a practice session. */
export interface PracticeBody {
  /** Task id the practice belongs to. */
  taskId: string;
  /** Server-assigned practice id. */
  practiceId: number;
}

/** Basic room/user identity information. */
export interface BasicBody {
  /** Room id. */
  room_id: string;
  /** User id. */
  user_id: string;
  /** Login token, or null when not available. */
  login_token: string | null;
}

/**
 * Core RTC client.
 *
 * Wraps interaction with the Volcano Engine RTC SDK: engine lifecycle,
 * room join/leave, device discovery and switching, stream publishing, and
 * lifecycle control of the AI audio bot / practice session.
 */
export class RTCClient {
  engine!: IRTCEngine; // RTC engine instance, created in createEngine()

  config!: EngineOptions; // Engine options captured in createEngine()

  basicInfo!: BasicBody; // Room/user identity derived from config

  practiceInfo!: PracticeBody; // Identifiers of the current practice session

  private _audioCaptureDevice?: string; // Currently selected microphone deviceId

  private _videoCaptureDevice?: string; // Currently selected camera deviceId

  audioBotEnabled = false; // Whether the AI audio bot is currently running

  audioBotStartTime = 0; // Epoch ms when the AI audio bot was started

  chatMessage = []; // Chat messages captured when leaving the room

  private store: any; // Redux store instance

  constructor() {
    this.store = store; // Bind the app-level Redux store
  }

  /**
   * Create the RTC engine and attempt to enable the AI noise-suppression extension.
   * @param props engine options (appId / uid / roomId / token)
   */
  createEngine = async (props: EngineOptions) => {
    this.config = props;
    this.basicInfo = {
      room_id: props.roomId,
      user_id: props.uid,
      login_token: props.token,
    };
    this.practiceInfo = {
      practiceId: 0,
      taskId: '',
    };
    // Create the RTC engine instance.
    this.engine = VERTC.createEngine(this.config.appId);

    try {
      // Register and enable the AI noise-suppression extension.
      const AIAnsExtension = new RTCAIAnsExtension();
      await this.engine.registerExtension(AIAnsExtension);
      AIAnsExtension.enable();
    } catch (error) {
      // Environments without AI noise suppression land here; non-fatal.
      console.warn(
        `当前环境不支持 AI 降噪, 此错误可忽略, 不影响实际使用, e: ${(error as any).message}`
      );
    }
  };

  /**
   * Bind the given listener callbacks to the corresponding RTC SDK events.
   * @param listeners event listener object (see IEventListener)
   */
  addEventListeners = ({
    handleError,
    handleUserJoin,
    handleUserLeave,
    handleTrackEnded,
    handleUserPublishStream,
    handleUserUnpublishStream,
    handleRemoteStreamStats,
    handleLocalStreamStats,
    handleLocalAudioPropertiesReport,
    handleRemoteAudioPropertiesReport,
    handleAudioDeviceStateChanged,
    handleAutoPlayFail,
    handlePlayerEvent,
    handleUserStartAudioCapture,
    handleUserStopAudioCapture,
    handleRoomBinaryMessageReceived,
    handleNetworkQuality,
  }: IEventListener) => {
    // Wire each RTC event to its handler.
    this.engine.on(VERTC.events.onError, handleError);
    this.engine.on(VERTC.events.onUserJoined, handleUserJoin);
    this.engine.on(VERTC.events.onUserLeave, handleUserLeave);
    this.engine.on(VERTC.events.onTrackEnded, handleTrackEnded);
    this.engine.on(VERTC.events.onUserPublishStream, handleUserPublishStream);
    this.engine.on(VERTC.events.onUserUnpublishStream, handleUserUnpublishStream);
    this.engine.on(VERTC.events.onRemoteStreamStats, handleRemoteStreamStats);
    this.engine.on(VERTC.events.onLocalStreamStats, handleLocalStreamStats);
    this.engine.on(VERTC.events.onAudioDeviceStateChanged, handleAudioDeviceStateChanged);
    this.engine.on(VERTC.events.onLocalAudioPropertiesReport, handleLocalAudioPropertiesReport);
    this.engine.on(VERTC.events.onRemoteAudioPropertiesReport, handleRemoteAudioPropertiesReport);
    this.engine.on(VERTC.events.onAutoplayFailed, handleAutoPlayFail);
    this.engine.on(VERTC.events.onPlayerEvent, handlePlayerEvent);
    this.engine.on(VERTC.events.onUserStartAudioCapture, handleUserStartAudioCapture);
    this.engine.on(VERTC.events.onUserStopAudioCapture, handleUserStopAudioCapture);
    this.engine.on(VERTC.events.onRoomBinaryMessageReceived, handleRoomBinaryMessageReceived);
    this.engine.on(VERTC.events.onNetworkQuality, handleNetworkQuality);
  };

  /**
   * Join the RTC room with auto-publish/auto-subscribe enabled.
   * @param token authentication token
   * @param username display name placed in extraInfo
   * @returns promise resolved when the room is joined
   */
  joinRoom = (token: string | null, username: string): Promise<void> => {
    // Report audio properties every 1000 ms.
    this.engine.enableAudioPropertiesReport({ interval: 1000 });
    return this.engine.joinRoom(
      token,
      `${this.config.roomId!}`,
      {
        userId: this.config.uid!,
        // Extra info sent as a JSON string.
        extraInfo: JSON.stringify({
          call_scene: 'RTC-AIGC',
          user_name: username,
          user_id: this.config.uid,
        }),
      },
      {
        isAutoPublish: true, // Publish the local stream automatically
        isAutoSubscribeAudio: true, // Subscribe to remote audio automatically
        roomProfileType: RoomProfileType.chat, // Chat room profile
      }
    );
  };

  /**
   * Leave the RTC room: stop the AI bot, leave, and destroy the engine.
   * @param chatMessage chat messages to keep on the client
   * @returns the stopAudioBot result (finishPractice response or undefined)
   */
  leaveRoom = async (chatMessage: any) => {
    this.chatMessage = chatMessage;
    // Stop the AI bot (and submit the practice result) first.
    let result = await this.stopAudioBot();
    this.audioBotEnabled = false;
    // Leave the room and tear down the engine.
    this.engine.leaveRoom();
    VERTC.destroyEngine(this.engine);
    this._audioCaptureDevice = undefined; // Reset the selected microphone
    return result;
  };

  /**
   * Check device permissions (audio only; video is not requested here).
   * @returns object with video/audio permission flags
   */
  checkPermission(): Promise<{
    video: boolean;
    audio: boolean;
  }> {
    return VERTC.enableDevices({
      video: false,
      audio: true,
    });
  }

  /**
   * Enumerate media devices, remembering the default capture devices and
   * surfacing user-facing errors when devices or permissions are missing.
   * @param props optionally request video and/or audio devices (default: audio only)
   * @returns lists of audio inputs, audio outputs and video inputs
   */
  async getDevices(props?: { video?: boolean; audio?: boolean }): Promise<{
    audioInputs: MediaDeviceInfo[];
    audioOutputs: MediaDeviceInfo[];
    videoInputs: MediaDeviceInfo[];
  }> {
    const { video = false, audio = true } = props || {};
    let audioInputs: MediaDeviceInfo[] = [];
    let audioOutputs: MediaDeviceInfo[] = [];
    let videoInputs: MediaDeviceInfo[] = [];

    // Request device permissions.
    const { video: hasVideoPermission, audio: hasAudioPermission } = await VERTC.enableDevices({
      video,
      audio,
    });

    // Enumerate audio devices.
    if (audio) {
      const inputs = await VERTC.enumerateAudioCaptureDevices();
      const outputs = await VERTC.enumerateAudioPlaybackDevices();
      audioInputs = inputs.filter((i) => i.deviceId && i.kind === 'audioinput');
      audioOutputs = outputs.filter((i) => i.deviceId && i.kind === 'audiooutput');
      // Remember the first available microphone as the default.
      this._audioCaptureDevice = audioInputs.filter((i) => i.deviceId)?.[0]?.deviceId;

      // Surface missing-device errors only when permission was granted.
      if (hasAudioPermission) {
        if (!audioInputs?.length) {
          Message.error('无麦克风设备, 请先确认设备情况。');
        }
        if (!audioOutputs?.length) {
          Message.error('无扬声器设备, 请先确认设备情况。');
        }
      } else {
        // Message.error('暂无麦克风设备权限, 请先确认设备权限授予情况。');
      }
    }

    // Enumerate video devices.
    if (video) {
      videoInputs = await VERTC.enumerateVideoCaptureDevices();
      videoInputs = videoInputs.filter((i) => i.deviceId && i.kind === 'videoinput');
      // Remember the first available camera as the default.
      this._videoCaptureDevice = videoInputs?.[0]?.deviceId;

      if (hasVideoPermission) {
        if (!videoInputs?.length) {
          Message.error('无摄像头设备, 请先确认设备情况。');
        }
      } else {
        Message.error('暂无摄像头设备权限, 请先确认设备权限授予情况。');
      }
    }

    return {
      audioInputs,
      audioOutputs,
      videoInputs,
    };
  }

  // --- Thin wrappers around RTC engine operations ---

  /** Start video capture with the given camera, or the remembered default. */
  startVideoCapture = async (camera?: string) => {
    await this.engine.startVideoCapture(camera || this._videoCaptureDevice);
  };

  /** Stop video capture, resetting the local mirror type first. */
  stopVideoCapture = async () => {
    this.engine.setLocalVideoMirrorType(MirrorType.MIRROR_TYPE_RENDER);
    await this.engine.stopVideoCapture();
  };

  /** Start screen sharing, optionally capturing system audio. */
  startScreenCapture = async (enableAudio = false) => {
    await this.engine.startScreenCapture({
      enableAudio,
    });
  };

  /** Stop screen sharing. */
  stopScreenCapture = async () => {
    await this.engine.stopScreenCapture();
  };

  /** Start audio capture with the given microphone, or the remembered default. */
  startAudioCapture = async (mic?: string) => {
    await this.engine.startAudioCapture(mic || this._audioCaptureDevice);
  };

  /** Stop audio capture. */
  stopAudioCapture = async () => {
    await this.engine.stopAudioCapture();
  };

  /** Publish a local media stream. */
  publishStream = (mediaType: MediaType) => {
    this.engine.publishStream(mediaType);
  };

  /** Unpublish a local media stream. */
  unpublishStream = (mediaType: MediaType) => {
    this.engine.unpublishStream(mediaType);
  };

  /** Publish the screen-share stream. */
  publishScreenStream = async (mediaType: MediaType) => {
    await this.engine.publishScreen(mediaType);
  };

  /** Unpublish the screen-share stream. */
  unpublishScreenStream = async (mediaType: MediaType) => {
    await this.engine.unpublishScreen(mediaType);
  };

  /** Configure the screen-share encoder. */
  setScreenEncoderConfig = async (description: ScreenEncoderConfig) => {
    await this.engine.setScreenEncoderConfig(description);
  };

  /**
   * Set the business identifier on the engine.
   * @param businessId business id
   */
  setBusinessId = (businessId: string) => {
    this.engine.setBusinessId(businessId);
  };

  /**
   * Set the capture volume for both the main and screen streams.
   * @param volume volume value
   */
  setAudioVolume = (volume: number) => {
    this.engine.setCaptureVolume(StreamIndex.STREAM_INDEX_MAIN, volume);
    this.engine.setCaptureVolume(StreamIndex.STREAM_INDEX_SCREEN, volume);
  };

  /**
   * Set the audio quality profile.
   * @param profile audio profile
   */
  setAudioProfile = (profile: AudioProfileType) => {
    this.engine.setAudioProfile(profile);
  };

  /**
   * Switch the capture device for audio, video, or both.
   * @param deviceType which device kind to switch
   * @param deviceId target device id
   */
  switchDevice = (deviceType: MediaType, deviceId: string) => {
    if (deviceType === MediaType.AUDIO) {
      // Switch the microphone.
      this._audioCaptureDevice = deviceId;
      this.engine.setAudioCaptureDevice(deviceId);
    }
    if (deviceType === MediaType.VIDEO) {
      // Switch the camera.
      this._videoCaptureDevice = deviceId;
      this.engine.setVideoCaptureDevice(deviceId);
    }
    if (deviceType === MediaType.AUDIO_AND_VIDEO) {
      // Switch both devices to the same id.
      this._audioCaptureDevice = deviceId;
      this._videoCaptureDevice = deviceId;
      this.engine.setVideoCaptureDevice(deviceId);
      this.engine.setAudioCaptureDevice(deviceId);
    }
  };

  /**
   * Set the local video mirror type.
   * @param type mirror type
   */
  setLocalVideoMirrorType = (type: MirrorType) => {
    return this.engine.setLocalVideoMirrorType(type);
  };

  /**
   * Attach the local video (or screen share) to a render element.
   * @param userId user id
   * @param renderDom render DOM element or its id
   * @param isScreenShare whether this is the screen-share stream
   */
  setLocalVideoPlayer = (
    userId: string,
    renderDom?: string | HTMLElement,
    isScreenShare = false
  ) => {
    return this.engine.setLocalVideoPlayer(
      isScreenShare ? StreamIndex.STREAM_INDEX_SCREEN : StreamIndex.STREAM_INDEX_MAIN,
      {
        renderDom, // Render target
        userId, // User id
        renderMode: VideoRenderMode.RENDER_MODE_FILL, // Fill render mode
      }
    );
  };

  /**
   * Start the AI audio bot: fetch the practice configuration, start a new
   * practice session on the server, and record the start time.
   */
  startAudioBot = async () => {
    // If a bot is already running, stop (and finalize) it first.
    if (this.audioBotEnabled) {
      await this.stopAudioBot();
    }

    // Fetch the practice configuration; it supplies the TaskId for this session.
    // NOTE(review): the original also built an AIGC start-options object from
    // this response (including TaskId taken from AgentConfig.UserId) but never
    // sent it anywhere; that dead code has been removed.
    let configResponse = await getPracticeConfig();

    // Begin a new practice session on the server.
    let practiceData = await startPractice();
    this.practiceInfo.practiceId = practiceData.data.practice_id;
    this.practiceInfo.taskId = configResponse.data.TaskId;

    // Mark the bot as running.
    this.audioBotEnabled = true;
    this.audioBotStartTime = Date.now();

    // Persist the enabled flag so a reload can detect an active session.
    Utils.setSessionInfo({ audioBotEnabled: 'enable' });

    // Record the practice start time in the store.
    store.dispatch({ type: 'room/setStartTime', payload: { startTime: Date.now() } });
  };

  /**
   * Stop the AI audio bot and submit the practice result.
   * @returns the finishPractice response when a session was active, otherwise undefined
   */
  stopAudioBot = async () => {
    const endTime = Date.now();
    // Stamp the end time onto the last message.
    this.store.dispatch(updateLastMsgEndMs({ endMs: endTime }));

    // Practice duration in whole seconds.
    // Fixed: the original used `new Date(timestamp).getSeconds()`, which wraps
    // modulo 60 and mis-reports any session longer than one minute.
    const durationSeconds = Math.round((Date.now() - this.audioBotStartTime) / 1000);
    const roomId = this.basicInfo.room_id;
    const state = store.getState();
    const msgHistory = state.room.msgHistory;

    // Ensure the last message carries an end timestamp.
    if (msgHistory.length) {
      const lastMsg = msgHistory[msgHistory.length - 1];
      if (!lastMsg.end_ms || lastMsg.end_ms === 0) {
        // Fixed: the original dispatched a raw action with payload key `end_ms`,
        // but the action creator used above shows the reducer expects `endMs`,
        // so that dispatch was a no-op. NOTE(review): unlike the dispatch above
        // this passes a value relative to room start — confirm intended units.
        store.dispatch(updateLastMsgEndMs({ endMs: Date.now() - state.room.startTime }));
      }
    }

    if (this.audioBotEnabled || sessionStorage.getItem('audioBotEnabled')) {
      // Training id distinguishes bot messages from user messages.
      let training_id = localStorage?.getItem('training_id');

      // Convert message history into the transcript format finishPractice expects.
      let chatMessage = this.store.getState().room.msgHistory.map((m: Msg) => ({
        role: m.user === Config.BotName + training_id ? 'system' : 'user',
        content: m.value,
        start_ms: m.start_ms ?? 0,
        end_ms: m.end_ms ?? 0,
      }));

      // Finish the practice session and submit the transcript.
      let res = await finishPractice(
        this.practiceInfo.practiceId,
        aigcConfig.BaseConfig.AppId,
        roomId,
        this.practiceInfo.taskId,
        durationSeconds, // Practice duration in seconds
        chatMessage
      );

      // Clear bot state. Fixed: the original left audioBotEnabled true on this
      // path, which made updateAudioBot() finish the same practice twice.
      this.audioBotEnabled = false;
      this.audioBotStartTime = 0;
      sessionStorage.removeItem('audioBotEnabled');

      return res;
    }

    this.audioBotEnabled = false;
  };

  /**
   * Send a control command to the AI bot as a TLV-encoded binary message.
   * @param command command type
   * @param interruptMode interrupt priority
   * @param message optional extra message
   */
  commandAudioBot = (command: COMMAND, interruptMode = INTERRUPT_PRIORITY.NONE, message = '') => {
    if (this.audioBotEnabled) {
      this.engine.sendUserBinaryMessage(
        aigcConfig.BotName, // Bot user name
        Utils.string2tlv( // Encode as a TLV control message
          JSON.stringify({
            Command: command,
            InterruptMode: interruptMode,
            Message: message,
          }),
          'ctrl' // Message type: control
        )
      );
      return;
    }
    // Bot not running: nothing to command.
    console.warn('Interrupt failed, bot not enabled.');
  };

  /**
   * Restart the AI bot (stop first when it is running).
   */
  updateAudioBot = async () => {
    if (this.audioBotEnabled) {
      // Stop the active session, then start a fresh one.
      await this.stopAudioBot();
      await this.startAudioBot();
    } else {
      // Not running yet: just start.
      await this.startAudioBot();
    }
  };

  /**
   * Whether the AI audio bot is currently enabled.
   * @returns enabled flag
   */
  getAudioBotEnabled = () => {
    return this.audioBotEnabled;
  };

  /**
   * Get the current practice id.
   * @returns practice id
   */
  getPracticeId = () => {
    return this.practiceInfo.practiceId;
  };
}

// Export a shared singleton RTCClient instance for app-wide use.
export default new RTCClient();