import { create } from 'zustand';
import { MetahumanPermissionConfig } from '@/types/metahuman';
import metahumanService from '@/api/services/metahumanService';
import { decodeBase64 } from '@/utils/common';

/**
 * Keeps a video element in sync with an audio element by mirroring the
 * audio's play/pause/seek/rate events onto the video (the audio element is
 * the master clock).
 *
 * Both arguments may be null: callers look the elements up by id
 * (`getElementById` can return null), so missing elements are a no-op.
 */
const setupVideoEventHandlers = (video: HTMLVideoElement | null, audio: HTMLAudioElement | null) => {
  if (!video || !audio) {
    return;
  }

  audio.onplay = () => {
    // Re-align the video with the audio clock before resuming playback.
    video.currentTime = audio.currentTime;
    // play() returns a promise; swallow rejections (e.g. autoplay policy),
    // consistent with the `.catch(() => {})` used elsewhere in this file.
    video.play().catch(() => {});
  };

  audio.onpause = () => {
    video.pause();
  };

  audio.onseeking = () => {
    video.currentTime = audio.currentTime;
  };

  audio.onseeked = () => {
    video.currentTime = audio.currentTime;
  };

  audio.onratechange = () => {
    video.playbackRate = audio.playbackRate;
  };
};

/**
 * Helper that merges the video tracks of one stream with the audio tracks of
 * another into a single MediaStream.
 *
 * Returns null when neither source stream is provided; otherwise returns a
 * new MediaStream carrying the copied tracks (either side may be absent).
 */
const createCombinedStream = (
  videoStream: MediaStream | null,
  audioStream: MediaStream | null,
): MediaStream | null => {
  if (!videoStream && !audioStream) {
    return null;
  }

  const merged = new MediaStream();

  // Copy video tracks first, then audio tracks.
  for (const track of videoStream?.getVideoTracks() ?? []) {
    merged.addTrack(track);
  }
  for (const track of audioStream?.getAudioTracks() ?? []) {
    merged.addTrack(track);
  }

  return merged;
};

/**
 * Fallback frame-processing pipeline using video + canvas.captureStream
 * (no dependency on MediaStreamTrackProcessor).
 *
 * NOTE(review): despite the function name, the current per-pixel pass only
 * normalizes near-white pixels (r,g,b > 250) to fully opaque pure white; the
 * chroma-key/transparency branch was disabled upstream. `virtualAvatarName`
 * is kept for interface compatibility but is currently unused.
 *
 * Returns a processed MediaStream, or the original stream on failure.
 */
const processVideoStreamForTransparencyFallback = async (stream: MediaStream, virtualAvatarName: string): Promise<MediaStream> => {
  try {
    const canvas = document.createElement('canvas');
    // willReadFrequently: we call getImageData on every rendered frame, so
    // hint the browser to keep the backing store CPU-accessible.
    const ctx = canvas.getContext('2d', { willReadFrequently: true });
    if (!ctx) {
      console.warn('无法获取 Canvas 2D 上下文，返回原始流');
      return stream;
    }

    // Hidden video element used only as a frame source for the canvas.
    const videoEl = document.createElement('video');
    videoEl.muted = true;
    (videoEl as any).playsInline = true;
    videoEl.autoplay = true;
    videoEl.srcObject = stream;

    const frameRate = 30;
    const outputStream = (canvas as any).captureStream(frameRate) as MediaStream;

    // Stop the render loop once the source track ends; without this the
    // requestAnimationFrame chain would run forever and leak.
    let stopped = false;
    const sourceTrack = stream.getVideoTracks()[0];
    if (sourceTrack) {
      sourceTrack.addEventListener('ended', () => {
        stopped = true;
      });
    }

    const render = () => {
      if (stopped) {
        return;
      }
      try {
        // readyState >= 2 (HAVE_CURRENT_DATA): a frame is available to draw.
        if (videoEl.readyState >= 2) {
          ctx.drawImage(videoEl, 0, 0, canvas.width, canvas.height);
          const imageData = ctx.getImageData(0, 0, canvas.width, canvas.height);
          const data = imageData.data;

          for (let i = 0; i < data.length; i += 4) {
            const r = data[i];
            const g = data[i + 1];
            const b = data[i + 2];
            // Normalize near-white pixels to fully opaque pure white.
            if (r > 250 && g > 250 && b > 250) {
              data[i] = 255;
              data[i + 1] = 255;
              data[i + 2] = 255;
              data[i + 3] = 255;
            }
          }

          ctx.putImageData(imageData, 0, 0);
        }
      } catch (e) {
        console.warn('回退方案渲染帧异常:', e);
      }
      requestAnimationFrame(render);
    };

    videoEl.addEventListener('loadedmetadata', () => {
      // Match the canvas to the source resolution, with sensible defaults.
      const vw = videoEl.videoWidth || 300;
      const vh = videoEl.videoHeight || 400;
      if (vw && vh) {
        canvas.width = vw;
        canvas.height = vh;
      }
      videoEl.play().catch(() => {});
      requestAnimationFrame(render);
    });

    return outputStream;
  } catch (error) {
    console.error('回退视频流背景透明化处理失败:', error);
    return stream;
  }
};

/**
 * Preferred frame-processing pipeline based on MediaStreamTrackProcessor /
 * MediaStreamTrackGenerator (Insertable Streams). Falls back to the
 * canvas.captureStream implementation when the API is unavailable, and
 * returns the original stream on any unrecoverable setup error.
 *
 * NOTE(review): as in the fallback, the current per-pixel pass only
 * normalizes near-white pixels to opaque pure white; the alpha/chroma-key
 * logic is disabled. `virtualAvatarName` is only forwarded to the fallback.
 */
const processVideoStreamForTransparency = async (stream: MediaStream, virtualAvatarName: string): Promise<MediaStream> => {
  try {
    // Canvas used to run the per-pixel pass on each decoded frame.
    const canvas = document.createElement('canvas');
    // willReadFrequently: getImageData is called once per frame.
    const ctx = canvas.getContext('2d', { willReadFrequently: true });
    if (!ctx) {
      console.warn('无法获取 Canvas 2D 上下文，返回原始流');
      return stream;
    }

    if (!('MediaStreamTrackProcessor' in window)) {
      console.warn('MediaStreamTrackProcessor 不可用，使用回退方案');
      return processVideoStreamForTransparencyFallback(stream, virtualAvatarName);
    }

    const videoTrack = stream.getVideoTracks()[0];
    if (!videoTrack) {
      console.warn('没有找到视频轨道，返回原始流');
      return stream;
    }

    // Reader side: yields decoded VideoFrames from the source track.
    const processor = new (window as any).MediaStreamTrackProcessor({
      track: videoTrack
    });

    // Writer side: a synthetic video track that emits the processed frames.
    const writable = new (window as any).MediaStreamTrackGenerator({
      kind: 'video'
    });

    // Size the canvas from the track settings, with sensible defaults.
    const settings = videoTrack.getSettings();
    canvas.width = settings.width || 300;
    canvas.height = settings.height || 400;

    const reader = processor.readable.getReader();
    const writer = writable.writable.getWriter();

    // Pump loop: read one frame, process it, write it, then schedule itself.
    const processFrame = async () => {
      let frame: any = null;
      let newFrame: any = null;

      try {
        const { value: frameValue, done } = await reader.read();
        if (done) {
          writer.close();
          return;
        }

        frame = frameValue;

        // Draw the decoded frame, run the pixel pass, write it back.
        ctx.drawImage(frame, 0, 0, canvas.width, canvas.height);
        const imageData = ctx.getImageData(0, 0, canvas.width, canvas.height);
        const data = imageData.data;

        for (let i = 0; i < data.length; i += 4) {
          const r = data[i];
          const g = data[i + 1];
          const b = data[i + 2];
          // Normalize near-white pixels to fully opaque pure white.
          if (r > 250 && g > 250 && b > 250) {
            data[i] = 255;
            data[i + 1] = 255;
            data[i + 2] = 255;
            data[i + 3] = 255;
          }
        }

        ctx.putImageData(imageData, 0, 0);

        // Re-wrap the canvas as a VideoFrame, preserving timing metadata.
        newFrame = new (window as any).VideoFrame(canvas, {
          timestamp: frame.timestamp,
          duration: frame.duration
        });

        await writer.write(newFrame);

        // Schedule the next frame (intentionally not awaited).
        processFrame();
      } catch (error) {
        // On failure, close the writer so the generated track ends cleanly.
        // The caller already holds the generated stream at this point, so we
        // cannot swap back to the original stream here — consumers will
        // simply observe the track ending.
        console.error('处理视频帧时出错:', error);
        writer.close();
      } finally {
        // VideoFrames hold decoder resources and must be closed explicitly.
        if (frame) {
          try {
            frame.close();
          } catch (e) {
            console.warn('关闭原始帧时出错:', e);
          }
        }
        if (newFrame) {
          try {
            newFrame.close();
          } catch (e) {
            console.warn('关闭新帧时出错:', e);
          }
        }
      }
    };

    // Kick off the pump loop (fire-and-forget).
    processFrame();

    // A MediaStreamTrackGenerator is itself a MediaStreamTrack.
    return new MediaStream([writable]);
  } catch (error) {
    console.error('视频流背景透明化处理失败:', error);
    return stream;
  }
};

// Store state for the metahuman (digital human) feature.
interface MetahumanState {
  // Whether the virtual avatar feature is enabled
  virtualAvatarEnabled: boolean;
  appId: string;
  // Virtual avatar name
  virtualAvatarName: string;
  // Virtual avatar voice type
  virtualAvatarVoice: string;
  // Metahuman connection status
  isConnected: boolean;
  // Metahuman loading status
  isLoading: boolean;
  // Metahuman mute status
  isMuted: boolean;
  // Whether the mute state has been explicitly toggled at least once
  isMuteToggle: boolean;
  // Metahuman permission configuration
  permissionConfig: MetahumanPermissionConfig | null;
  // Loading status of the permission configuration
  isPermissionConfigLoading: boolean;
  // Metahuman avatar resource URL
  virtualAvatarUrl: string;
  // Metahuman avatar ID
  virtualAvatarId: string;
  // Metahuman speaker (voice) ID
  virtualSpeakerId: string;
  // Metahuman service URL
  metahumanUrl: string;
  sessionId: string;
  // Metahuman task ID
  taskId: string;
  // WebRTC connection related state
  pc: RTCPeerConnection | null;
  videoStream: MediaStream | null;
  audioStream: MediaStream | null;
  // Connection states
  connectionState: RTCPeerConnectionState;
  iceConnectionState: RTCIceConnectionState;
  // Background-color related flag
  makeBackgroundTransparent: boolean;
  // Polling timer handle
  pollInterval: NodeJS.Timeout | null;
  // AbortController used to cancel in-flight requests
  abortController: AbortController | null;
  // Whether the metahuman start request has been issued
  isMetahumanStart: boolean;
}

// Store actions for the metahuman feature.
interface MetahumanActions {
  // Set whether the virtual avatar feature is enabled
  setVirtualAvatarEnabled: (enabled: boolean) => void;
  // Set the application ID
  setAppId: (id: string) => void;
  // Set the virtual avatar name
  setVirtualAvatarName: (name: string) => void;
  // Set the virtual avatar voice type
  setVirtualAvatarVoice: (voice: string) => void;
  // Set the metahuman connection status
  setIsConnected: (connected: boolean) => void;
  // Set whether the metahuman start request has been issued
  setIsMetahumanStart: (start: boolean) => void;
  // Set the metahuman loading status
  setIsLoading: (loading: boolean) => void;
  // Set the metahuman mute status
  setIsMuted: (muted: boolean) => void;
  // Set whether the mute state has been explicitly toggled
  setIsMuteToggle: (toggle: boolean) => void;
  // Set the metahuman permission configuration
  setPermissionConfig: (config: MetahumanPermissionConfig | null) => void;
  // Set the permission-configuration loading status
  setIsPermissionConfigLoading: (loading: boolean) => void;
  // Set the metahuman avatar resource URL
  setVirtualAvatarUrl: (url: string) => void;
  // Set the metahuman avatar ID
  setVirtualAvatarId: (id: string) => void;
  // Set the metahuman speaker (voice) ID
  setVirtualSpeakerId: (id: string) => void;
  // Set the metahuman service URL
  setMetahumanUrl: (url: string) => void;
  // Set the session ID
  setSessionId: (id: string) => void;
  // Set the task ID
  setTaskId: (id: string) => void;
  // WebRTC connection management
  setPc: (pc: RTCPeerConnection | null) => void;
  setVideoStream: (stream: MediaStream | null) => void;
  setAudioStream: (stream: MediaStream | null) => void;
  setConnectionState: (state: RTCPeerConnectionState) => void;
  setIceConnectionState: (state: RTCIceConnectionState) => void;
  // Background-color related flag
  setMakeBackgroundTransparent: (transparent: boolean) => void;
  // Start the WebRTC connection
  startWebRTCConnection: (offerApi: string, coturnUrl: string, makeBackgroundTransparent?: boolean) => Promise<void>;
  // Negotiate the WebRTC connection (SDP offer/answer)
  negotiate: (offerApi: string) => Promise<void>;
  // Stop the metahuman session
  stop: () => Promise<void>;
  // Interrupt the current speech
  stopSpeaking: (chatId?: string) => Promise<void>;
  // Reset all state
  reset: () => void;
  // Set the polling timer handle
  setPollInterval: (interval: NodeJS.Timeout | null) => void;
  // Clear the polling timer
  clearPollInterval: () => void;
  // Set the AbortController
  setAbortController: (controller: AbortController | null) => void;
}

type MetahumanStore = MetahumanState & MetahumanActions;

// Default values for every piece of store state; also reused by reset().
const initialState: MetahumanState = {
  virtualAvatarEnabled: false,
  appId: '',
  virtualAvatarName: '',
  virtualAvatarVoice: '',
  isConnected: false,
  isLoading: false,
  isMuted: true,
  isMuteToggle: false,
  permissionConfig: null,
  isPermissionConfigLoading: false,
  virtualAvatarUrl: '',
  virtualAvatarId: '',
  virtualSpeakerId: '',
  metahumanUrl: '',
  sessionId: '',
  taskId: '',
  // WebRTC connection related state
  pc: null,
  videoStream: null,
  audioStream: null,
  connectionState: 'new',
  iceConnectionState: 'new',
  // Background-color related flag
  makeBackgroundTransparent: false,
  pollInterval: null,
  // AbortController starts unset
  abortController: null,
  // Whether the metahuman start request has been issued
  isMetahumanStart: false,
};

// Zustand store managing the metahuman lifecycle: configuration, TURN
// credential retrieval, WebRTC connection setup/negotiation, stream handling
// and teardown.
export const useMetahumanStore = create<MetahumanStore>((set, get) => ({
  ...initialState,

  // --- simple setters ---

  setVirtualAvatarEnabled: (enabled) => set({ virtualAvatarEnabled: enabled }),

  setAppId: (id) => set({ appId: id }),

  setVirtualAvatarName: (name) => set({ virtualAvatarName: name }),

  setVirtualAvatarVoice: (voice) => set({ virtualAvatarVoice: voice }),

  setIsConnected: (connected) => set({ isConnected: connected }),

  setIsMetahumanStart: (start) => set({ isMetahumanStart: start }),

  setIsLoading: (loading) => set({ isLoading: loading }),

  setIsMuted: (muted) => set({ isMuted: muted }),

  setIsMuteToggle: (toggle) => set({ isMuteToggle: toggle }),

  setPermissionConfig: (config) => set({ permissionConfig: config }),

  setIsPermissionConfigLoading: (loading) => set({ isPermissionConfigLoading: loading }),

  setVirtualAvatarUrl: (url) => set({ virtualAvatarUrl: url }),

  setVirtualAvatarId: (id) => set({ virtualAvatarId: id }),

  setVirtualSpeakerId: (id) => set({ virtualSpeakerId: id }),

  setMetahumanUrl: (url) => set({ metahumanUrl: url }),

  setSessionId: (id) => set({ sessionId: id }),

  setTaskId: (id) => set({ taskId: id }),

  setPc: (pc) => set({ pc }),

  setVideoStream: (stream) => set({ videoStream: stream }),

  setAudioStream: (stream) => set({ audioStream: stream }),

  setConnectionState: (state) => set({ connectionState: state }),

  setIceConnectionState: (state) => set({ iceConnectionState: state }),

  setMakeBackgroundTransparent: (transparent) => set({ makeBackgroundTransparent: transparent }),

  setAbortController: (controller) => set({ abortController: controller }),

  setPollInterval: (interval) => set({ pollInterval: interval }),

  // Clears the polling timer (if any) and resets the handle in state.
  clearPollInterval: () => {
    const { pollInterval } = get();
    if (pollInterval) {
      clearInterval(pollInterval);
      set({ pollInterval: null });
    }
  },

  /**
   * Establishes the WebRTC connection to the metahuman service: fetches TURN
   * credentials, creates the RTCPeerConnection, wires state/track listeners,
   * then negotiates via `offerApi`. Throws when the credential payload cannot
   * be parsed or when negotiation fails.
   */
  startWebRTCConnection: async (offerApi: string, coturnUrl: string, makeBackgroundTransparent?: boolean) => {
    // NOTE(review): setIsMuted and isMuted are destructured but currently unused
    // (the mute re-assertion below is commented out).
    const { setIsConnected, setIsLoading, setIsMuted, isMuted, virtualAvatarName, setVirtualAvatarEnabled } = get();
    console.log('startWebRTCConnection virtualAvatarName', virtualAvatarName);

    console.log('开始WebRTC连接:', offerApi, { makeBackgroundTransparent });

    if (makeBackgroundTransparent !== undefined) {
      set({ makeBackgroundTransparent });
    }

    // Mark loading while the WebRTC connection is being established.
    setIsLoading(true);

    // The decoded credential blob is split on '=' and the slice() calls strip
    // one wrapping character on each side — presumably the payload looks like
    // "(user=credential)"; TODO confirm the exact format with the service.
    const rtcCredentials = await metahumanService.getMetahumanTurnCredentials()
    const rtcCredentialsList = decodeBase64(rtcCredentials?.metahumanTurnCredentials)?.split("=")
    if (rtcCredentialsList.length !== 2) {
      setIsLoading(false);
      setIsConnected(false);
      setVirtualAvatarEnabled(false);
      throw new Error('数字人服务连接失败，请稍后再试');
    }
    const user = rtcCredentialsList[0]?.slice(1)
    const credential = rtcCredentialsList[1]?.slice(0, -1)

    // Relay-only ICE through the provided coturn server.
    const config: RTCConfiguration = {
      iceServers: [{
        urls: [coturnUrl],
        username: user,
        credential: credential
      }],
      iceTransportPolicy: 'relay'
    };

    let pc: RTCPeerConnection | null = null;
    if (typeof window !== 'undefined' && typeof window.RTCPeerConnection !== 'undefined') {
      pc = new window.RTCPeerConnection(config);
      console.log('RTCPeerConnection已创建');
      set({ pc });
    } else {
      // SSR / unsupported environment: bail out without throwing.
      console.error('当前环境不支持 WebRTC');
      setIsLoading(false);
      setIsConnected(false);
      setVirtualAvatarEnabled(false);
      set({ pc: null });
    }

    if (!pc) {
      setIsLoading(false);
      setIsConnected(false);
      setVirtualAvatarEnabled(false);
      return;
    }

    // Mirror connection state into the store; drop loading/connected flags
    // when the connection fails or disconnects.
    pc.addEventListener('connectionstatechange', () => {
      const state = pc.connectionState;
      set({ connectionState: state });

      if (state === 'failed' || state === 'disconnected') {
        setIsLoading(false);
        setIsConnected(false);
      }
    });

    pc.addEventListener('iceconnectionstatechange', () => {
      const state = pc.iceConnectionState;
      set({ iceConnectionState: state });
    });

    // Incoming media: process video (optional transparency pass), combine
    // audio+video into one stream, and flip connected/loading flags once the
    // video actually has data.
    pc.addEventListener('track', async (evt) => {
      console.log('收到WebRTC track:', {
        kind: evt.track.kind,
        streams: evt.streams.length,
        stream: evt.streams[0]
      });

      const videoElement = document.getElementById('metahuman-video') as HTMLVideoElement;
      const audioElement = document.getElementById('metahuman-audio') as HTMLAudioElement;
      setupVideoEventHandlers(videoElement, audioElement);

      if (evt.track.kind === 'video') {
        console.log('设置视频流:', evt.streams[0]);

        // When background transparency is requested, run the stream through
        // the frame-processing pipeline first.
        let processedStream = evt.streams[0];
        if (get().makeBackgroundTransparent) {
          processedStream = await processVideoStreamForTransparency(evt.streams[0], virtualAvatarName);
        }

        // Combine the (possibly processed) video with any audio we already
        // received into a single stream.
        const currentState = get();
        const combinedStream = createCombinedStream(processedStream, currentState.audioStream);
        set({ videoStream: combinedStream });

        // Only set the video stream here; the connected flag is deferred
        // until the video has actually loaded data (see below).
        // set({ videoStream: processedStream });

        // Temporary off-DOM video element used solely to detect when the
        // stream has decodable data (loadeddata) or fails to load.
        const videoElement = document.createElement('video');
        videoElement.srcObject = processedStream;
        videoElement.muted = true; // muted so the probe never plays sound

        videoElement.addEventListener('loadeddata', () => {
          console.log('视频数据加载完成，设置连接状态');
          set((state) => ({
            ...state,
            isConnected: true,
            isLoading: false
          }));

          // Clean up the temporary probe element.
          videoElement.remove();
        });

        videoElement.addEventListener('error', (error) => {
          console.error('视频加载错误:', error);
          // Also settle the flags when the video fails to load.
          set((state) => ({
            ...state,
            isConnected: false,
            isLoading: false
          }));

          // Clean up the temporary probe element.
          videoElement.remove();
        });

      } else if (evt.track.kind === 'audio') {
        console.log('设置音频流:', evt.streams[0]);
        set({ audioStream: evt.streams[0] });

        // Merge the new audio track into the combined stream.
        const currentState = get();
        const combinedStream = createCombinedStream(currentState.videoStream, evt.streams[0]);
        set({ videoStream: combinedStream });
      }
    });

    try {
      // Run the SDP offer/answer exchange.
      await get().negotiate(offerApi);

      // Re-assert the initial mute state (currently disabled).
      // if (isMuted) {
      //   setIsMuted(true);
      // }
    } catch (error) {
      console.error('WebRTC连接失败:', error);
      setIsLoading(false);
      throw error;
    }
  },

  /**
   * SDP offer/answer negotiation: creates a recvonly offer, waits for ICE
   * gathering to complete, POSTs the offer to `offerApi`, stores the returned
   * session id and applies the remote answer. No-op when `pc` is unset;
   * skips sending the offer when a sessionId already exists.
   */
  negotiate: async (offerApi: string) => {
    const { pc, setSessionId, sessionId } = get();
    if (!pc) return;

    // Receive-only: we never send media to the metahuman service.
    pc.addTransceiver('video', { direction: 'recvonly' });
    pc.addTransceiver('audio', { direction: 'recvonly' });

    try {
      const offer = await pc.createOffer();
      await pc.setLocalDescription(offer);

      // Wait for ICE gathering to finish so the offer carries all candidates
      // (no trickle ICE).
      await new Promise<void>((resolve) => {
        if (pc.iceGatheringState === 'complete') {
          resolve();
        } else {
          const checkState = () => {
            if (pc.iceGatheringState === 'complete') {
              pc.removeEventListener('icegatheringstatechange', checkState);
              resolve();
            }
          };
          pc.addEventListener('icegatheringstatechange', checkState);
        }
      });

      const offerData = pc.localDescription;
      if (sessionId) {
        console.log('sessionId不为空，跳过offer');
        return;
      }
      const response = await fetch(offerApi, {
        body: JSON.stringify({
          sdp: offerData?.sdp,
          type: offerData?.type,
        }),
        headers: {
          'Content-Type': 'application/json'
        },
        method: 'POST'
      });

      // A session id of 0 is considered valid, hence the `== 0` checks.
      const answer = await response.json();
      if (!(answer.sessionid || answer.sessionid == 0)) {
        throw new Error(answer?.message || '协商失败');
      }

      // NOTE(review): this condition is always true after the guard above,
      // and `answer.sessionid || answer.sessionid` is redundant.
      if (answer.sessionid || answer.sessionid == 0) {
        setSessionId(String(answer.sessionid || answer.sessionid));
      }

      await pc.setRemoteDescription(answer);
    } catch (error) {
      console.error('协商失败:', error);
      throw error;
    }
  },

  /**
   * Tears down the session: clears the polling timer, closes the peer
   * connection, stops all media tracks, resets connection-related state, and
   * finally calls the stop API when a taskId exists. The taskId is only
   * cleared after a successful stop so a failed stop can be retried.
   */
  stop: async () => {
    const { taskId, pc, videoStream, audioStream, isMuteToggle, isMuted } = get();

    console.log('store stop 函数被调用，当前taskId:', taskId);

    // Cancel an in-flight startMetahuman request (currently disabled).
    // if (abortController) {
    //   console.log('取消正在进行的 startMetahuman 请求');
    //   abortController.abort();
    //   set({ abortController: null });
    // }

    // Clear the polling timer.
    get().clearPollInterval();

    // Close the WebRTC connection.
    if (pc) {
      pc.close();
      set({ pc: null });
    }

    // Stop and release the media streams.
    if (videoStream) {
      videoStream.getTracks().forEach(track => track.stop());
      set({ videoStream: null });
    }

    if (audioStream) {
      audioStream.getTracks().forEach(track => track.stop());
      set({ audioStream: null });
    }

    // Reset connection-related state.
    set({
      isConnected: false,
      // isMetahumanStart: false,
      isLoading: false,
      // isMuted: isMuteToggle ? isMuted : true,
      isMuted: false,
      metahumanUrl: '',
      sessionId: '',
      connectionState: 'new',
      iceConnectionState: 'new',
    });

    // Stop the metahuman — only call the API when a taskId exists.
    if (taskId) {
      console.log('开始调用 stopMetahuman API，使用 taskId:', taskId);
      try {
        await metahumanService.stopMetahuman(taskId);
        console.log('数字人停止成功，清空taskId');
        set({ taskId: '' }); // only clear the taskId after a successful stop
      } catch (error) {
        console.error('停止数字人失败:', error);
        // Keep the taskId on failure so the stop can be retried.
      }
    } else {
      console.log('当前没有有效的 taskId，跳过 stopMetahuman API 调用');
    }
  },

  /**
   * Interrupts the current speech by POSTing an interrupt message to the
   * metahuman's /human endpoint (reached through the openapi proxy).
   * No-op when the sessionId is invalid or the metahumanUrl is unset.
   * Request failures are logged, never thrown.
   */
  stopSpeaking: async (chatId?: string) => {
    const { sessionId, metahumanUrl } = get();

    console.log('停止对话函数被调用:', { sessionId, metahumanUrl, chatId });

    let mhSessionIdInt = parseInt(sessionId, 10);

    if (isNaN(mhSessionIdInt)) {
      console.log("mhSessionId无效:", sessionId);
      return;
    }

    if (!metahumanUrl) {
      console.log("metahumanUrl为空");
      return;
    }

    // Build the proxy path from the metahuman URL's host (and port if any).
    let origin = new URL(metahumanUrl).origin;
    const hostname = new URL(origin).hostname;
    const port = new URL(origin).port;

    let api = '';
    if (port) {
      api = '/api/openapi/proxy/' + hostname + ':' + port + '/human';
    } else {
      api = '/api/openapi/proxy/' + hostname + '/human';
    }

    // Empty text + interrupt=true tells the service to cut off speech.
    let body: any = {
      text: "",
      sessionid: mhSessionIdInt,
      interrupt: true,
      type: "echo"
    };

    if (chatId) {
      body.chatId = chatId;
    }

    try {
      console.log('发送停止对话请求:', { api, body });

      const response = await fetch(api, {
        body: JSON.stringify(body),
        headers: {
          'Content-Type': 'application/json',
        },
        method: 'POST',
      });

      if (response.ok) {
        console.log('停止对话请求成功');
      } else {
        console.error('停止对话请求失败:', response.status, response.statusText);
      }
    } catch (error) {
      console.error('停止对话请求异常:', error);
    }
  },

  /**
   * Synchronous full reset: releases connection resources and restores every
   * field to its initial value. Unlike stop(), this does not call the stop
   * API and does not preserve the taskId.
   */
  reset: () => {
    const { pc, videoStream, audioStream } = get();

    // Cancel in-flight requests (currently disabled).
    // if (abortController) {
    //   abortController.abort();
    // }

    // Clear the polling timer.
    get().clearPollInterval();

    // Close the WebRTC connection.
    if (pc) {
      pc.close();
    }

    // Stop the media streams.
    if (videoStream) {
      videoStream.getTracks().forEach(track => track.stop());
    }

    if (audioStream) {
      audioStream.getTracks().forEach(track => track.stop());
    }

    set(initialState);
  },
})); 