import { useEffect, useRef, useState } from 'react';
import { event, invoke } from '@tauri-apps/api';
import { Loader } from '@mantine/core';
import { IconPlayerPlay } from '@tabler/icons-react';
import Hls from 'hls.js';
import dashjs from 'dashjs';
import { Controls } from './Controls';
import './MediaPlayer.css';

// Logging helper: tags every console line with the component name so player
// output is easy to filter in devtools. `unknown[]` (not `any[]`) keeps the
// arguments type-safe; console.* accepts them unchanged.
const log = {
  info: (...args: unknown[]) => console.log('[MediaPlayer]', ...args),
  debug: (...args: unknown[]) => console.debug('[MediaPlayer]', ...args),
  error: (...args: unknown[]) => console.error('[MediaPlayer]', ...args),
  warn: (...args: unknown[]) => console.warn('[MediaPlayer]', ...args),
  trace: (...args: unknown[]) => console.trace('[MediaPlayer]', ...args),
};

// Decoded video frame pushed from the backend; `data` holds raw pixel bytes
// rendered onto the canvas via ImageData (see createVideoProcessor).
interface VideoFrameEvent {
  type: 'VideoFrame';
  width: number;
  height: number;
  data: Uint8Array;
  timestamp: number;
  duration: number;
}

// Decoded PCM audio pushed from the backend; samples are interleaved across
// `channels` (see the de-interleave loop in createAudioProcessor).
interface AudioFrameEvent {
  type: 'AudioFrame';
  data: Float32Array;
  channels: number;
  sampleRate: number;
  timestamp: number;
}

// Total media duration, in seconds (the PositionChanged/Duration handlers
// treat backend values as seconds).
interface DurationEvent {
  type: 'Duration';
  duration: number;
}

// Backend playback lifecycle notifications (no payload beyond the tag).
interface PlaybackStartedEvent {
  type: 'PlaybackStarted';
}

interface PlaybackResumedEvent {
  type: 'PlaybackResumed';
}

interface PlaybackPausedEvent {
  type: 'PlaybackPaused';
}

interface PlaybackStoppedEvent {
  type: 'PlaybackStopped';
}

// Backend-reported playback error with a human-readable message.
interface ErrorEvent {
  type: 'Error';
  message: string;
}

// Playback head moved; both values are in seconds.
interface PositionChangedEvent {
  type: 'PositionChanged';
  position: number;
  duration: number;
}

// Discriminated union of every message carried on the 'player-event' Tauri
// channel; `type` is the discriminant used by the switch in MediaPlayer.
type PlayerEvent = 
  | VideoFrameEvent 
  | AudioFrameEvent 
  | DurationEvent 
  | PlaybackStartedEvent 
  | PlaybackResumedEvent 
  | PlaybackPausedEvent 
  | PlaybackStoppedEvent 
  | ErrorEvent
  | PositionChangedEvent;

// Fully-controlled component contract: the parent owns all playback state
// (isPlaying, volume, currentTime, ...) and receives change requests through
// the on* callbacks.
interface MediaPlayerProps {
  src: string;
  isPlaying: boolean;
  volume: number;        // expected range 0..1 (clamped before use)
  currentTime: number;   // seconds
  duration: number;      // seconds
  isMuted: boolean;
  playbackSpeed: number;
  onPlay: () => void;
  onPause: () => void;
  onSeek: (position: number) => void;  // seconds; also fed by backend PositionChanged
  onVolumeChange: (volume: number) => void;
  onMuteToggle: () => void;
  onSpeedChange: (speed: number) => void;
  onError: (error: Error) => void;
  onDurationChange: (duration: number) => void;  // seconds
}

// Playback lifecycle modelled as a state machine rather than boolean flags.
type PlaybackState = 'loading' | 'ready' | 'playing' | 'paused' | 'stopped' | 'error';

interface MediaPlayerState {
    playbackState: PlaybackState;
    error: string | null;
    isFullscreen: boolean;
    // Set when play() fails with AbortError/NotSupportedError and the
    // component should fall back to native <video> playback. The component
    // writes this field via setState; without declaring it here that write is
    // a type error under strict TypeScript.
    useNativePlayer?: boolean;
}

// Factory for a canvas-backed renderer that paints raw pixel frames pushed
// from the backend. Throws if a 2D context cannot be obtained.
const createVideoProcessor = (canvas: HTMLCanvasElement) => {
  const ctx = canvas.getContext('2d');
  if (!ctx) throw new Error('无法获取 canvas 上下文');

  // Handle of the rAF callback scheduled for the most recent frame, or null
  // when nothing is pending.
  let pendingFrame: number | null = null;

  return {
    processFrame: (frame: VideoFrameEvent) => {
      const { width, height, data } = frame;

      // Resize the canvas only when the frame dimensions actually change
      // (resizing clears the canvas and is comparatively expensive).
      if (canvas.width !== width || canvas.height !== height) {
        canvas.width = width;
        canvas.height = height;
      }

      // Paint on the next animation frame. Cancel any frame still waiting to
      // be painted: previously every frame queued its own callback, so when
      // frames arrived faster than the display refresh, a backlog of stale
      // callbacks accumulated and superseded frames were painted anyway.
      if (pendingFrame !== null) {
        cancelAnimationFrame(pendingFrame);
      }
      pendingFrame = requestAnimationFrame(() => {
        pendingFrame = null;
        const imageData = new ImageData(
          new Uint8ClampedArray(data),
          width,
          height
        );
        ctx.putImageData(imageData, 0, 0);
      });
    },
    clear: () => {
      // Drop any frame still queued for painting, then blank the canvas.
      if (pendingFrame !== null) {
        cancelAnimationFrame(pendingFrame);
        pendingFrame = null;
      }
      ctx.clearRect(0, 0, canvas.width, canvas.height);
    }
  };
};

// Factory for a Web Audio sink that plays interleaved PCM frames pushed from
// the backend. Each new frame replaces whatever buffer source is currently
// sounding.
const createAudioProcessor = (context: AudioContext, gainNode: GainNode) => {
  let activeSource: AudioBufferSourceNode | null = null;

  // Stop and detach the source that is currently playing. An
  // InvalidStateError just means the source already finished, so it is
  // ignored; anything else propagates to the caller.
  const stopActive = () => {
    if (!activeSource) return;
    try {
      activeSource.stop();
      activeSource.disconnect();
    } catch (err) {
      if (err instanceof Error && err.name !== 'InvalidStateError') {
        throw err;
      }
    }
  };

  return {
    processFrame: (frame: AudioFrameEvent) => {
      const { data, channels, sampleRate } = frame;

      stopActive();

      // Allocate a buffer sized for one de-interleaved frame.
      const frameCount = data.length / channels;
      const buffer = context.createBuffer(channels, frameCount, sampleRate);

      // De-interleave: sample i of channel c lives at data[i * channels + c].
      for (let ch = 0; ch < channels; ch++) {
        const dest = buffer.getChannelData(ch);
        for (let i = 0; i < dest.length; i++) {
          dest[i] = data[i * channels + ch];
        }
      }

      // Wire a fresh one-shot source through the shared gain node and start it.
      const next = context.createBufferSource();
      next.buffer = buffer;
      next.connect(gainNode);

      activeSource = next;
      next.start();
    },
    cleanup: () => {
      // Best-effort teardown: swallow every error, including ones stopActive
      // would normally rethrow.
      try {
        stopActive();
      } catch {
        // ignore cleanup errors
      }
      activeSource = null;
    }
  };
};

// Hook intended to track a video element's intrinsic aspect ratio.
const useVideoAspectRatio = (src: string) => {
  // Defaults to 16:9 until real dimensions are observed.
  const [aspectRatio, setAspectRatio] = useState(16 / 9);
  // NOTE(review): this ref is created locally and never returned or attached
  // to any <video> element, so videoRef.current is always null, the effect
  // below bails out immediately, and the hook always reports the 16:9
  // default. Either return the ref for the caller to attach, or accept the
  // element as a parameter — TODO confirm intended design.
  const videoRef = useRef<HTMLVideoElement>(null);

  useEffect(() => {
    if (!videoRef.current) return;

    const video = videoRef.current;
    // Recompute the ratio once the element reports its intrinsic size.
    const updateDimensions = () => {
      if (video.videoWidth > 0 && video.videoHeight > 0) {
        const ratio = video.videoWidth / video.videoHeight;
        setAspectRatio(ratio);
      }
    };

    video.addEventListener('loadedmetadata', updateDimensions);
    return () => video.removeEventListener('loadedmetadata', updateDimensions);
  }, [src]);

  return aspectRatio;
};

/**
 * Tauri-backed media player (fully controlled component).
 *
 * Two playback paths coexist:
 *  - local `file://` sources: the Rust backend decodes and pushes frames over
 *    the 'player-event' channel; video is painted on a <canvas>, audio plays
 *    through a Web Audio gain graph, and the <video> element is hidden;
 *  - other sources: the plain <video> element plays them (hls.js / dash.js
 *    instances are torn down when the source changes).
 *
 * All user-visible state (isPlaying, volume, currentTime, ...) is owned by
 * the parent; this component only reports changes through the on* callbacks.
 */
export function MediaPlayer({
  src,
  isPlaying,
  volume,
  currentTime,
  duration,
  isMuted,
  playbackSpeed,
  onPlay,
  onPause,
  onSeek,
  onVolumeChange,
  onMuteToggle,
  onSpeedChange,
  onError,
  onDurationChange,
}: MediaPlayerProps) {
  const [state, setState] = useState<MediaPlayerState>({
    playbackState: 'loading',
    error: null,
    isFullscreen: false
  });

  const videoRef = useRef<HTMLVideoElement>(null);
  const hlsRef = useRef<Hls | null>(null);
  const dashRef = useRef<dashjs.MediaPlayerClass | null>(null);
  const canvasRef = useRef<HTMLCanvasElement>(null);
  const mountedRef = useRef<boolean>(true);

  // Frame processors (canvas renderer / Web Audio sink) built on mount.
  const videoProcessorRef = useRef<ReturnType<typeof createVideoProcessor> | null>(null);
  const audioProcessorRef = useRef<ReturnType<typeof createAudioProcessor> | null>(null);

  // The master audio graph must survive re-renders. Previously gainNode was a
  // render-scoped `let` that reset to null on every render, so effects running
  // after the first render read a stale/null node (and a `gainNode!` assertion
  // could crash). Refs keep both the context and the gain node stable.
  const audioContextRef = useRef<AudioContext | null>(null);
  const gainNodeRef = useRef<GainNode | null>(null);

  // The backend event listener outlives many renders; mirror isPlaying into a
  // ref so the listener sees the current value instead of the one captured at
  // subscription time (the effect deps previously omitted isPlaying).
  const isPlayingRef = useRef(isPlaying);
  useEffect(() => {
    isPlayingRef.current = isPlaying;
  }, [isPlaying]);

  const aspectRatio = useVideoAspectRatio(src);

  // Master audio graph: one AudioContext feeding a gain node for volume/mute.
  useEffect(() => {
    // Re-arm the mounted flag: StrictMode (and fast remounts) run this
    // cleanup and then re-run the effect on the same instance, and the flag
    // would otherwise stay false forever.
    mountedRef.current = true;

    const audioContext = new AudioContext();
    const gain = audioContext.createGain();
    gain.connect(audioContext.destination);
    audioContextRef.current = audioContext;
    gainNodeRef.current = gain;

    return () => {
      mountedRef.current = false;
      audioContextRef.current = null;
      gainNodeRef.current = null;
      audioContext.close().catch(() => {});
    };
  }, []);

  // Forward play/pause intents to the Rust backend.
  useEffect(() => {
    const handlePlaybackStateChange = async () => {
      if (!mountedRef.current || !src) return;

      try {
        if (isPlaying) {
          await invoke('play');
        } else {
          await invoke('pause');
        }
      } catch (error) {
        if (mountedRef.current) {
          console.error('[FRONTEND] Error changing playback state:', error);
        }
      }
    };

    handlePlaybackStateChange();

    return () => {
      // Ensure the backend is paused on teardown. No mountedRef check here:
      // on unmount the audio-graph cleanup (declared earlier, so run earlier)
      // has already cleared the flag, which used to skip this final pause.
      if (src) {
        invoke('pause').catch(() => {});
      }
    };
  }, [isPlaying, src]);

  // Build the frame processors once the canvas exists.
  useEffect(() => {
    if (!canvasRef.current) return;

    try {
      videoProcessorRef.current = createVideoProcessor(canvasRef.current);
      // Reuse the master AudioContext: AudioNodes cannot be connected across
      // contexts, so building the processor on a fresh AudioContext (as the
      // previous revision did) made source.connect(gainNode) throw.
      if (audioContextRef.current && gainNodeRef.current) {
        audioProcessorRef.current = createAudioProcessor(
          audioContextRef.current,
          gainNodeRef.current
        );
      }
    } catch (error) {
      log.error('Failed to initialize processors:', error);
      onError(error instanceof Error ? error : new Error('初始化失败'));
    }

    return () => {
      audioProcessorRef.current?.cleanup();
    };
  }, [onError]);

  // Subscribe to frame/position/duration events pushed by the backend. The
  // unlisten handle is kept and invoked on teardown — previously it was
  // discarded, leaking one listener per dependency change.
  useEffect(() => {
    let cancelled = false;
    let unlisten: (() => void) | null = null;

    event
      .listen<PlayerEvent>('player-event', (evt) => {
        if (!mountedRef.current) return;

        const { payload } = evt;
        try {
          switch (payload.type) {
            case 'PositionChanged':
              if (typeof payload.position === 'number') {
                // Backend reports seconds.
                onSeek(payload.position);
              }
              break;

            case 'Duration':
              if (typeof payload.duration === 'number') {
                // Backend reports seconds.
                onDurationChange(payload.duration);
              }
              break;

            case 'VideoFrame':
              // Only paint while playing; the union discriminant already
              // narrows payload, so no cast is needed.
              if (isPlayingRef.current && videoProcessorRef.current) {
                videoProcessorRef.current.processFrame(payload);
              }
              break;

            case 'AudioFrame':
              if (isPlayingRef.current && audioProcessorRef.current) {
                audioProcessorRef.current.processFrame(payload);
              }
              break;

            case 'PlaybackStarted':
            case 'PlaybackResumed':
              console.log('[FRONTEND] Playback started/resumed');
              break;

            case 'PlaybackPaused':
            case 'PlaybackStopped':
              console.log('[FRONTEND] Playback paused/stopped');
              break;

            case 'Error':
              console.error('[FRONTEND] Player error:', payload.message);
              break;
          }
        } catch (error) {
          log.error(`Error processing ${payload.type}:`, error);
          onError(error instanceof Error ? error : new Error(`处理${payload.type}失败`));
        }
      })
      .then((fn) => {
        if (cancelled) {
          // Effect was cleaned up before the subscription resolved.
          fn();
        } else {
          unlisten = fn;
        }
      })
      .catch((error: unknown) => {
        log.error('Error setting up event listener:', error);
      });

    return () => {
      cancelled = true;
      unlisten?.();
      unlisten = null;
    };
  }, [onSeek, onDurationChange, onError]);

  // (Re)configure playback whenever the source changes.
  useEffect(() => {
    if (!src || !videoRef.current) return;

    const video = videoRef.current;
    log.info("Setting up video with URL:", src);

    // Tear down any streaming engine left over from a previous source.
    if (hlsRef.current) {
      hlsRef.current.destroy();
      hlsRef.current = null;
    }
    if (dashRef.current) {
      dashRef.current.destroy();
      dashRef.current = null;
    }

    const handleCanPlay = () => {
      log.info("Video can play event triggered");
      setState(prev => ({ ...prev, playbackState: 'ready' }));
      onPlay();
    };

    // Local files are decoded by the backend and painted on the canvas, so
    // the <video> element is hidden and readiness is signalled immediately.
    if (src.startsWith('file://')) {
      log.info('Loading local file');
      video.style.display = 'none';
      if (canvasRef.current) {
        videoProcessorRef.current = createVideoProcessor(canvasRef.current);
      }
      handleCanPlay();
    }

    return () => {
      // (Previous revision also removed a 'canplay' listener here that was
      // never added — dropped as dead code.)
      video.src = '';
      setState(prev => ({ ...prev, playbackState: 'loading' }));
    };
  }, [src, onPlay]);

  // Drive the <video> element from the isPlaying prop.
  useEffect(() => {
    if (!videoRef.current) return;

    const video = videoRef.current;
    if (isPlaying) {
      // Race play() against a timeout so a wedged pipeline surfaces as an
      // error instead of hanging silently.
      const playPromise = video.play();
      const timeoutPromise = new Promise<never>((_, reject) => {
        setTimeout(() => reject(new Error('播放超时')), 5000);
      });

      Promise.race([playPromise, timeoutPromise])
        .catch((error: unknown) => {
          log.error('Error playing video:', error);
          if (
            error instanceof Error &&
            (error.name === 'AbortError' || error.name === 'NotSupportedError')
          ) {
            // Custom pipeline can't play this source. (The previous revision
            // wrote a `useNativePlayer` state field that MediaPlayerState
            // does not declare — a strict-mode type error — so the fallback
            // is logged instead; the field was never read anywhere.)
            log.warn('Falling back to native playback for', error.name);
          } else {
            onError(error instanceof Error ? error : new Error(String(error)));
          }
        });
    } else {
      video.pause();
    }
  }, [isPlaying, onError]);

  // Apply volume and mute to both outputs. Previously isMuted only reached
  // the Controls UI and never actually silenced playback.
  useEffect(() => {
    if (!videoRef.current) return;

    const safeVolume = Math.max(0, Math.min(1, volume));
    videoRef.current.volume = safeVolume;
    videoRef.current.muted = isMuted;

    if (gainNodeRef.current) {
      gainNodeRef.current.gain.value = isMuted ? 0 : safeVolume;
    }
  }, [volume, isMuted]);

  // Seek the element when the controlled time drifts by more than 100 ms,
  // avoiding a feedback loop of tiny corrections.
  useEffect(() => {
    if (!videoRef.current) return;

    if (Math.abs(videoRef.current.currentTime - currentTime) > 0.1) {
      log.debug('[FRONTEND] Updating video time:', currentTime);
      videoRef.current.currentTime = currentTime;
    }
  }, [currentTime]);

  useEffect(() => {
    if (!videoRef.current) return;
    videoRef.current.playbackRate = playbackSpeed;
  }, [playbackSpeed]);

  // Scale the video to cover its container (overflow is cropped), centred via
  // a translate transform; re-fit on window resize and metadata load.
  useEffect(() => {
    // Capture the element once so cleanup removes the listener from the same
    // node even if the ref changes before teardown.
    const video = videoRef.current;

    const updateSize = () => {
      if (!video) return;

      const container = video.parentElement;
      if (!container) return;
      // Before metadata arrives videoWidth/videoHeight are 0, which would
      // yield NaN/Infinity pixel styles below.
      if (video.videoWidth <= 0 || video.videoHeight <= 0) return;

      const containerWidth = container.clientWidth;
      const containerHeight = container.clientHeight;
      const videoRatio = video.videoWidth / video.videoHeight;
      const containerRatio = containerWidth / containerHeight;

      if (containerRatio > videoRatio) {
        video.style.width = `${containerWidth}px`;
        video.style.height = `${containerWidth / videoRatio}px`;
      } else {
        video.style.height = `${containerHeight}px`;
        video.style.width = `${containerHeight * videoRatio}px`;
      }

      video.style.position = 'absolute';
      video.style.top = '50%';
      video.style.left = '50%';
      video.style.transform = 'translate(-50%, -50%)';
    };

    window.addEventListener('resize', updateSize);
    video?.addEventListener('loadedmetadata', updateSize);
    updateSize();

    return () => {
      window.removeEventListener('resize', updateSize);
      video?.removeEventListener('loadedmetadata', updateSize);
    };
  }, [aspectRatio]);

  return (
    <div className="media-player-container">
      {state.playbackState === 'loading' && (
        <div className="loading-overlay">
          <Loader size="lg" />
        </div>
      )}

      {state.error && (
        <div className="error-message">
          {state.error}
        </div>
      )}

      {!src && (
        <div className="placeholder">
          <IconPlayerPlay size={48} opacity={0.5} />
        </div>
      )}

      <video
        ref={videoRef}
        src={src}
        style={{
          flex: 1,
          backgroundColor: '#000',
        }}
        controls={false}
      />
      <Controls
        isPlaying={isPlaying}
        currentTime={currentTime}
        duration={duration}
        volume={volume}
        isMuted={isMuted}
        playbackSpeed={playbackSpeed}
        onPlay={onPlay}
        onPause={onPause}
        onSeek={onSeek}
        onVolumeChange={onVolumeChange}
        onMuteToggle={onMuteToggle}
        onSpeedChange={onSpeedChange}
      />
      <canvas
        ref={canvasRef}
        className="video-canvas"
        style={{
          display: src.startsWith('file://') ? 'block' : 'none',
          width: '100%',
          height: '100%',
          objectFit: 'contain',
          backgroundColor: 'transparent',
          position: 'absolute',
          top: 0,
          left: 0
        }}
      />
    </div>
  );
}
