import { FilterOptionsWithId } from 'common/utils/eq-filter/types';
import { useMemoizedFn } from 'ahooks';
import { Alert, Flex, Switch, message } from 'antd'; // Assuming you use antd
import { memo, useEffect, useRef, useState } from 'react';

/** Props for the {@link AudioPlayer} component. */
interface AudioPlayerProps {
  /** Audio source URL. Played via <audio crossorigin="anonymous">, so a cross-origin src must allow CORS. */
  src: string;
  /** EQ band settings applied in series as BiquadFilterNodes when the EQ switch is on. */
  eqs: FilterOptionsWithId[]; // Array of EQ settings
  /** When false, playback is paused and the AudioContext suspended. Defaults to true. */
  isActive?: boolean;
}

/**
 * Estimates the peak gain, in dB, of the combined frequency response of the
 * given EQ chain.
 *
 * Samples 100 log-spaced frequencies from 20 Hz to 20 kHz, queries each
 * biquad filter's magnitude response via `getFrequencyResponse`, multiplies
 * the magnitudes (series filters multiply in the linear domain), and converts
 * the peak to dB.
 *
 * @param eqs - EQ band settings; each becomes one BiquadFilterNode.
 * @param audioCtx - Context used only to create throwaway filter nodes for analysis.
 * @returns Peak response gain in dB; 0 when `eqs` is empty or missing.
 */
function getMaxFreqResponseGain(eqs: FilterOptionsWithId[], audioCtx: AudioContext) {
  if (!eqs || eqs.length === 0) return 0;

  // 100 sample frequencies, logarithmically distributed over 20 Hz – 20 kHz:
  // 20 * 1000^(i / (n-1)) sweeps 20 → 20000.
  const freqPoints = 100;
  const freqHz = new Float32Array(freqPoints);
  for (let i = 0; i < freqPoints; i++) {
    freqHz[i] = 20 * Math.pow(1000, i / (freqPoints - 1));
  }

  // Cascaded linear magnitude at each sample point; starts at unity gain.
  const mag = new Float32Array(freqPoints).fill(1);

  // Scratch buffers reused across filters — getFrequencyResponse overwrites
  // them on every call. Phase is required by the API but unused here.
  const magResponse = new Float32Array(freqPoints);
  const phaseResponse = new Float32Array(freqPoints);

  // Filters are applied in series, so their magnitude responses multiply.
  eqs.forEach((eq) => {
    const filter = audioCtx.createBiquadFilter();
    filter.type = eq.filterType;
    filter.frequency.value = eq.frequency;
    filter.Q.value = eq.q;
    filter.gain.value = eq.gain;

    filter.getFrequencyResponse(freqHz, magResponse, phaseResponse);

    for (let i = 0; i < freqPoints; i++) {
      mag[i] *= magResponse[i];
    }
  });

  // Peak linear gain → dB.
  let maxMag = 0;
  for (let i = 0; i < freqPoints; i++) {
    if (mag[i] > maxMag) maxMag = mag[i];
  }
  return 20 * Math.log10(maxMag);
}

/**
 * Audio player with a toggleable Web Audio EQ chain:
 *   <audio> source -> [BiquadFilterNode…] -> GainNode -> destination
 *
 * The AudioContext and nodes are created lazily on first play, because
 * browsers require a user gesture before audio may start. The gain node
 * applies automatic compensation derived from the EQ's frequency response:
 * a fixed -3 dB of headroom normally, or attenuation down to 0 dB peak
 * ("dynamic protect") when the EQ boosts by more than +3 dB.
 */
const AudioPlayer = (props: AudioPlayerProps) => {
  const { src, eqs, isActive = true } = props;

  const audioRef = useRef<HTMLAudioElement>(null);
  const audioContextRef = useRef<AudioContext | null>(null);
  const sourceNodeRef = useRef<MediaElementAudioSourceNode | null>(null);
  const filterNodesRef = useRef<BiquadFilterNode[]>([]);
  const gainNodeRef = useRef<GainNode | null>(null);

  // Whether the EQ filter chain is inserted into the graph.
  const [isEqEnabled, setIsEqEnabled] = useState(true);
  // True when the EQ would boost past +3 dB and we attenuated to avoid clipping.
  const [isDynamicProtect, setIsDynamicProtect] = useState(false);

  /**
   * Tears down and rebuilds the whole audio graph from the current
   * `eqs` / `isEqEnabled` state. Safe to call repeatedly.
   */
  const setupAudioGraph = useMemoizedFn(() => {
    if (
      !audioContextRef.current ||
      !sourceNodeRef.current ||
      !gainNodeRef.current
    ) {
      console.error('Audio context or nodes not initialized');
      return;
    }

    const audioCtx = audioContextRef.current;
    let previousNode: AudioNode = sourceNodeRef.current;

    // Disconnect the previous wiring before rebuilding.
    previousNode.disconnect();
    filterNodesRef.current.forEach((node) => node.disconnect());
    gainNodeRef.current.disconnect();
    filterNodesRef.current = [];

    // The bands actually inserted into the graph. This same list feeds the
    // gain-compensation analysis below, so compensation always matches what
    // is audible (previously it was computed from `eqs` even with EQ off).
    const activeEqs = isEqEnabled && eqs ? eqs : [];

    activeEqs.forEach((eqSetting, index) => {
      try {
        const filter = audioCtx.createBiquadFilter();
        filter.type = eqSetting.filterType;
        filter.frequency.setValueAtTime(
          eqSetting.frequency,
          audioCtx.currentTime,
        );
        filter.Q.setValueAtTime(eqSetting.q, audioCtx.currentTime);
        filter.gain.setValueAtTime(eqSetting.gain, audioCtx.currentTime);

        previousNode.connect(filter);
        previousNode = filter;
        filterNodesRef.current.push(filter);
      } catch (error) {
        // Skip a malformed band but keep building the rest of the chain.
        console.error(
          `Error creating or setting parameters for filter ${index}:`,
          error,
          eqSetting,
        );
      }
    });

    // Automatic gain compensation from frequency-response analysis:
    // default -3 dB headroom; if the chain boosts more than +3 dB,
    // attenuate by the peak so the overall response tops out at 0 dB.
    const maxGainDb = getMaxFreqResponseGain(activeEqs, audioCtx);
    let gainCompensate = Math.pow(10, -3 / 20); // -3 dB ≈ 0.707
    if (maxGainDb > 3) {
      setIsDynamicProtect(true);
      gainCompensate = Math.pow(10, -maxGainDb / 20);
    } else {
      setIsDynamicProtect(false);
    }

    gainNodeRef.current.gain.setValueAtTime(
      gainCompensate,
      audioCtx.currentTime,
    );
    // Log the dB of the gain actually applied (not always -maxGainDb,
    // since the default -3 dB path may have been taken).
    console.log(
      '频响自动增益补偿:',
      gainCompensate,
      'dB:',
      20 * Math.log10(gainCompensate),
    );

    previousNode.connect(gainNodeRef.current);
    gainNodeRef.current.connect(audioCtx.destination);

    console.log('Audio graph rebuilt.');
  });

  /**
   * Lazily creates the AudioContext, media-element source and gain node.
   * @returns true when the audio pipeline is (or already was) initialized.
   */
  const initializeAudio = useMemoizedFn(() => {
    if (!audioRef.current) return false;
    if (audioContextRef.current) return true; // already initialized

    try {
      const AudioContext =
        window.AudioContext || (window as any).webkitAudioContext;
      if (!AudioContext) {
        alert('您的浏览器不支持，请使用最新版谷歌或Edge浏览器打开');
        return false;
      }
      const audioCtx = new AudioContext();
      const sourceNode = audioCtx.createMediaElementSource(audioRef.current);
      const gainNode = audioCtx.createGain();

      audioContextRef.current = audioCtx;
      sourceNodeRef.current = sourceNode;
      gainNodeRef.current = gainNode;

      console.log('AudioContext and SourceNode initialized.');
      return true;
    } catch (error) {
      // createMediaElementSource throws for cross-origin media without CORS.
      console.error(
        'Error initializing AudioContext or MediaElementSource:',
        error,
      );
      alert(
        `Failed to initialize audio. If the audio source is from a different domain, ensure CORS is configured correctly and the <audio> tag has crossorigin="anonymous". Error: ${error}`,
      );
      return false;
    }
  });

  /** Play handler: init pipeline, resume a suspended context, wire the graph. */
  const handlePlay = useMemoizedFn(async () => {
    if (!initializeAudio()) {
      audioRef.current?.pause();
      return;
    }

    const audioCtx = audioContextRef.current;

    if (audioCtx && audioCtx.state === 'suspended') {
      try {
        await audioCtx.resume();
        console.log('AudioContext resumed.');
      } catch (err) {
        console.error('Error resuming AudioContext:', err);
        return;
      }
    }

    setupAudioGraph();
  });

  const handleEqToggle = useMemoizedFn((checked: boolean) => {
    setIsEqEnabled(checked);
  });

  // Rebuild the graph whenever the EQ settings or the on/off switch change,
  // but only after the pipeline has been initialized by a first play.
  useEffect(() => {
    if (
      audioContextRef.current &&
      sourceNodeRef.current &&
      gainNodeRef.current
    ) {
      console.log(
        'EQ settings or enabled state changed, rebuilding audio graph...',
      );
      setupAudioGraph();
    }
  }, [eqs, isEqEnabled]);

  // Close the AudioContext on unmount. The ref must be read inside the
  // cleanup: the context is created lazily on first play, so capturing
  // it at mount time would always see null and never close anything.
  useEffect(() => {
    return () => {
      const currentAudioCtx = audioContextRef.current;
      if (currentAudioCtx && currentAudioCtx.state !== 'closed') {
        currentAudioCtx.close().then(() => console.log('AudioContext closed.'));
      }
    };
  }, []);

  // Pause playback and suspend the context while the player is inactive.
  useEffect(() => {
    console.log('isActive', src, isActive);
    if (!isActive) {
      audioRef.current?.pause();
      const currentAudioCtx = audioContextRef.current;
      if (currentAudioCtx && currentAudioCtx.state !== 'suspended') {
        currentAudioCtx.suspend();
      }
    }
  }, [isActive]);

  return (
    <>
      <Flex align="center" justify="center" gap={10}>
        <audio
          ref={audioRef}
          src={src}
          controls
          onPlay={handlePlay}
          crossOrigin="anonymous"
          className="w-[500px]"
        />
        <span>EQ</span>
        <Switch checked={isEqEnabled} onChange={handleEqToggle} />
      </Flex>
      {isDynamicProtect && <Alert className='mt-[8px]' showIcon message='已开启动态保护（避免音质劣化），若听不清，建议适当提高系统音量。' type="warning" />}
    </>
  );
};

export default memo(AudioPlayer);
