/**
 * MatrixSpectrumService
 * 封装：系统音频采集 → WebAudio 频谱 → 22条(500-15kHz) → 6×22 矩阵
 *
 * @example
 * ```ts
 * const svc = new MatrixSpectrumService();
 * await svc.start({ maxDb: -20, gain: 1.0 });
 * svc.setGain(1.5); // 实时调增益（对振幅做倍乘）
 * const frame = svc.getMatrix(); // { rows, cols, colLevels, values, stats }
 * await svc.stop();
 * ```
 *
 * 事件：svc.onData(cb) / offData(cb) 每帧回调
 *
 * 说明：
 *   1) 依赖 computeMatrix6x22（matrixProcessor.ts）
 *   2) 在 Electron 中若存在 window.electronAPI，会启用/关闭 loopback；非 Electron 环境自动跳过
 */

// 依赖类型声明与算法导入
import { computeMatrix6x22, type MatrixResult } from "./matrixProcessor";

// Optional Electron preload bridge: when present, the main process can toggle
// system-audio loopback capture around the getDisplayMedia call.
declare global {
  interface Window {
    electronAPI?: {
      enableLoopbackAudio?: () => Promise<void>;
      disableLoopbackAudio?: () => Promise<void>;
    };
  }
}

// Options accepted by MatrixSpectrumService.start().
export interface MatrixSpectrumServiceOptions {
  /** Amplitude multiplier applied in-place to raw FFT bytes (default 1.0). */
  gain?: number;
  /** dB ceiling forwarded to the 6×22 matrix normalization (default -20). */
  maxDb?: number;
  /** Target frame rate; values are clamped to 1–60 by setFps (default 30). */
  fps?: number;
  /** AnalyserNode FFT size (default 1024; setFftSize only accepts 512/1024/2048). */
  fftSize?: number;
}

// Audio device descriptor (deviceId + human-readable label).
// NOTE(review): this interface is never referenced anywhere in this file —
// likely dead code; confirm before removing.
interface AudioDeviceInfo {
  deviceId: string;
  label: string;
}

// Half-open [start, end) range of FFT bin indices averaged into one of the
// 22 spectrum bands (see _prepareDetailedBins / _computeDetailed22Fast).
interface DetailedBinRange {
  start: number;
  end: number;
}

/**
 * Audio spectrum matrix service.
 *
 * Pipeline: system-audio capture (getDisplayMedia screen share) →
 * WebAudio AnalyserNode FFT → 22 bands over 500 Hz–15 kHz →
 * 6×22 matrix via computeMatrix6x22, delivered once per frame.
 */
export default class MatrixSpectrumService {
  // WebAudio / capture handles; all null while the service is stopped.
  private audioContext: AudioContext | null = null;
  private analyser: AnalyserNode | null = null;
  private mediaStream: MediaStream | null = null;
  private sourceNode: MediaStreamAudioSourceNode | null = null;
  private animationId: number | null = null;
  private isRunning = false;

  // FFT configuration and per-frame byte buffers.
  private fftSize: number;
  private frequencyData: Uint8Array<ArrayBuffer> | null = null;
  // NOTE(review): timeData is allocated but never read in this file — confirm it is still needed.
  private timeData: Uint8Array | null = null;

  // Tunables: amplitude multiplier, dB ceiling, and frame pacing.
  private gain = 1.0;
  private maxDb = -20;
  private fps = 30;
  private frameInterval = 1000 / 30; // ms between frames; kept in sync by setFps()
  private _lastFrameTs = 0;

  // 22 bands spanning 500 Hz–15 kHz; detailedData holds the per-band bin averages.
  private detailedBars = 22;
  private detailedRange = { min: 500, max: 15000 };
  private detailedData: number[] = new Array(this.detailedBars).fill(0);
  private latestMatrix: MatrixResult | null = null;

  // Per-frame callbacks registered via onData()/offData().
  private _subscribers = new Set<(frame: MatrixResult) => void>();

  // Device-change monitoring state ("devicechange" event + 3 s polling fallback).
  private autoDeviceSwitch = true;
  private _deviceSnapshot = "";
  private _deviceMonitorTimer: ReturnType<typeof setInterval> | null = null;
  private _onDeviceChange: () => void;
  // Precomputed FFT-bin ranges for the 22 bands; rebuilt when fftSize changes.
  private _detailRanges: DetailedBinRange[] | null = null;

  // The constructor acquires no resources; capture only begins in start().
  constructor() {
    this.fftSize = 1024;
    this._onDeviceChange = this._handleDeviceChange.bind(this);
  }

  /**
   * Start capturing system audio and producing matrix frames.
   * No-op if the service is already running.
   *
   * @param opts Optional overrides for gain, maxDb, fps, and fftSize.
   * @throws Error with a user-facing (Chinese) message when screen sharing is
   *         unsupported, denied, or no audio device is found; any partially
   *         acquired resources are released before the error propagates.
   */
  async start(opts?: MatrixSpectrumServiceOptions): Promise<void> {
    if (this.isRunning) return;
    opts = opts || {};

    if (typeof opts.gain === "number") this.gain = opts.gain;
    if (typeof opts.maxDb === "number") this.maxDb = opts.maxDb;
    if (typeof opts.fps === "number" && opts.fps > 0) this.setFps(opts.fps);
    if (typeof opts.fftSize === "number") this.fftSize = opts.fftSize;

    try {
      // Optional Electron loopback switch: enabled BEFORE capture so the
      // display-media stream carries system audio. Best-effort — failures
      // are swallowed so non-Electron environments keep working.
      if (
        typeof window !== "undefined" &&
        window.electronAPI &&
        window.electronAPI.enableLoopbackAudio
      ) {
        try {
          await window.electronAPI.enableLoopbackAudio();
        } catch (_) {}
      }

      // Feature detection: getDisplayMedia is required for system-audio capture.
      if (!navigator.mediaDevices || !navigator.mediaDevices.getDisplayMedia) {
        throw new Error(
          "浏览器不支持屏幕共享API，请使用Chrome、Edge或Firefox最新版本"
        );
      }

      // Capture system audio via screen share. Video is requested as well
      // (presumably because some browsers refuse audio-only display capture
      // — TODO confirm), then discarded below.
      try {
        this.mediaStream = await navigator.mediaDevices.getDisplayMedia({
          audio: true,
          video: true,
        });
      } catch (error: any) {
        // Map well-known DOMException names to user-facing messages.
        if (error.name === "NotAllowedError") {
          throw new Error(
            "用户拒绝了屏幕共享权限，请在浏览器中允许屏幕共享以使用音乐律动功能"
          );
        } else if (error.name === "NotSupportedError") {
          throw new Error(
            "当前环境不支持屏幕共享功能，请确保在HTTPS环境下运行"
          );
        } else if (error.name === "NotFoundError") {
          throw new Error("未找到可用的音频设备，请检查系统音频设置");
        } else {
          throw new Error(
            `获取音频流失败: ${error.message || error.name || "未知错误"}`
          );
        }
      }
      // Drop the video tracks: stop them first, then detach them from the stream.
      this.mediaStream.getVideoTracks().forEach((t) => {
        try {
          t.stop();
        } catch (_) {}
      });
      this.mediaStream
        .getVideoTracks()
        .forEach((t) => this.mediaStream!.removeTrack(t));

      // Loopback is disabled again once the stream is established (best-effort).
      if (
        typeof window !== "undefined" &&
        window.electronAPI &&
        window.electronAPI.disableLoopbackAudio
      ) {
        try {
          await window.electronAPI.disableLoopbackAudio();
        } catch (_) {}
      }

      // WebAudio graph: stream source → analyser (no audible output node).
      this.audioContext = new window.AudioContext();
      this.sourceNode = this.audioContext.createMediaStreamSource(
        this.mediaStream
      );
      this.analyser = this.audioContext.createAnalyser();
      this.analyser.fftSize = this.fftSize;
      this.analyser.smoothingTimeConstant = 0.7;
      this.sourceNode.connect(this.analyser);

      this.frequencyData = new Uint8Array(
        this.analyser.frequencyBinCount
      ) as Uint8Array<ArrayBuffer>;
      this.timeData = new Uint8Array(this.fftSize);

      this._prepareDetailedBins();

      this.isRunning = true;
      this._startDeviceMonitor();
      this._loop();
    } catch (err) {
      // Roll back any partially acquired resources before propagating.
      this.isRunning = false;
      this._cleanup();
      throw err;
    }
  }

  /** Stop the frame loop and device monitoring, and release all audio resources. */
  async stop(): Promise<void> {
    this.isRunning = false;
    if (this.animationId) {
      cancelAnimationFrame(this.animationId);
      this.animationId = null;
    }
    this._stopDeviceMonitor();
    this._cleanup();
  }

  /**
   * Best-effort teardown of stream, source node, and audio context.
   * Each step is individually wrapped so one failure cannot prevent the others;
   * all handles and buffers are nulled afterwards.
   */
  _cleanup(): void {
    try {
      if (this.mediaStream) {
        this.mediaStream.getTracks().forEach((t) => t.stop());
      }
    } catch (_) {}
    this.mediaStream = null;
    try {
      if (this.sourceNode) {
        this.sourceNode.disconnect();
      }
    } catch (_) {}
    this.sourceNode = null;
    try {
      if (this.audioContext) {
        this.audioContext.close();
      }
    } catch (_) {}
    this.audioContext = null;
    this.analyser = null;
    this.frequencyData = null;
    this.timeData = null;
    this.latestMatrix = null;
  }

  /** Set the amplitude multiplier; negative or falsy values clamp to 0. */
  setGain(g: number): void {
    this.gain = Math.max(0, g || 0);
  }

  /** Set the dB ceiling used by the matrix normalization (takes effect next frame). */
  setMaxDb(db: number): void {
    this.maxDb = db;
  }

  /** Set the target frame rate, rounded and clamped to 1–60 fps. */
  setFps(fps: number): void {
    this.fps = Math.max(1, Math.min(60, Math.round(fps)));
    this.frameInterval = 1000 / this.fps;
  }

  /**
   * Change the FFT size. Only 512, 1024, and 2048 are accepted; any other
   * value is silently ignored. When running, the analyser and buffers are
   * reconfigured in place and the 22-band bin ranges are recomputed.
   */
  async setFftSize(fftSize: number): Promise<void> {
    const allowed = [512, 1024, 2048];
    if (!allowed.includes(fftSize)) return;
    this.fftSize = fftSize;
    if (this.isRunning && this.analyser) {
      // Reconfigure the analyser and reallocate the per-frame buffers.
      this.analyser.fftSize = this.fftSize;
      this.frequencyData = new Uint8Array(
        this.analyser.frequencyBinCount
      ) as Uint8Array<ArrayBuffer>;
      this.timeData = new Uint8Array(this.fftSize);
      this._prepareDetailedBins();
    }
  }

  /** Subscribe to per-frame matrix callbacks; non-functions are ignored. */
  onData(cb: (frame: MatrixResult) => void): void {
    if (typeof cb === "function") this._subscribers.add(cb);
  }

  /** Unsubscribe a callback previously registered with onData(). */
  offData(cb: (frame: MatrixResult) => void): void {
    this._subscribers.delete(cb);
  }

  /** Latest computed 6×22 matrix, or null when stopped / before the first frame. */
  getMatrix(): MatrixResult | null {
    return this.latestMatrix;
  }

  /**
   * Per-frame loop driven by requestAnimationFrame.
   * Throttles to the configured fps, reads the FFT bytes, applies gain,
   * reduces them to 22 bands, computes the 6×22 matrix, and notifies
   * subscribers. Exits silently once stopped.
   */
  _loop(ts?: DOMHighResTimeStamp): void {
    if (!this.isRunning || !this.analyser) return;

    // fps throttle: if this rAF tick arrived too soon, just reschedule.
    if (ts && ts - this._lastFrameTs < this.frameInterval) {
      this.animationId = requestAnimationFrame((t) => this._loop(t));
      return;
    }
    this._lastFrameTs = ts || performance.now();

    this.analyser.getByteFrequencyData(this.frequencyData!);

    // Apply gain as an in-place multiply clamped to 255; assignment back
    // into the Uint8Array truncates the float result to a byte.
    if (this.gain !== 1.0 && this.frequencyData) {
      for (let i = 0; i < this.frequencyData.length; i++) {
        const v = Math.min(255, this.frequencyData[i] * this.gain);
        this.frequencyData[i] = v;
      }
    }

    // Average the FFT bins into the 22 bands (500 Hz–15 kHz window).
    this._computeDetailed22Fast();

    // Reduce the 22 bands to the 6×22 matrix.
    const cfg = {
      rows: 6,
      cols: 22,
      minDbCut: -50,
      maxDb: this.maxDb,
      gamma: 0.7,
      minDisplay: 0.05,
    };
    this.latestMatrix = computeMatrix6x22(this.detailedData, cfg);

    // Notify subscribers; a throwing callback must not break the loop.
    if (this.latestMatrix && this._subscribers.size) {
      this._subscribers.forEach((cb) => {
        try {
          cb(this.latestMatrix!);
        } catch (_) {}
      });
    }

    this.animationId = requestAnimationFrame((t) => this._loop(t));
  }

  /**
   * Precompute the FFT-bin index range for each of the 22 bands by mapping
   * the 500 Hz–15 kHz window onto the analyser's bins at the context's
   * sample rate. Requires a live AudioContext (no-op otherwise).
   */
  _prepareDetailedBins(): void {
    if (!this.audioContext) return;

    const sampleRate = this.audioContext.sampleRate;
    const nyquist = sampleRate / 2;
    const bins = this.analyser!.frequencyBinCount;
    const minIndex = Math.floor((this.detailedRange.min / nyquist) * bins);
    const maxIndex = Math.floor((this.detailedRange.max / nyquist) * bins);
    const rangeSize = Math.max(1, maxIndex - minIndex);
    const barSize = Math.max(1, Math.floor(rangeSize / this.detailedBars));
    this._detailRanges = [];
    for (let i = 0; i < this.detailedBars; i++) {
      const start = minIndex + i * barSize;
      const end = Math.min(bins, start + barSize);
      this._detailRanges.push({ start, end });
    }
  }

  /**
   * Fill detailedData with the mean byte value of each band's [start, end)
   * bin range, using the ranges precomputed by _prepareDetailedBins.
   */
  _computeDetailed22Fast(): void {
    if (!this._detailRanges) {
      this._prepareDetailedBins();
    }
    const arr = this.frequencyData!;
    for (let i = 0; i < this._detailRanges!.length; i++) {
      const { start, end } = this._detailRanges![i];
      let sum = 0;
      for (let k = start; k < end; k++) sum += arr[k];
      const count = Math.max(1, end - start);
      this.detailedData[i] = sum / count;
    }
  }

  // ================ Device-change monitoring & auto-switching ================
  /**
   * Begin watching for audio-device changes: subscribe to the mediaDevices
   * "devicechange" event, plus a 3 s polling fallback that compares
   * audiooutput snapshots for environments where the event never fires.
   */
  _startDeviceMonitor(): void {
    if (!this.autoDeviceSwitch) return;
    try {
      if (navigator.mediaDevices && navigator.mediaDevices.addEventListener) {
        navigator.mediaDevices.addEventListener(
          "devicechange",
          this._onDeviceChange
        );
      }
    } catch (_) {}

    // Polling fallback: snapshot audiooutput devices every 3 seconds.
    // NOTE(review): the baseline snapshot is first set on the initial tick,
    // so a change within the first ~3 s goes undetected — confirm acceptable.
    this._deviceMonitorTimer = setInterval(async () => {
      try {
        const snap = await this._makeDeviceSnapshot();
        if (snap && this._deviceSnapshot && snap !== this._deviceSnapshot) {
          this._handleDeviceChange();
        }
        if (snap) this._deviceSnapshot = snap;
      } catch (_) {}
    }, 3000);
  }

  /** Detach the "devicechange" listener and cancel the polling timer. */
  _stopDeviceMonitor(): void {
    try {
      if (
        navigator.mediaDevices &&
        navigator.mediaDevices.removeEventListener
      ) {
        navigator.mediaDevices.removeEventListener(
          "devicechange",
          this._onDeviceChange
        );
      }
    } catch (_) {}
    if (this._deviceMonitorTimer) {
      clearInterval(this._deviceMonitorTimer);
      this._deviceMonitorTimer = null;
    }
  }

  /**
   * Build an order-independent signature of the current audio-output devices.
   * Returns "" when device enumeration is unavailable.
   */
  async _makeDeviceSnapshot(): Promise<string> {
    if (!navigator.mediaDevices || !navigator.mediaDevices.enumerateDevices)
      return "";
    const list = await navigator.mediaDevices.enumerateDevices();
    // Only audiooutput deviceId/label pairs contribute to the signature.
    const outs = list.filter((d) => d.kind === "audiooutput");
    const sig = outs
      .map((d) => `${d.deviceId}:${d.label}`)
      .sort()
      .join("|");
    return sig;
  }

  /** On device change: restart the capture stream (best-effort, only while running). */
  async _handleDeviceChange(): Promise<void> {
    if (!this.isRunning) return;
    try {
      await this._restartStream();
    } catch (_) {}
  }

  /**
   * Re-acquire the system-audio stream and swap it into the existing audio
   * graph: same loopback-toggle/capture/video-strip sequence as start(),
   * then replace the old sourceNode and stream with the new ones.
   */
  async _restartStream(): Promise<void> {
    // Re-enable loopback before capturing (best-effort, Electron only).
    try {
      if (
        typeof window !== "undefined" &&
        window.electronAPI &&
        window.electronAPI.enableLoopbackAudio
      ) {
        await window.electronAPI.enableLoopbackAudio();
      }
    } catch (_) {}

    const newStream = await navigator.mediaDevices.getDisplayMedia({
      audio: true,
      video: true,
    });
    newStream.getVideoTracks().forEach((t) => {
      try {
        t.stop();
      } catch (_) {}
    });
    newStream.getVideoTracks().forEach((t) => newStream.removeTrack(t));

    try {
      if (
        typeof window !== "undefined" &&
        window.electronAPI &&
        window.electronAPI.disableLoopbackAudio
      ) {
        await window.electronAPI.disableLoopbackAudio();
      }
    } catch (_) {}

    // Swap nodes: disconnect and stop the old stream, then attach the new
    // source to the existing analyser (audio context is reused).
    if (this.sourceNode) {
      try {
        this.sourceNode.disconnect();
      } catch (_) {}
    }
    try {
      if (this.mediaStream) {
        this.mediaStream.getTracks().forEach((t) => t.stop());
      }
    } catch (_) {}
    this.mediaStream = newStream;
    this.sourceNode = this.audioContext!.createMediaStreamSource(
      this.mediaStream
    );
    this.sourceNode.connect(this.analyser!);
  }
}
