/// <reference lib="dom" />
/// <reference lib="dom.iterable" />
/// <reference lib="dom.webcodecs" />

/**
 * Receives every encoded H.264 packet produced by MediaCodecCapture.
 * NOTE(review): these comments originally labelled `timestamp` as
 * microseconds, but the capture implementation divides chunk timestamps by
 * 1000 before invoking this callback, so the delivered value is in
 * milliseconds — confirm the intended unit with consumers.
 */
export type EncodedVideoCallback = (data: Uint8Array, info: {
  codec: string;               // codec string, e.g. 'avc1.42E01E'
  timestamp: number;           // milliseconds (see NOTE above)
  compositionTime: number;     // pts - dts
  isKey: boolean;              // true for keyframes
  width: number;               // coded width in pixels
  height: number;              // coded height in pixels
  description?: Uint8Array;    // avcC box (present only under some implementations/configurations)
}) => void;

/**
 * Receives every encoded AAC packet and, once, the AudioSpecificConfig as a
 * sequence-header packet (`isSequenceHeader: true`, `frames: 0`).
 * NOTE(review): data packets carry chunk.timestamp / 1000 (milliseconds) in
 * the current implementation — verify the sequence-header packet uses the
 * same unit before relying on it.
 */
export type EncodedAudioCallback = (data: Uint8Array, info: {
  codec: string;               // codec string, e.g. 'mp4a.40.2'
  timestamp: number;           // milliseconds for data packets (see NOTE above)
  sampleRate: number;          // e.g. 48000
  channels: number;            // e.g. 2
  isSequenceHeader: boolean;   // true when `data` is the AudioSpecificConfig
  frames: number;              // PCM frames in the packet (0 when unavailable)
  description?: Uint8Array;    // AudioSpecificConfig
}) => void;

/** Tuning options for MediaCodecCapture; every field is optional and defaulted. */
export interface EncodeOptions {
  video?: MediaTrackConstraints;      // camera constraints (default: `true`)
  audio?: MediaTrackConstraints;      // microphone constraints (default: 48 kHz stereo with processing)
  videoBitrate?: number;              // bps, default 2_500_000
  videoFramerate?: number;            // fps, default inferred from track settings, else 30
  videoGopMs?: number;                // keyframe interval in milliseconds, default 2000
  avcFormat?: 'annexb' | 'avc';       // H264 output format, default 'annexb'
  audioBitrate?: number;              // bps, default 128_000
}

/**
 * Captures camera + microphone via getUserMedia and encodes them with the
 * WebCodecs API (H.264 Baseline video, AAC-LC audio). Encoded packets are
 * delivered through the `onVideoEncoded` / `onAudioEncoded` callbacks with
 * millisecond timestamps rebased to the start of the capture session.
 */
export class MediaCodecCapture {
  private stream?: MediaStream;

  private videoEl?: HTMLVideoElement;
  // True when we created the hidden preview element ourselves and must remove it.
  private previewOwned = false;

  private vReader?: ReadableStreamDefaultReader<VideoFrame>;
  // blob: URL of the generated AudioWorklet module; revoked on stop().
  private aProcessorUrl?: string;
  private audioCtx?: AudioContext;
  private audioSource?: MediaStreamAudioSourceNode;
  private aWorkletNode?: AudioWorkletNode;
  private zeroGain?: GainNode;

  private vEncoder?: VideoEncoder;
  private aEncoder?: AudioEncoder;

  private running = false;
  // Total PCM frames handed to the audio encoder; drives synthetic audio timestamps.
  private audioFrameCount = 0;
  // performance.now() of the last forced keyframe.
  private lastKeyMs = 0;
  // Timestamp (ms) of the first encoded audio / video packet, used to align streams.
  private firstAudioPktTimeMs = 0;
  private firstVideoPktTimeMs = 0;

  private readonly opts: Required<Pick<EncodeOptions,
    'videoBitrate' | 'videoFramerate' | 'videoGopMs' | 'avcFormat' | 'audioBitrate'
  >> & EncodeOptions;

  public onVideoEncoded?: EncodedVideoCallback;
  public onAudioEncoded?: EncodedAudioCallback;

  constructor(options: EncodeOptions = {}) {
    // Spread the caller's options FIRST so that an explicitly-undefined field
    // (e.g. `{ videoBitrate: undefined }`) cannot clobber a computed default
    // and break the Required<...> contract of `opts`.
    this.opts = {
      ...options,
      videoBitrate: options.videoBitrate ?? 2_500_000,
      videoFramerate: options.videoFramerate ?? 30,
      videoGopMs: options.videoGopMs ?? 2000,
      avcFormat: options.avcFormat ?? 'annexb',
      audioBitrate: options.audioBitrate ?? 128_000,
    };
  }

  /**
   * Attach (or detach, by passing undefined) a <video> element for preview.
   * A previously auto-created hidden element is removed from the DOM.
   */
  setPreviewElement(el?: HTMLVideoElement) {
    if (this.videoEl && this.previewOwned) {
      this.videoEl.remove();
    }
    this.videoEl = el;
    this.previewOwned = false;
  }

  /** The live MediaStream while capturing, otherwise undefined. */
  get mediaStream(): MediaStream | undefined {
    return this.stream;
  }

  /**
   * Opens camera/microphone and starts both encode pipelines.
   * No-op when already running. Throws if WebCodecs is unavailable or
   * getUserMedia is rejected; partially-started resources are released.
   */
  async start(previewEl?: HTMLVideoElement) {
    if (this.running) return;

    if (!('VideoEncoder' in window) || !('AudioEncoder' in window)) {
      throw new Error('This browser does not support WebCodecs encoders.');
    }

    if (previewEl) this.setPreviewElement(previewEl);

    // Request the media stream (prefer 48 kHz stereo for audio).
    this.stream = await navigator.mediaDevices.getUserMedia({
      video: this.opts.video ?? true,
      audio: this.opts.audio ?? {
        channelCount: 2,
        sampleRate: 48000,
        echoCancellation: true,
        noiseSuppression: true,
        autoGainControl: true,
      },
    });

    // Reset per-session timing state so a restarted capture begins at t=0.
    this.audioFrameCount = 0;
    this.lastKeyMs = 0;
    this.firstAudioPktTimeMs = 0;
    this.firstVideoPktTimeMs = 0;

    // Set the flag BEFORE the pipelines spin up: both the video read loop and
    // the audio worklet message handler bail out while !running, so setting
    // it only after the (async) pipeline setup could silently drop the first
    // frames delivered in between.
    this.running = true;

    try {
      await this.setupPreview();
      await this.startVideoPipeline();
      await this.startAudioPipeline();
    } catch (e) {
      // Release whatever was acquired before the failure, then surface it.
      await this.stop();
      throw e;
    }
  }

  /** Flushes and closes both encoders and releases every acquired resource. */
  async stop() {
    this.running = false;

    // Flush, then close the encoders. close() throws InvalidStateError on an
    // already-closed encoder, so it is guarded just like flush().
    try { await this.vEncoder?.flush(); } catch {}
    try { await this.aEncoder?.flush(); } catch {}
    try { this.vEncoder?.close(); } catch {}
    try { this.aEncoder?.close(); } catch {}
    this.vEncoder = undefined;
    this.aEncoder = undefined;

    // Stop the video frame reader.
    try { await this.vReader?.cancel(); } catch {}
    this.vReader = undefined;

    // Tear down the audio graph.
    if (this.aWorkletNode) {
      try { this.aWorkletNode.port.onmessage = null; } catch {}
      try { this.aWorkletNode.disconnect(); } catch {}
    }
    this.aWorkletNode = undefined;

    if (this.zeroGain) {
      try { this.zeroGain.disconnect(); } catch {}
    }
    this.zeroGain = undefined;

    if (this.audioSource) {
      try { this.audioSource.disconnect(); } catch {}
    }
    this.audioSource = undefined;

    if (this.audioCtx) {
      try { await this.audioCtx.close(); } catch {}
    }
    this.audioCtx = undefined;

    if (this.aProcessorUrl) {
      URL.revokeObjectURL(this.aProcessorUrl);
      this.aProcessorUrl = undefined;
    }

    // Stop the capture tracks.
    if (this.stream) {
      this.stream.getTracks().forEach(t => t.stop());
      this.stream = undefined;
    }

    // Preview cleanup.
    if (this.videoEl) {
      try { this.videoEl.srcObject = null; } catch {}
      if (this.previewOwned) this.videoEl.remove();
      this.videoEl = undefined;
      this.previewOwned = false;
    }

    // Reset timing state so the next start() begins from scratch.
    this.audioFrameCount = 0;
    this.lastKeyMs = 0;
    this.firstAudioPktTimeMs = 0;
    this.firstVideoPktTimeMs = 0;
  }

  // =============== internals ===============

  /** Binds the stream to the preview element, creating a hidden one if needed. */
  private async setupPreview() {
    if (!this.stream) return;

    let v = this.videoEl;
    if (!v) {
      // No element supplied: create an off-screen 1x1 element so playback
      // keeps running without being visible.
      v = document.createElement('video');
      v.style.position = 'fixed';
      v.style.left = '-99999px';
      v.style.top = '-99999px';
      v.width = 1;
      v.height = 1;
      document.body.appendChild(v);
      this.videoEl = v;
      this.previewOwned = true;
    }
    v.playsInline = true;
    v.muted = true;
    v.srcObject = this.stream;
    // Autoplay may be blocked; a preview failure must not abort capture.
    try { await v.play(); } catch {}
  }

  /** Reads VideoFrames off the camera track and feeds them to an H.264 encoder. */
  private async startVideoPipeline() {
    if (!this.stream) return;

    const vTrack = this.stream.getVideoTracks()[0];
    if (!vTrack) return;

    const settings = vTrack.getSettings?.() ?? {};
    const width = settings.width || 1280;
    const height = settings.height || 720;
    const framerate = Math.round(settings.frameRate || this.opts.videoFramerate);

    // MediaStreamTrackProcessor is missing from some TS lib versions, hence the cast.
    const processor = new (window as any).MediaStreamTrackProcessor({ track: vTrack }) as MediaStreamTrackProcessor<VideoFrame>;
    const reader = processor.readable.getReader();
    this.vReader = reader;

    this.vEncoder = new VideoEncoder({
      output: (chunk, meta) => {
        const data = new Uint8Array(chunk.byteLength);
        chunk.copyTo(data);
        const desc = meta?.decoderConfig?.description
          ? new Uint8Array(meta.decoderConfig.description)
          : undefined;

        // Rebase timestamps so the first packet is t=0, or aligned with the
        // audio stream when audio produced packets first.
        const chunkMs = chunk.timestamp / 1000;
        let tsMs: number;
        if (!this.firstVideoPktTimeMs) {
          this.firstVideoPktTimeMs = chunkMs;
          tsMs = this.firstAudioPktTimeMs > 0 ? this.firstAudioPktTimeMs : 0;
        } else {
          const deltaMs = chunkMs - this.firstVideoPktTimeMs;
          tsMs = this.firstAudioPktTimeMs > 0
            ? this.firstAudioPktTimeMs + deltaMs
            : deltaMs;
        }

        this.onVideoEncoded?.(data, {
          codec: 'avc1.42E01F', // H.264 Baseline, Level 3.1
          timestamp: tsMs,
          compositionTime: (meta as any)?.compositionTime || 0, // pts - dts; not provided by all implementations
          isKey: chunk.type === 'key',
          width,
          height,
          description: desc, // avcC box when the implementation supplies one
        });
      },
      error: (e) => console.error('VideoEncoder error:', e),
    });

    this.vEncoder.configure({
      codec: 'avc1.42E01F', // H.264 Baseline, Level 3.1
      width,
      height,
      bitrate: this.opts.videoBitrate,
      framerate,
      hardwareAcceleration: 'prefer-hardware',
      avc: { format: this.opts.avcFormat }, // 'annexb' or 'avc' (length-delimited)
    });

    this.lastKeyMs = 0;

    // Pull frames until the reader is cancelled or the capture stops.
    const loop = async () => {
      const { done, value } = await reader.read();
      if (done || !value) return;
      const frame = value as VideoFrame;
      try {
        // Force a keyframe every videoGopMs milliseconds of wall-clock time.
        const nowMs = performance.now();
        const forceKey = (nowMs - this.lastKeyMs) >= this.opts.videoGopMs;
        if (forceKey) this.lastKeyMs = nowMs;

        this.vEncoder!.encode(frame, forceKey ? { keyFrame: true } : undefined);
      } catch (e) {
        console.error('encode video frame failed:', e);
      } finally {
        // VideoFrames hold scarce media resources; always release them.
        try { frame.close(); } catch {}
      }
      if (this.running) loop();
    };

    loop().catch(err => console.error('video read loop error', err));
  }

  /** Routes microphone audio through an AudioWorklet into an AAC-LC encoder. */
  private async startAudioPipeline() {
    if (!this.stream) return;

    const aTrack = this.stream.getAudioTracks()[0];
    if (!aTrack) return;

    // A 48 kHz context guarantees the worklet output is 48 kHz / 2 ch.
    const ac = new (window.AudioContext || (window as any).webkitAudioContext)({ sampleRate: 48000 });
    this.audioCtx = ac;

    this.audioSource = ac.createMediaStreamSource(this.stream);
    this.zeroGain = ac.createGain();
    this.zeroGain.gain.value = 0.0;

    // Register the capture AudioWorklet from a generated blob: module.
    const url = this.createStereoWorklet();
    this.aProcessorUrl = url;
    await ac.audioWorklet.addModule(url);

    const node = new AudioWorkletNode(ac, 'stereo-capture', {
      numberOfInputs: 1,
      numberOfOutputs: 1,
      outputChannelCount: [2],
    });
    this.aWorkletNode = node;

    // Keep the graph connected so processing runs, but silent via zero gain.
    this.audioSource.connect(node);
    node.connect(this.zeroGain);
    this.zeroGain.connect(ac.destination);

    this.aEncoder = new AudioEncoder({
      output: (chunk, meta) => {
        const data = new Uint8Array(chunk.byteLength);
        chunk.copyTo(data);
        const desc = meta?.decoderConfig?.description
          ? new Uint8Array(meta.decoderConfig.description)
          : undefined;

        const tsMs = chunk.timestamp / 1000;
        // Record when audio actually started so video can be aligned to it.
        // (With the synthetic clock below this is normally 0.)
        if (!this.firstAudioPktTimeMs) this.firstAudioPktTimeMs = tsMs;

        if (desc !== undefined) {
          // Emit the AudioSpecificConfig once as a sequence-header packet,
          // on the same millisecond clock as regular data packets.
          this.onAudioEncoded?.(desc, {
            codec: 'mp4a.40.2', // AAC-LC
            timestamp: tsMs,
            sampleRate: 48000,
            channels: 2,
            isSequenceHeader: true,
            frames: 0,
            description: desc,
          });
        }
        this.onAudioEncoded?.(data, {
          codec: 'mp4a.40.2', // AAC-LC
          timestamp: tsMs,
          sampleRate: 48000,
          channels: 2,
          isSequenceHeader: false,
          frames: (chunk as any).numberOfFrames ?? 0, // not exposed by every implementation
          description: desc,
        });
      },
      error: (e) => console.error('AudioEncoder error:', e),
    });

    this.aEncoder.configure({
      codec: 'mp4a.40.2', // AAC-LC
      sampleRate: 48000,
      numberOfChannels: 2,
      bitrate: this.opts.audioBitrate,
    });

    node.port.onmessage = (ev: MessageEvent) => {
      if (!this.running) return;
      const { buffer, frames } = ev.data as { buffer: ArrayBuffer; frames: number };
      const data = new Float32Array(buffer);
      if (data.length !== frames * 2) return; // malformed message; skip

      try {
        // Microsecond timestamp derived from the running PCM frame count
        // (1_000_000 us per second at 48 kHz). Count each batch exactly once.
        const timestamp = Math.floor((this.audioFrameCount * 1_000_000) / 48000);
        this.audioFrameCount += frames;

        const audioData = new AudioData({
          format: 'f32',                // interleaved float32
          sampleRate: 48000,
          numberOfFrames: frames,
          numberOfChannels: 2,
          timestamp,
          data,
        });
        this.aEncoder!.encode(audioData);
        audioData.close();
      } catch (e) {
        console.error('encode audio data failed:', e);
      }
    };
  }

  /**
   * Builds a blob: AudioWorklet module that mixes/expands any input channel
   * layout to stereo and posts interleaved Float32 PCM back to the main
   * thread. Returns the object URL (caller revokes it on stop()).
   */
  private createStereoWorklet(): string {
    const code = `
      class StereoCapture extends AudioWorkletProcessor {
        process(inputs, outputs) {
          const input = inputs[0] || [];
          const frames = input[0]?.length || input[1]?.length || 128;

          // 读取/生成 L/R
          let L, R;
          if (input.length === 0 || (input[0]?.length ?? 0) === 0) {
            L = new Float32Array(frames);
            R = new Float32Array(frames);
          } else if (input.length === 1) {
            const chs = input;
            if (chs.length === 1) {
              L = chs[0];
              R = chs[0];
            } else {
              L = chs[0];
              R = chs[1];
            }
          } else {
            // 保障至少两路
            const ch0 = input[0] || new Float32Array(frames);
            const ch1 = input[1] || new Float32Array(frames);
            L = ch0;
            R = ch1;
          }

          const interleaved = new Float32Array(frames * 2);
          for (let i = 0; i < frames; i++) {
            interleaved[i * 2] = L[i] || 0.0;
            interleaved[i * 2 + 1] = R[i] || 0.0;
          }

          // 发送到主线程
          this.port.postMessage({ buffer: interleaved.buffer, frames }, [interleaved.buffer]);

          // 仍然把空输出传下游，保持图连通
          const out = outputs[0];
          if (out && out.length >= 2) {
            out[0].fill(0);
            out[1].fill(0);
          }
          return true;
        }
      }
      registerProcessor('stereo-capture', StereoCapture);
    `;
    const blob = new Blob([code], { type: 'application/javascript' });
    return URL.createObjectURL(blob);
  }
}

// Default export kept for consumers importing without braces.
export default MediaCodecCapture;