/*
High-level audio recorder tailored for 16k mono WAV chunking.
Collects microphone audio via WebAudio, downsamples to 16000 Hz, and
exposes onChunk callbacks with WAV byte payloads at a fixed interval.
*/

/**
 * Callback invoked once per emitted audio chunk.
 *
 * - `wavBytes`: a complete, self-contained WAV file (44-byte RIFF header
 *   followed by 16-bit little-endian mono PCM data).
 * - `rawPcmBytes`: the same 16-bit PCM samples without the WAV header.
 * - `durationMs`: chunk duration in milliseconds, computed at the target
 *   (output) sample rate, rounded to the nearest integer.
 */
export type RecorderChunkHandler = (payload: {
  wavBytes: Uint8Array;
  rawPcmBytes: Uint8Array;
  durationMs: number;
}) => void;

/** Construction options for the recorder; all fields are optional. */
export type RecorderOptions = {
  chunkDurationMs?: number; // default 2500ms
  targetSampleRate?: number; // default 16000
};

/**
 * Records microphone audio via WebAudio, downsamples it to a target rate
 * (16 kHz mono by default), and periodically delivers WAV-encoded chunks
 * to an `onChunk` callback.
 *
 * Chunk boundaries are time-based (`chunkDurationMs` of wall-clock time),
 * so individual chunks may vary slightly in sample count.
 */
export class SixteenKMonoRecorder {
  private mediaStream: MediaStream | null = null;
  private audioContext: AudioContext | null = null;
  private sourceNode: MediaStreamAudioSourceNode | null = null;
  private processorNode: ScriptProcessorNode | null = null;
  private isRecording = false;
  // Guards the async setup in start(): the isRecording check alone is not
  // enough because two concurrent callers could both pass it before the
  // first await resolves, double-acquiring the microphone.
  private isStarting = false;
  private chunkDurationMs: number;
  private targetSampleRate: number;
  private collected: Float32Array[] = [];
  private collectedSamples = 0;
  private lastChunkEmitTime = 0;
  private onChunk?: RecorderChunkHandler;

  constructor(options?: RecorderOptions) {
    this.chunkDurationMs = options?.chunkDurationMs ?? 2500;
    this.targetSampleRate = options?.targetSampleRate ?? 16000;
  }

  /**
   * Requests microphone access and starts recording. `onChunk` is invoked
   * roughly every `chunkDurationMs` with the encoded audio collected since
   * the previous emission. No-op if recording or a start is already in
   * flight. Rethrows setup failures (e.g. permission denied) after
   * releasing any partially-acquired resources.
   */
  public async start(onChunk: RecorderChunkHandler): Promise<void> {
    if (this.isRecording || this.isStarting) return;
    this.isStarting = true;
    try {
      this.onChunk = onChunk;
      this.mediaStream = await navigator.mediaDevices.getUserMedia({ audio: true });
      this.audioContext = new (window.AudioContext || (window as any).webkitAudioContext)();
      this.sourceNode = this.audioContext.createMediaStreamSource(this.mediaStream);
      // NOTE(review): ScriptProcessorNode is deprecated in favor of
      // AudioWorklet, but is kept deliberately for wide compatibility.
      // 4096 frames per callback keeps the callback rate low.
      this.processorNode = this.audioContext.createScriptProcessor(4096, 1, 1);
      this.sourceNode.connect(this.processorNode);
      // Some browsers never fire onaudioprocess unless the node is wired to
      // a destination; the node's output buffer stays silent, so this does
      // not cause audible feedback.
      this.processorNode.connect(this.audioContext.destination);
      this.isRecording = true;
      this.collected = [];
      this.collectedSamples = 0;
      this.lastChunkEmitTime = performance.now();

      this.processorNode.onaudioprocess = (event) => {
        if (!this.isRecording || !this.audioContext) return;
        const input = event.inputBuffer.getChannelData(0);
        // Copy: the engine may recycle the channel buffer between callbacks.
        this.collected.push(new Float32Array(input));
        this.collectedSamples += input.length;

        const elapsed = performance.now() - this.lastChunkEmitTime;
        if (elapsed >= this.chunkDurationMs) {
          this.emitChunk();
          this.lastChunkEmitTime = performance.now();
        }
      };
    } catch (err) {
      // Fix: a failed start previously leaked whatever had already been
      // acquired (mic tracks, audio context). Release everything, then
      // surface the original error to the caller.
      await this.teardown();
      throw err;
    } finally {
      this.isStarting = false;
    }
  }

  /**
   * Stops recording, flushes any buffered audio as a final (possibly
   * shorter) chunk, and releases all audio resources. No-op if not
   * recording.
   */
  public async stop(): Promise<void> {
    if (!this.isRecording) return;
    this.isRecording = false;
    // Flush the tail before teardown: emitChunk needs the still-open
    // audioContext to read the input sample rate.
    if (this.collectedSamples > 0) {
      this.emitChunk();
    }
    await this.teardown();
    this.collected = [];
    this.collectedSamples = 0;
  }

  /** Disconnects and releases all audio resources. Safe to call repeatedly. */
  private async teardown(): Promise<void> {
    if (this.processorNode) {
      this.processorNode.onaudioprocess = null;
      this.processorNode.disconnect();
      this.processorNode = null; // fields are declared nullable; no cast needed
    }
    if (this.sourceNode) {
      this.sourceNode.disconnect();
      this.sourceNode = null;
    }
    if (this.audioContext) {
      try {
        await this.audioContext.close();
      } catch {
        // close() rejects if the context is already closed; either way we
        // must still stop the mic tracks below, so swallow it.
      }
      this.audioContext = null;
    }
    if (this.mediaStream) {
      this.mediaStream.getTracks().forEach((t) => t.stop());
      this.mediaStream = null;
    }
  }

  /**
   * Merges everything collected so far, downsamples it to the target rate,
   * wraps it in a WAV container, and hands it to the onChunk handler.
   * Always clears the collection buffers, even when nothing is emitted.
   */
  private emitChunk(): void {
    if (!this.audioContext || this.collectedSamples === 0 || !this.onChunk) {
      this.collected = [];
      this.collectedSamples = 0;
      return;
    }
    const inputSampleRate = this.audioContext.sampleRate;
    const merged = this.mergeCollected(this.collected, this.collectedSamples);
    const downsampled = this.downsampleToTarget(merged, inputSampleRate, this.targetSampleRate);
    const pcmBytes = this.encodePCM16(downsampled);
    const wavBytes = this.buildWavFile(pcmBytes, this.targetSampleRate, 1);
    const durationMs = Math.round((downsampled.length / this.targetSampleRate) * 1000);

    this.onChunk({ wavBytes, rawPcmBytes: pcmBytes, durationMs });

    this.collected = [];
    this.collectedSamples = 0;
  }

  /** Concatenates the buffered Float32Array segments into one array. */
  private mergeCollected(chunks: Float32Array[], totalLength: number): Float32Array {
    const result = new Float32Array(totalLength);
    let offset = 0;
    for (const chunk of chunks) {
      result.set(chunk, offset);
      offset += chunk.length;
    }
    return result;
  }

  /**
   * Downsamples by averaging each window of input samples that maps onto
   * one output sample (a crude box filter; adequate for speech pipelines).
   *
   * @throws Error if asked to upsample — the averaging windows would be
   *   empty and the original code silently produced zero-filled output.
   */
  private downsampleToTarget(buffer: Float32Array, sampleRate: number, outSampleRate: number): Float32Array {
    if (outSampleRate === sampleRate) return buffer;
    if (outSampleRate > sampleRate) {
      throw new Error(`Cannot upsample from ${sampleRate} Hz to ${outSampleRate} Hz`);
    }
    const ratio = sampleRate / outSampleRate;
    const newLength = Math.floor(buffer.length / ratio);
    const result = new Float32Array(newLength);
    let resultIndex = 0;
    let bufferIndex = 0;
    while (resultIndex < newLength) {
      const nextBufferIndex = Math.floor((resultIndex + 1) * ratio);
      let accum = 0;
      let count = 0;
      for (let i = bufferIndex; i < nextBufferIndex && i < buffer.length; i++) {
        accum += buffer[i];
        count++;
      }
      result[resultIndex] = count > 0 ? accum / count : 0;
      bufferIndex = nextBufferIndex;
      resultIndex++;
    }
    return result;
  }

  /**
   * Converts [-1, 1] float samples to 16-bit little-endian signed PCM.
   * Out-of-range samples are clamped; negative and positive halves are
   * scaled asymmetrically (0x8000 vs 0x7fff) to use the full int16 range.
   */
  private encodePCM16(samples: Float32Array): Uint8Array {
    const bytes = new Uint8Array(samples.length * 2);
    const view = new DataView(bytes.buffer);
    let offset = 0;
    for (let i = 0; i < samples.length; i++, offset += 2) {
      const s = Math.max(-1, Math.min(1, samples[i]));
      view.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7fff, true);
    }
    return bytes;
  }

  /**
   * Wraps raw 16-bit PCM bytes in a canonical 44-byte RIFF/WAVE header.
   * All multi-byte fields are little-endian per the WAV spec.
   */
  private buildWavFile(pcmBytes: Uint8Array, sampleRate: number, numChannels: number): Uint8Array {
    const bytesPerSample = 2; // 16-bit
    const blockAlign = numChannels * bytesPerSample;
    const byteRate = sampleRate * blockAlign;
    const wavHeaderSize = 44;
    const buffer = new Uint8Array(wavHeaderSize + pcmBytes.length);
    const view = new DataView(buffer.buffer);

    // RIFF chunk descriptor: total size excludes the 8-byte RIFF/size field.
    this.writeString(view, 0, 'RIFF');
    view.setUint32(4, 36 + pcmBytes.length, true);
    this.writeString(view, 8, 'WAVE');

    // fmt subchunk
    this.writeString(view, 12, 'fmt ');
    view.setUint32(16, 16, true); // Subchunk1Size (16 for PCM)
    view.setUint16(20, 1, true); // AudioFormat = 1 (PCM)
    view.setUint16(22, numChannels, true);
    view.setUint32(24, sampleRate, true);
    view.setUint32(28, byteRate, true);
    view.setUint16(32, blockAlign, true);
    view.setUint16(34, 16, true); // BitsPerSample

    // data subchunk
    this.writeString(view, 36, 'data');
    view.setUint32(40, pcmBytes.length, true);
    buffer.set(pcmBytes, 44);

    return buffer;
  }

  /** Writes an ASCII string into the view byte-by-byte at the given offset. */
  private writeString(view: DataView, offset: number, str: string): void {
    for (let i = 0; i < str.length; i++) {
      view.setUint8(offset + i, str.charCodeAt(i));
    }
  }
}

/**
 * Encodes a byte array as base64.
 *
 * Builds the intermediate binary string in 32 KiB slices because spreading
 * an entire large array into String.fromCharCode would overflow the
 * argument/call-stack limit.
 */
export function bytesToBase64(bytes: Uint8Array): string {
  let binary = '';
  const chunkSize = 0x8000;
  for (let i = 0; i < bytes.length; i += chunkSize) {
    // Spreading the typed array is type-safe at ES2015+ targets; the old
    // `.apply(null, Array.from(chunk) as unknown as number[])` double-cast
    // and extra array copy are unnecessary.
    binary += String.fromCharCode(...bytes.subarray(i, i + chunkSize));
  }
  return btoa(binary);
}


