<script setup>
import {onMounted, ref} from 'vue'

defineProps({
  msg: String,
})

// requestAnimationFrame handle for the render loop (set in startAudio,
// cancelled in stopAudio).
let animationId;
// AnalyserNode created in startAudio; read every frame by draw().
let analyser;
// Byte frequency-domain buffer, sized to analyser.frequencyBinCount.
// NOTE(review): name is a typo of "frequencyArray"; kept as-is because
// it is referenced by other functions in this file.
let frquencyArray;
// Byte time-domain buffer, sized to analyser.fftSize.
let timeDomainArray;

// Canvas element refs bound in the template below.
const byteFrequencyRef = ref();
const timeDomainRef = ref();

// Created on each "start" click; closed by stopAudio.
let audioCtx;

/**
 * Start capturing microphone audio and begin the visualization loop.
 *
 * Builds the graph: mic stream -> AnalyserNode -> AudioWorkletNode
 * ('my-processor') -> destination, then kicks off draw() via
 * requestAnimationFrame.
 *
 * @throws {Error} if microphone capture fails (e.g. permission denied);
 *   the AudioContext is closed before rethrowing so it does not leak.
 */
async function startAudio() {
  audioCtx = new AudioContext({ sampleRate: 48000 });
  analyser = audioCtx.createAnalyser();
  analyser.fftSize = 2048;
  analyser.minDecibels = -100; // default; raise to ignore weaker noise
  analyser.maxDecibels = -10;  // may be raised if signals clip
  analyser.smoothingTimeConstant = 0.8;

  // frequencyBinCount is fftSize / 2 (1024 bins here).
  const bufferLength = analyser.frequencyBinCount;
  frquencyArray = new Uint8Array(bufferLength);
  timeDomainArray = new Uint8Array(analyser.fftSize);

  await audioCtx.audioWorklet.addModule('my-processor.js');

  try {
    // await instead of a floating .then(): a denied permission used to be
    // an unhandled rejection that leaked the running AudioContext.
    const stream = await navigator.mediaDevices.getUserMedia({
      audio: true,
    });

    const myNode = new AudioWorkletNode(audioCtx, 'my-processor');
    myNode.port.postMessage({ some: 'start' });
    const source = audioCtx.createMediaStreamSource(stream);

    // mic -> analyser -> worklet -> speakers
    source.connect(analyser);
    analyser.connect(myNode);
    myNode.connect(audioCtx.destination);

    animationId = requestAnimationFrame(draw);
  } catch (err) {
    await audioCtx.close();
    throw new Error('microphone capture failed', { cause: err });
  }
}

/**
 * Stop the render loop and tear down the audio graph.
 *
 * Safe to call repeatedly or before startAudio: the original threw a
 * TypeError on `audioCtx.close()` when recording had never started.
 */
function stopAudio() {
  // Cancel the animation loop first so draw() stops touching the analyser.
  if (animationId !== undefined) {
    cancelAnimationFrame(animationId);
    animationId = undefined;
  }
  if (audioCtx) {
    audioCtx.close();
    audioCtx = undefined;
  }
}

/**
 * Per-frame render callback: samples the analyser and repaints the
 * frequency-spectrum canvas, then reschedules itself.
 */
function draw() {
  // Re-arm the loop first so one failed paint doesn't stop the animation.
  animationId = requestAnimationFrame(draw);

  // Snapshot the current frequency-domain data and render it as bars.
  analyser.getByteFrequencyData(frquencyArray);
  drawByteFrequency(frquencyArray);
}

/**
 * Render a bar-chart spectrum of the given byte frequency data onto the
 * byteFrequency canvas.
 *
 * The lowest third of the bins is skipped (drops low-frequency content so
 * the interesting part of the spectrum fills the canvas).
 *
 * @param {Uint8Array} frequencyArray - values 0..255 from
 *   AnalyserNode.getByteFrequencyData.
 */
function drawByteFrequency(frequencyArray) {
  const canvas = byteFrequencyRef.value;
  const ctx = canvas.getContext('2d');

  const maxBarHeight = canvas.height;
  ctx.clearRect(0, 0, canvas.width, canvas.height);

  // Drop the lowest third of the bins.
  const start = Math.floor(frequencyArray.length / 3);
  const bins = frequencyArray.slice(start);

  const barGap = 1;
  // Budget the gaps into the bar width so the last bar ends at the canvas
  // edge. The original used canvas.width / n and then advanced by
  // barWidth + barGap, overflowing the canvas by (n - 1) px and clipping
  // the highest-frequency bars.
  const barWidth = (canvas.width - (bins.length - 1) * barGap) / bins.length;

  ctx.fillStyle = 'rgb(255,0,0)'; // constant per frame; hoisted out of the loop
  let x = 0;

  for (let i = 0; i < bins.length; i++) {
    // Double the scaled height for visibility, clamped to the canvas.
    const barHeight = Math.min((bins[i] / 255) * maxBarHeight * 2, maxBarHeight);
    ctx.fillRect(x, canvas.height - barHeight, barWidth, barHeight);
    x += barWidth + barGap;
  }
}

/**
 * Draw the time-domain waveform as a polyline across the timeDomain canvas.
 *
 * @param {Uint8Array} timeDomainArray - values 0..255 from
 *   AnalyserNode.getByteTimeDomainData.
 */
function drawTimeDomain(timeDomainArray) {
  const canvas = timeDomainRef.value;
  const ctx = canvas.getContext('2d');

  ctx.clearRect(0, 0, canvas.width, canvas.height);

  // Horizontal distance between successive samples.
  const sliceWidth = canvas.width / timeDomainArray.length;
  let x = 0;

  ctx.beginPath();
  for (let i = 0; i < timeDomainArray.length; i++) {
    // Map byte value 0..255 onto the full canvas height.
    const y = (timeDomainArray[i] / 255) * canvas.height;

    if (i === 0) {
      ctx.moveTo(x, y);
    } else {
      ctx.lineTo(x, y);
    }
    x += sliceWidth;
  }
  ctx.stroke();
}


</script>

<template>
  <div class="wave-container">
    <!-- Start/stop microphone capture and visualization. -->
    <button @click="startAudio">开始录音</button>
    <button @click="stopAudio">停止录音</button>

    <!-- Frequency-spectrum bar chart, painted by drawByteFrequency. -->
    <canvas id="byteFrequency" ref="byteFrequencyRef" class="frequencyCanvas" width="800" height="400"></canvas>
    <!-- Time-domain waveform target (drawTimeDomain is currently not invoked). -->
    <canvas id="timeDomain" ref="timeDomainRef"></canvas>
  </div>
</template>

<style scoped>
/* Row layout for the buttons and canvases. */
.wave-container {
  width: 100%;
  height: 100%;
  display: flex;
}

/* Backdrop so the spectrum canvas is visible even when no bars are drawn. */
.frequencyCanvas {
  background-color: lightcyan;
}
</style>