<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <meta http-equiv="X-UA-Compatible" content="IE=edge" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>音频可视化</title>
  </head>
  <body>
    <div class="box">这里有些什么</div>

    <div>
      <input id="file" type="file">
    </div>
    <!-- <div>音频可视化</div> -->
    <!-- <video id="video" controls src="./i am father.mp4"></video> -->
    <!-- <audio src="./i am father.mp4" controls id="audio"></audio> -->
  </body>
  <script>
    // Entry point: wire everything up once the page has finished loading.
    window.onload = function () {
      // demo();
      start();
    };

    /**
     * Wires up the #file input: when the user picks a file, reads its raw
     * bytes into memory and logs a short preview plus the total size.
     */
    function start() {
      const fileInput = document.querySelector("#file");
      fileInput.addEventListener("change", async (event) => {
        // Renamed from the original shadowing `file` local for clarity.
        const selected = event.target.files[0];
        if (!selected) return; // user cancelled the file picker

        // A File is already a Blob; read it directly — no need for the
        // original `file.slice(0, file.size)` round-trip or the unused
        // stream/reader (getReader() locked a stream nobody consumed).
        const arrayBuffer = await selected.arrayBuffer();
        const fileView = new Int8Array(arrayBuffer);

        // Preview the first bytes; guard against files shorter than 10 bytes.
        const previewLength = Math.min(10, fileView.length);
        for (let i = 0; i < previewLength; i++) {
          console.log(`fileView: ${fileView[i]}`);
        }

        console.log(`file size: ${arrayBuffer.byteLength} fileView: ${fileView.byteLength}`);
      });
    }

    /**
     * Routes the #audio element through an AnalyserNode and logs one frame
     * of float frequency data.
     *
     * Fixes from the original:
     *  - `source.connect(analyser)` returns its *destination* node, not an
     *    audio context; the original stored it as `aContext` and used it as
     *    if it were one. The analyser itself is the node to query.
     *  - `getFloatFrequencyData()` returns undefined and fills the array
     *    in place, so the original `console.log("audio", audioData)` always
     *    printed undefined. Log the filled array instead.
     */
    function demo() {
      const AudioContext = window.AudioContext || window.webkitAudioContext;
      const ctx = new AudioContext();

      // Analyser: fftSize 512 -> frequencyBinCount of 256 bins.
      const analyser = ctx.createAnalyser();
      analyser.fftSize = 512;

      const audio = document.getElementById("audio");
      const source = ctx.createMediaElementSource(audio);

      // Graph: source -> analyser -> output device (headphones/speakers).
      source.connect(analyser);
      analyser.connect(ctx.destination);

      // audio.play();

      const bufferLength = analyser.frequencyBinCount;
      const dataArray = new Float32Array(bufferLength);

      // Fills dataArray in place (dB values per frequency bin).
      analyser.getFloatFrequencyData(dataArray);

      console.log("audio", dataArray);
    }

    /**
     * Builds an audio graph from the #audio element, attaches an analyser,
     * samples one frame of byte frequency data, and logs the bin count.
     */
    function momo() {
      // let mediaStream = new MediaStream();

      const AudioCtor = window.AudioContext || window.webkitAudioContext;
      const audioCtx = new AudioCtor();

      const analyserNode = audioCtx.createAnalyser();
      analyserNode.fftSize = 512;

      // Grab the <audio> element and turn it into a media-element source.
      const audioEl = document.getElementById("audio");
      const mediaSource = audioCtx.createMediaElementSource(audioEl);

      // Graph: source -> analyser -> output device (headphones/speakers).
      mediaSource.connect(analyserNode);
      analyserNode.connect(audioCtx.destination);

      const binCount = analyserNode.frequencyBinCount;
      const byteData = new Uint8Array(binCount);

      // Fills byteData in place with the current frequency snapshot.
      analyserNode.getByteFrequencyData(byteData);

      console.log("音频文件", analyserNode.frequencyBinCount);
    }
  </script>
</html>
