<script setup>
  import { ref, onMounted} from 'vue'
  import * as THREE from 'three'

  class OralAudio {
    /**
     * Fetches and decodes an audio file with the Web Audio API and exposes
     * simple play/pause/stop controls plus callback hooks.
     *
     * Callbacks (register via addEventListener):
     *   'ended'       -> fired when playback finishes
     *   'timeupdate'  -> fired every 100ms during playback with (elapsedMs, audioContext)
     *   'statechange' -> fired when the AudioContext state changes
     *
     * @param {string} url - URL of the audio resource to fetch and decode.
     */
    constructor(url) {
      this.url = url;
      // BUG FIX: the fallback must reference the constructor, not call it.
      // The original `new (window.AudioContext || window.webkitAudioContext())()`
      // invoked webkitAudioContext WITHOUT `new` on the fallback path.
      const Ctx = window.AudioContext || window.webkitAudioContext;
      this.audioContext = new Ctx();
      this.audioContext.onstatechange = (e) => this.onstatechange(e);
      this.ajax().then((arrayBuffer) => {
        return this.audioContext.decodeAudioData(arrayBuffer).then((audioBuffer) => {
          this.audioBuffer = audioBuffer;
          if (typeof this._loaded === 'function') {
            this._loaded();
          }
        }).catch(() => {
          // Decoding failed (corrupt/unsupported data).
          this._msg_error();
        });
      }).catch(() => {
        // ajax() already reported the failure via _msg_error(); swallow here
        // so the constructor chain does not produce an unhandled rejection.
      });
    }
    /**
     * Downloads the audio file.
     * @returns {Promise<ArrayBuffer>} raw audio bytes; rejects (after
     *   reporting through the error callback) on network or HTTP failure.
     */
    ajax() {
      return fetch(this.url)
        .then((res) => {
          // fetch() only rejects on network errors; treat HTTP error
          // statuses (404, 500, ...) as failures as well.
          if (!res.ok) {
            throw new Error('HTTP ' + res.status);
          }
          return res.arrayBuffer();
        })
        .catch((err) => {
          this._msg_error();
          throw err;
        });
    }
    // Relays AudioContext state changes to the registered callback, if any.
    onstatechange(e) {
      if (typeof this._statechangeFn === 'function') {
        this._statechangeFn(e);
      }
    }
    // Stops the progress timer and relays the 'ended' event to the callback.
    onended(e) {
      if (this.timer) {
        clearInterval(this.timer);
        this.timer = null;
      }
      if (e.type === 'ended' && typeof this._endedFn === 'function') {
        this._endedFn(e);
      }
    }
    /** Registers a callback invoked once the audio buffer has been decoded. */
    loaded(fn) {
      this._loaded = fn;
    }
    /**
     * Starts playback from the beginning. A fresh BufferSource is created on
     * every call (sources are single-use in the Web Audio API). Safe no-op
     * (reports via the error callback) if the buffer is not yet decoded.
     */
    play() {
      if (!this.audioBuffer) {
        // play() was called before decodeAudioData finished.
        this._msg_error();
        return;
      }
      // BUG FIX: clear any timer from a previous play() so repeated calls
      // don't leak stacked intervals.
      if (this.timer) {
        clearInterval(this.timer);
      }
      this.audioSource = this.audioContext.createBufferSource();
      this.audioSource.onended = (e) => this.onended(e);
      this.audioSource.buffer = this.audioBuffer;
      this.duration = this.audioBuffer.duration;
      this.audioSource.loop = false;
      this.audioSource.connect(this.audioContext.destination);
      this.audioSource.start(0);

      let currentTime = 0;
      const interval = 100; // progress-callback period in milliseconds
      this.timer = setInterval(() => {
        if (typeof this._ontimeupdateFn === 'function') {
          this._ontimeupdateFn(currentTime, this.audioContext);
        }
        currentTime += interval;
      }, interval);
    }
    /** Toggles playback: suspends a running context, resumes a suspended one. */
    pause() {
      if (this.audioContext.state === "running") {
        this.audioContext.suspend();
      } else if (this.audioContext.state === "suspended") {
        this.audioContext.resume();
      }
    }
    /** Stops the current source, if playback was ever started. */
    stop() {
      // BUG FIX: guard against stop() before the first play().
      if (this.audioSource) {
        this.audioSource.stop(0);
      }
    }
    /** Registers a callback invoked with a message when loading fails. */
    error(fn) {
      this._error = fn;
    }
    // Internal: forwards a load-failure message to the error callback.
    _msg_error() {
      this._error && this._error('Audio source loading failed： ' + this.url);
    }
    /**
     * Registers an event callback; supported types: 'ended', 'timeupdate',
     * 'statechange'. Stored as this._<type>Fn.
     */
    addEventListener(type, fn) {
      this['_' + type + 'Fn'] = fn;
    }
  }


const a = new OralAudio('/a2.wav');

  // --- Three.js scene setup ---

  // Scene
  const scene = new THREE.Scene();

  // Camera: 75° FOV, window aspect ratio, near/far clip planes 0.1/1000.
  const camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 0.1, 1000);
  camera.position.z = 5;

  // Renderer sized to the full window and attached to the document body.
  const renderer = new THREE.WebGLRenderer();
  renderer.setSize(window.innerWidth, window.innerHeight);
  document.body.appendChild(renderer.domElement);

  // A green cube standing in for a "mouth"; its Y scale is driven by audio.
  const geometry = new THREE.BoxGeometry(1, 1, 1);
  const material = new THREE.MeshBasicMaterial({ color: 0x00ff00 });
  const mouth = new THREE.Mesh(geometry, material);
  scene.add(mouth);

  a.loaded(() => {
    // Analyser state is created lazily on the first click; browsers require a
    // user gesture before an AudioContext may produce sound.
    let analyser = null;
    let dataArray = null;

    document.onclick = () => {
      a.play();
      const audioContext = a.audioContext;
      const audioSource = a.audioSource;
      if (!audioSource) return; // buffer not decoded yet; play() reported it

      const firstClick = analyser === null;
      if (firstClick) {
        analyser = audioContext.createAnalyser();
        analyser.fftSize = 2048;
        analyser.smoothingTimeConstant = 0.8;
        dataArray = new Uint8Array(analyser.frequencyBinCount);
      }
      // play() creates a fresh BufferSource each call, so the new source must
      // be (re)connected to the analyser on every click.
      audioSource.connect(analyser);

      // BUG FIX: the original started a new requestAnimationFrame loop on
      // every click, stacking loops forever. Run exactly one.
      if (!firstClick) return;

      function animate() {
        analyser.getByteFrequencyData(dataArray);
        // Drive the mouth's vertical scale from the lowest frequency bin,
        // normalised into [0, 2]. (Removed an unused, mislabelled "rms/db"
        // computation that summed raw bin values instead of squares.)
        mouth.scale.y = (dataArray[0] / 255) * 2;

        renderer.render(scene, camera);
        requestAnimationFrame(animate);
      }
      animate();
    };
  });






</script>
<template>
  <audio id="audio" src="/a2.wav" controls></audio>
</template>