<template>
  <div class="canvas">
    <audio id="audio2" controls preload="auto" :src="playUrl"></audio>
    <canvas id="myCanvas">您的浏览器不支持canvas标签</canvas>
  </div>
</template>

<script>
import config from '@/service/config'
import { useStore } from 'vuex'
import { computed, onMounted, onUnmounted, ref, watch, watchEffect } from 'vue'
// Root component: plays the current song and renders an audio-spectrum
// visualiser onto a full-screen canvas.
export default {
  name: 'App',
  components: {},
  setup() {
    const store = useStore()
    // Bundled test track used as the <audio> element's src.
    const playUrl = require('@/assets/audio/test.mp3')
    // Watch the audio element via a template ref callback.
    // NOTE(review): the template binds the element by id ("audio2"), not via
    // :ref="audio", so audioRef may never be assigned — verify the wiring.
    let audioRef = ''
    const audio = el => {
      audioRef = el
    }

    // Seek the audio element to the elapsed play time of the current song.
    // `songInfo.since` is assumed to be a ms timestamp of when playback
    // started (NOTE(review): confirm against the store).
    const audioLoaded = () => {
      console.log('currentTime', audioRef.currentTime)
      // Bug fix: the original used parseFloat(...).toFixed(2), which produces
      // a *string* that was then compared with `<` and assigned to
      // currentTime via implicit coercion. Keep the value numeric and round
      // to two decimals explicitly.
      const elapsed = Math.round(((Date.now() - songInfo.value.since) / 1000) * 100) / 100
      audioRef.currentTime = elapsed < 0 ? 0 : elapsed
      // A song that has not accumulated any play time always starts at 0.
      // (`==` kept deliberately: nowDuration may arrive as the string "0".)
      if (songInfo.value.nowDuration == 0) {
        audioRef.currentTime = 0
      }
      console.log('currentTime', audioRef.currentTime, elapsed)
      // initAudioContext()
    }
    // Currently-playing song metadata from the store
    // (read fields: since, nowDuration, playUrl).
    let songInfo = computed(() => {
      return store.getters.songInfo
    })
    // Playback position / total length, updated by handleUpdateTime.
    let currentTime = ref('')
    let duration = ref('')
    // Pending song queue from the store (currently unused in this block).
    let songOrderList = computed(() => {
      return store.getters.songOrderList
    })
    // "timeupdate" handler: mirror the <audio> element's playback position
    // and total length into the local refs.
    const handleUpdateTime = e => {
      const { currentTime: position, duration: total } = e.target
      currentTime.value = position
      duration.value = total || 0
    }
    // "ended" handler for the <audio> element; the dequeue dispatch is
    // currently disabled.
    const handleEnd = () => {
      console.log('end')
      // store.dispatch('_postDelSongOrder')
    }
    // Audio visualiser: pipe the <audio> element through an AnalyserNode and
    // paint frequency bars onto the canvas on every animation frame.
    const initAudioContext = () => {
      let audioCtx
      try {
        audioCtx = new (window.AudioContext || window.webkitAudioContext)()
      } catch (err) {
        alert('!Your browser does not support Web Audio API!')
        // Bug fix: the original fell through after the alert and crashed on
        // the undefined `audioCtx` below.
        return
      }
      const myCanvas = document.getElementById('myCanvas')
      const canvasCtx = myCanvas.getContext('2d')
      const myAudio = document.getElementById('audio2')
      // element source -> analyser -> output device (speakers/headphones)
      const source = audioCtx.createMediaElementSource(myAudio)
      const analyser = audioCtx.createAnalyser()
      source.connect(analyser)
      analyser.connect(audioCtx.destination)
      // Bug fix: canplaythrough can fire more than once; the original started
      // a new requestAnimationFrame loop on every event.
      let drawing = false
      myAudio.oncanplaythrough = function () {
        if (drawing) return
        drawing = true
        const draw = () => {
          const cwidth = myCanvas.width
          const cheight = myCanvas.height
          // Only the first 128 frequency bins are rendered (as before).
          const array = new Uint8Array(128)
          analyser.getByteFrequencyData(array)
          canvasCtx.clearRect(0, 0, cwidth, cheight)
          for (let i = 0; i < array.length; i++) {
            // One 2px-wide bar per bin, anchored to the bottom edge.
            canvasCtx.fillRect(i * 3, cheight - array[i], 2, cheight)
          }
          requestAnimationFrame(draw)
        }
        draw()
      }
    }
    // Download the current song as an ArrayBuffer and decode it into
    // `source.buffer`.
    // @param source   AudioBufferSourceNode that receives the decoded data
    // @param audioCtx AudioContext used for decoding
    function getData(source, audioCtx) {
      var request = new XMLHttpRequest()
      request.open('GET', songInfo.value.playUrl, true)
      request.responseType = 'arraybuffer' // raw bytes, not text
      request.onload = function () {
        var audioData = request.response
        // Decode once the whole response has buffered.
        audioCtx.decodeAudioData(
          audioData,
          function (buffer) {
            source.buffer = buffer // hand the decoded data to the source node
          },
          function (err) {
            alert('!Fail to decode the file!')
          }
        )
      }
      // Bug fix: surface network failures instead of silently doing nothing.
      request.onerror = function () {
        alert('!Fail to download the audio file!')
      }
      request.send()
    }
    // Buffer-based variant of the visualiser setup: downloads and decodes the
    // track into an AudioBufferSourceNode instead of tapping the <audio> tag.
    const initAudioContextBuffer = () => {
      let audioCtx
      try {
        audioCtx = new (window.AudioContext || window.webkitAudioContext)()
      } catch (err) {
        alert('!Your browser does not support Web Audio API!')
        // Bug fix: don't continue with an undefined `audioCtx`.
        return
      }
      // Empty source node; getData() fills `source.buffer` after decoding.
      const source = audioCtx.createBufferSource()
      getData(source, audioCtx)
    }
    // Grow the canvas's backing store to at least the viewport size.
    // A dimension is only assigned when it is actually too small, because
    // assigning canvas.width/height resets (clears) the canvas.
    function resize_canvas() {
      const canvas = document.getElementById('myCanvas')
      const { innerWidth, innerHeight } = window
      if (canvas.width < innerWidth) canvas.width = innerWidth
      if (canvas.height < innerHeight) canvas.height = innerHeight
      console.log(canvas.width)
    }
    onMounted(async () => {
      // initPlay()
      // Start the visualiser, then fetch the user info before opening the
      // socket. NOTE(review): the await suggests the socket handshake needs
      // the user identity first — confirm the ordering requirement.
      initAudioContext()
      await store.dispatch('_postUserDetail')
      store.dispatch('initSocket')
      //   resize_canvas()
      //   initAudioContextBuffer()
    })

    // Expose state and handlers to the template / render context.
    return { songInfo, handleUpdateTime, handleEnd, audio, audioLoaded, playUrl }
  },
  methods: {
    open() {
      this.$alert('即将播放音乐', {
        confirmButtonText: '确定',
        callback: action => {
          let audio = document.getElementById('audio')
          audio.play()
        }
      })
    }
  },
  // Options-API hook (runs after setup): prompt the user immediately so that
  // playback starts from a user gesture.
  mounted() {
    this.open()
  },
  // Close the socket connection opened by the 'initSocket' dispatch.
  beforeUnmount() {
    this.$store.dispatch('disconnect')
  }
}
</script>

<style lang="scss">
// Full-viewport overlay hosting the visualiser canvas.
.canvas {
  position: absolute;
  top: 0;
  left: 0;
  bottom: 0;
  right: 0;
  background: rgba(0, 0, 0, 0.5);
  #myCanvas {
    // Stretches the canvas element over the viewport via CSS; the bitmap's
    // backing-store size stays whatever width/height attributes it carries.
    width: 100vw;
    height: 100vh;
  }
}
</style>
