<template>
  <div class="audio-line">
    <canvas ref="showNode" @click="switchState" @contextmenu.prevent.right="chooseFile" />
  </div>
</template>

<script setup lang="ts">
import globalStore from '@/store/global-store'
import { onMounted, ref, onBeforeUnmount, watch } from 'vue'



// Bar / waveform rendering constants
const barGap = 8        // horizontal step between bar pairs (px)
const barWidth = 2      // width of a single bar (px)
const barHeight = 200   // vertical span of the gradient strip (px)
let upperFringe:CanvasGradient   // gradient for the upward-drawn bars
let downFringe:CanvasGradient    // gradient for the downward-drawn bars
let barTotalCardinal = 0         // exponent n so that analyser fftSize = 2 ** n

// Global state store (provides canvas dimensions)
const gStore = globalStore()

// Canvas element and Web Audio handles
const showNode = ref<HTMLCanvasElement>()
let showCtx:CanvasRenderingContext2D
let audioContext:AudioContext
let audioAnalyser:AnalyserNode
let audioBufSourceNode:AudioBufferSourceNode
let recordAnimation: number | null   // requestAnimationFrame handle; null when no loop is scheduled
const reservePad = 100               // side padding for the placeholder text (px)

// Frequency data for the current animation frame
let rangeData:Uint8Array

// Keep the canvas pixel size in sync with the dimensions held in the store.
watch(
  () => [gStore.CURRENT_SIZE_WIDTH, gStore.CURRENT_SIZE_HEIGHT],
  ([newWidth, newHeight]) => {
    const canvas = showNode.value
    if (canvas) {
      canvas.width = newWidth
      canvas.height = newHeight
    }
  }
)

/** 选择待播放文件 */
const chooseFile = async () => {

  if (audioContext?.state === 'running') {
    await audioContext.close()
    drawUpContext()

    RPlaceholder()
  }

  const tempNode = document.createElement('input')
  tempNode.type = 'file'
  tempNode.accept = 'audio/*'
  tempNode.addEventListener('change', function({ target }: Event) {
    const inputElement = target as HTMLInputElement
    if (inputElement.files && inputElement.files.length > 0) {
      drawUpData(inputElement.files[0])
    }
  })

  tempNode.click()
}

/** 准备数据 */
const drawUpData = (audioFile: File) => {
  const localReader = new FileReader()
  localReader.readAsArrayBuffer(audioFile)
  localReader.onload = function() {
    audioContext!.decodeAudioData(<ArrayBuffer>this.result, transToDestination)
  }
}

/** 音频数据传递至设备, 并 调用 绘制波形图 */
const transToDestination = (audioBuffer: AudioBuffer) => {
  audioBufSourceNode!.buffer = audioBuffer
  audioBufSourceNode!.start()

  drawBar()
}

/** 循环绘制单签帧数据 */
const drawBar = () => {

  showCtx.clearRect(0, 0, gStore.CURRENT_SIZE_WIDTH, gStore.CURRENT_SIZE_HEIGHT)

  audioAnalyser.getByteFrequencyData(rangeData)
  let localBarHeight
  let moveX
  let supplement
  for (let i = 0; i< rangeData.length; i++) {
    localBarHeight = rangeData[i]
    moveX = gStore.CURRENT_SIZE_WIDTH / 2
    supplement = i * 8

    showCtx.fillStyle = upperFringe
    showCtx.fillRect(moveX + supplement, gStore.CURRENT_SIZE_HEIGHT / 2, barWidth, -localBarHeight)
    showCtx.fillRect(moveX - supplement, gStore.CURRENT_SIZE_HEIGHT / 2, barWidth, -localBarHeight)

    showCtx.fillStyle = downFringe
    showCtx.fillRect(moveX + supplement, gStore.CURRENT_SIZE_HEIGHT / 2, barWidth, localBarHeight)
    showCtx.fillRect(moveX - supplement, gStore.CURRENT_SIZE_HEIGHT / 2, barWidth, localBarHeight)

  }

  recordAnimation = requestAnimationFrame(drawBar)
}

/** 切换播放状态 */
const switchState = () => {
  if (audioContext?.state === 'running') {
    cancelAnimationFrame(<number>recordAnimation)
    recordAnimation = null
    audioContext.suspend()
  } else if (audioContext?.state === 'suspended') {
    audioContext.resume()
    drawBar()
  }
}

/** 音频上下文准备 */
const drawUpContext = () => {
  const localContext = new AudioContext()

  // 媒体数据
  const audioNode = localContext.createBufferSource()

  // 解码器
  audioAnalyser = localContext.createAnalyser()

  audioAnalyser.fftSize = 2 ** barTotalCardinal
  audioNode.connect(audioAnalyser)

  // 解码器, 连接音频渲染设备(通常指 音响,耳机等)
  audioAnalyser.connect(localContext.destination)

  audioBufSourceNode = audioNode

  audioContext = localContext

  rangeData = new Uint8Array(audioAnalyser.frequencyBinCount)
}

/** 提示文案 */
const RPlaceholder = () => {
  showCtx.clearRect(0, 0, gStore.CURRENT_SIZE_WIDTH, gStore.CURRENT_SIZE_HEIGHT)

  const showText = '鼠标左键:暂停/播放;右键:选择音频'
  showCtx.fillStyle = 'rgba(62,62,62,.4)'
  const fontSize = Math.ceil((gStore.CURRENT_SIZE_WIDTH - reservePad * 2) / showText.length)

  showCtx.font = `${fontSize}px Verdana`

  showCtx.fillText(showText, reservePad, (gStore.CURRENT_SIZE_HEIGHT + fontSize) / 2)

}

onMounted(() => {

  if (!showNode.value) { return }

  showCtx = showNode.value.getContext('2d') as CanvasRenderingContext2D

  // Size the canvas: prefer dimensions already in the store, otherwise
  // measure the #app-main container and publish the result to the store.
  let localBarHeight: number
  if (gStore.CURRENT_SIZE_HEIGHT) {
    showNode.value.width = gStore.CURRENT_SIZE_WIDTH
    showNode.value.height = gStore.CURRENT_SIZE_HEIGHT
    localBarHeight = gStore.CURRENT_SIZE_HEIGHT / 2
  } else {
    const appNode = document.getElementById('app-main') as HTMLElement

    const { offsetWidth, offsetHeight } = appNode
    // Container padding counted on both sides; extra 10px trimmed from height.
    const surplus = parseInt(window.getComputedStyle(appNode).padding) * 2

    const localW = offsetWidth - surplus
    const localH = offsetHeight - surplus - 10
    gStore.SET_CURRENT_SIZE(localW, localH)

    localBarHeight = localH / 2
  }

  // Both gradients span the identical vertical strip; only the color stops
  // differ — the original duplicated the construction code verbatim.
  const makeGradient = (stops: ReadonlyArray<[number, string]>): CanvasGradient => {
    const g = showCtx.createLinearGradient(0, localBarHeight - barGap, 2, localBarHeight - barGap - barHeight)
    for (const [offset, color] of stops) {
      g.addColorStop(offset, color)
    }
    return g
  }

  upperFringe = makeGradient([
    [0, '#1E90FF'], [.25, '#FF7F50'], [.5, '#8A2BE2'], [.75, '#4169E1'], [1, '#00FFFF'],
  ])
  downFringe = makeGradient([
    [0, '#1E90FF'], [.25, '#FFD700'], [.5, '#8A2BE2'], [.75, '#4169E1'], [1, '#FF0000'],
  ])

  // Pick the fftSize exponent so the mirrored bar pairs roughly fill the width.
  const barTotal = (gStore.CURRENT_SIZE_WIDTH + 2 * barGap) / ((barGap + barWidth) * 2)
  barTotalCardinal = Math.ceil(Math.log2(barTotal))

  drawUpContext()

  RPlaceholder()

})

onBeforeUnmount(() => {

  // Close for any non-closed state: the original only closed a 'running'
  // context, so unmounting while paused ('suspended') leaked the audio
  // context and its hardware resources.
  if (audioContext && audioContext.state !== 'closed') {
    void audioContext.close()
  }

  if (recordAnimation !== null) {
    cancelAnimationFrame(recordAnimation)
    recordAnimation = null
  }

})
</script>

<style lang="scss">

canvas {
  border: 1px dashed;
}
</style>