export class AudioWave {
  canvasWidth = 0 // canvas width in logical pixels
  canvasHeight = 0 // canvas height in logical pixels
  timer = null // polling timer used by stopAudio() while the source is still loading
  audioContext = null // Web Audio context (created lazily in startAudio)
  analyser = null // audio analyser node
  audioSource = null // currently playing buffer source node
  sourceCache = new Set() // keeps source nodes referenced so GC cannot interrupt playback

  /**
   * Audio-spectrum canvas animation for WeChat mini-programs.
   * @param {String} canvasId canvas selector (e.g. '#wave')
   * @param {Object} options optional settings:
   * {
   *  barWidth,        // width of each bar, default 10
   *  barHeightScale,  // bar height scale factor, default 80
   *  barMargin,       // horizontal gap between bars, default 8
   *  horizonPadding,  // left/right padding where no bars are drawn, default 5
   *  fftSize,         // FFT size, power of two in [32, 32768], default 1024
   *  onStop,          // callback fired when audio / animation ends
   *  onError          // callback fired on any error
   * }
   */
  constructor(canvasId, options = {}) {
    this.canvasId = canvasId
    this.canvas = null
    this.ctx = null
    this.animationId = null // requestAnimationFrame handle

    // `??` (not `||`) so an explicit 0 is honored for the layout options
    this.barWidth = options.barWidth ?? 10 // bar width
    this.barHeightScale = options.barHeightScale ?? 80 // bar height scale
    this.barMargin = options.barMargin ?? 8 // gap between bars
    this.horizonPadding = options.horizonPadding ?? 5 // horizontal left/right padding
    this.fftSize = options.fftSize || 1024 // FFT size [32, 32768]; 0 is invalid, so `||` is intentional

    this.fs = wx.getFileSystemManager() // file manager, used to read local audio files
    this.onStop = options.onStop || null // fired when audio / animation ends
    this.onError = options.onError || null // fired on any error

    this.createCanvas()
  }

  /**
   * Looks up the canvas node, scales the backing store by the device pixel
   * ratio for crisp rendering, and moves the origin to the canvas center
   * with the y axis pointing up.
   */
  createCanvas() {
    const dpr = wx.getWindowInfo().pixelRatio

    wx.createSelectorQuery()
      .select(this.canvasId)
      .fields({
        node: true,
        size: true,
      })
      .exec((res) => {
        // Guard: the selector may not match any node yet (wrong id, or
        // called before the canvas is rendered).
        if (!res || !res[0] || !res[0].node) {
          this.handleError(new Error(`canvas not found: ${this.canvasId}`))
          return
        }

        // Canvas node and its 2D rendering context
        const canvas = res[0].node
        const ctx = canvas.getContext('2d')

        // Actual rendered size of the canvas
        const renderWidth = res[0].width
        const renderHeight = res[0].height
        this.canvasWidth = renderWidth
        this.canvasHeight = renderHeight

        // Scale the backing store by dpr so drawing is not blurry
        canvas.width = renderWidth * dpr
        canvas.height = renderHeight * dpr
        ctx.scale(dpr, dpr)

        // Move the origin to the canvas center and flip y (positive y up)
        ctx.translate(renderWidth / 2, renderHeight / 2)
        ctx.scale(1, -1)

        this.canvas = canvas
        this.ctx = ctx
      })
  }

  /**
   * Creates the Web Audio context and the analyser node.
   * @param {Number} fftSize FFT size for the analyser
   */
  createWebAudioCtx(fftSize = 128) {
    this.audioContext = wx.createWebAudioContext()

    this.analyser = this.audioContext.createAnalyser()
    this.analyser.fftSize = fftSize
  }

  /**
   * Loads and plays an audio file, driving the spectrum animation.
   * @param {String} url audio url
   * @param {Boolean} is_remote whether url is a remote address
   * onlineUrl = 'https://website/audio/test_audio.mp3';
   * localUrl = '/resources/audio/test_audio.mp3';
   */
  startAudio(url, is_remote = false) {
    // !!! The context is created lazily here: in the multi-platform app
    // runtime, audioSource.start() does not play when the context was
    // created up-front.
    this.createWebAudioCtx(this.fftSize)
    const { audioContext, analyser, onStop } = this

    this.loadAudio(url, is_remote)
      .then((buffer) => {
        const audioSource = audioContext.createBufferSource()
        audioSource.buffer = buffer
        audioSource.connect(analyser)
        audioSource.connect(audioContext.destination)
        // Keep a strong reference so GC cannot collect the node and cut
        // the audio short.
        this.sourceCache.add(audioSource)

        audioSource.onended = () => {
          // Stop the animation
          this.stopAnimate()

          // Run the onStop callback
          onStop && onStop(buffer)
        }
        this.audioSource = audioSource

        // Start playback and the animation loop
        try {
          this.audioSource.start()
          this.startAnimate()
        } catch (err) {
          console.error(err)
          // Route to the error callback instead of swallowing silently
          this.handleError(err)
        }
      })
      .catch((err) => {
        console.log('fail', err)
        this.handleError(err)
      })
  }

  /**
   * Clears the stop-polling timer, if any.
   */
  clearTimer() {
    if (this.timer != null) {
      clearInterval(this.timer)
      this.timer = null
    }
  }

  /**
   * Stops playback and the animation. If the source is still being
   * loaded, retries every 200 ms until it exists; the poll is bounded so
   * it cannot run forever when loading failed.
   */
  stopAudio() {
    this.clearTimer()
    if (this.audioSource == null) {
      let attempts = 0
      this.timer = setInterval(() => {
        attempts += 1
        // Stop once the source exists, or give up after ~10 s
        if (this.audioSource != null || attempts >= 50) {
          this.clearTimer()
          this._stopSource()
        }
      }, 200)
    } else {
      this._stopSource()
    }
  }

  /**
   * Stops the current source node and the animation. Safe to call when
   * the source has already ended (stop() throws in that case) or when
   * there is no source at all.
   */
  _stopSource() {
    const source = this.audioSource
    if (source == null) return
    try {
      source.stop()
    } catch (err) {
      // InvalidStateError when the source already stopped — ignore
    }
    // Stop the animation (also drops the source from the cache)
    this.stopAnimate()
    this.audioSource = null
  }

  /**
   * Starts the spectrum animation loop: one vertical bar per sampled
   * frequency bucket, mirrored around the canvas center.
   */
  startAnimate() {
    const {
      ctx,
      canvas,
      canvasWidth,
      canvasHeight,
      analyser,
      barWidth,
      barHeightScale,
      barMargin,
      horizonPadding,
    } = this

    // Frequency data buffer, refilled every frame
    const bufferLength = analyser.frequencyBinCount
    const dataArray = new Uint8Array(bufferLength)

    // Per-frame drawing function
    const animate = () => {
      ctx.clearRect(
        -canvasWidth / 2,
        -canvasHeight / 2,
        canvasWidth,
        canvasHeight
      )

      // Sample the current frequency data and normalize it to [0, 1].
      // Called as a method (not an unbound destructured reference).
      analyser.getByteFrequencyData(dataArray)
      const normalizedArr = this.normalizedBuffer(dataArray)

      const barCount = Math.ceil(canvasWidth / (barWidth + barMargin))
      const halfBarCount = Math.floor(barCount / 2)
      const barStep = Math.floor(bufferLength / barCount)

      // Draw the bars, mirrored so index 0 sits at the center
      for (let i = -halfBarCount; i <= halfBarCount; i++) {
        const index = Math.abs(i) * barStep
        const item = normalizedArr[index]
        const barHeight = Math.round(item * barHeightScale) // bar height in px
        const x = i * (barMargin + barWidth) - (barMargin + barWidth) / 2
        const y = Math.ceil(-barHeight / 2) // vertically centered

        // Skip bars that fall inside the horizontal padding zones
        if (
          x > -canvasWidth / 2 + horizonPadding &&
          x < canvasWidth / 2 - horizonPadding
        ) {
          this.drawItem(x, y, barWidth, barHeight)
        }
      }

      // Schedule the next frame
      this.animationId = canvas.requestAnimationFrame(animate)
    }

    // Kick off the animation loop
    animate()
  }

  /**
   * Stops the animation loop, clears the canvas and releases the cached
   * source node. A repeated call is a no-op.
   */
  stopAnimate() {
    const { ctx, canvas, canvasWidth, canvasHeight, animationId, sourceCache } =
      this
    if (animationId) {
      ctx.clearRect(
        -canvasWidth / 2,
        -canvasHeight / 2,
        canvasWidth,
        canvasHeight
      )
      this.drawOpacity()
      canvas.cancelAnimationFrame(animationId)
      this.animationId = null // so calling stopAnimate() again does nothing
      // Playback is done: allow the source node to be GC'd
      sourceCache.delete(this.audioSource)
    }
  }

  /**
   * Draws one spectrum bar: a rectangle capped with semicircles top and bottom.
   * @param {Number} x bar center x
   * @param {Number} y top of the rectangle (bar is vertically centered)
   * @param {Number} w bar width
   * @param {Number} h bar height
   * @param {Number} opacity bar opacity, clamped to at least 0.1
   */
  drawItem(x, y, w, h, opacity = 1) {
    const baseFixedY = 1 // 1px overlap to hide seams between rect and caps
    const baseFixedW = 1
    const radius = w / 2
    const alpha = Math.max(0.1, opacity) // don't mutate the parameter
    const color = this.getBarColor(alpha)
    this.drawCircle(x, h / 2, radius, 0, Math.PI, color)
    this.drawRect(x, y - baseFixedY, w + baseFixedW, h + baseFixedY, color)
    this.drawCircle(x, -h / 2, radius, Math.PI, 2 * Math.PI, color)
  }

  /**
   * Draws a filled (and stroked) arc centered at (x, y).
   */
  drawCircle(
    x,
    y,
    radius,
    startAngle = 0,
    endAngle = 2 * Math.PI,
    color = '#ffffff'
  ) {
    this.ctx.beginPath()
    this.ctx.strokeStyle = color
    this.ctx.fillStyle = color
    this.ctx.arc(x, y, radius, startAngle, endAngle, false)
    this.ctx.stroke()
    this.ctx.fill()
    this.ctx.closePath()
  }

  /**
   * Draws a filled rectangle horizontally centered at x.
   */
  drawRect(x, y, w, h, color = '#ffffff') {
    this.ctx.strokeStyle = color
    this.ctx.fillStyle = color
    this.ctx.fillRect(x - w / 2, y, w, h)
  }

  /**
   * Fills the whole canvas with a fully transparent rectangle.
   */
  drawOpacity() {
    const { ctx, canvasWidth, canvasHeight } = this
    ctx.fillStyle = 'rgba(255,255,255,0)'
    ctx.fillRect(-canvasWidth / 2, -canvasHeight / 2, canvasWidth, canvasHeight)
  }

  /**
   * Loads the audio file and decodes it into an AudioBuffer.
   * @param {String} url audio url
   * @param {Boolean} is_remote whether url is a remote address
   * @returns {Promise<AudioBuffer>}
   */
  loadAudio(url, is_remote = false) {
    const { audioContext } = this
    return new Promise((resolve, reject) => {
      // Shared decode step for both the remote and local branches
      const decode = (data) => {
        audioContext.decodeAudioData(
          data,
          (buffer) => resolve(buffer),
          (err) => {
            console.error('decodeAudioData fail', err)
            reject(err)
          }
        )
      }

      if (is_remote) {
        // Remote file: download as an ArrayBuffer
        wx.request({
          url,
          responseType: 'arraybuffer',
          success: (res) => decode(res.data),
          fail: (err) => {
            console.error('request fail', err)
            reject(err)
          },
        })
      } else {
        // Local file: read from the mini-program file system
        this.fs.readFile({
          filePath: url,
          success: (res) => decode(res.data),
          fail: (err) => {
            console.error('err:', err)
            reject(err)
          },
        })
      }
    })
  }

  /**
   * Bar color with the given opacity.
   */
  getBarColor(opacity = 1) {
    return `rgba(76, 73 ,233, ${opacity})`
  }

  /**
   * Normalizes a byte frequency buffer into the range [0, 1].
   * Returns all zeros for a flat buffer (range 0) and [] for empty input.
   * @param {Uint8Array|Array<Number>} data raw byte samples
   * @returns {Array<Number>}
   */
  normalizedBuffer(data = []) {
    const copyData = [...data]
    const max = Math.max(...copyData)
    const min = Math.min(...copyData)

    const range = max - min

    // `|| 0` maps the NaN produced by 0/0 (flat buffer) to 0
    return copyData.map((sample) => (sample - min) / range || 0)
  }

  /**
   * Forwards an error to the onError callback, if one was provided.
   */
  handleError(err) {
    this.onError && this.onError(err)
  }
}

export default AudioWave
