import axios from "axios";
class Dispatcher {
  constructor() {
    this.handlers = []
  }

  listen(handler) {
    this.handlers.push(handler)
  }

  emit(...args) {
    this.handlers.forEach(handler => {
      handler(...args)
    })
  }
}

class Player {
  // Web Audio playlist player. Tracks are appended via `append`, decoded into
  // AudioBuffers, and played through a shared AudioContext. Lifecycle events
  // are published through the Dispatcher instances below.
  constructor() {
    this.audioContext = new AudioContext()
    this.playList = []
    this.playIndex = 0
    // Handle of the spectrogram's requestAnimationFrame loop, so a new
    // play() can cancel the previous loop instead of stacking loops forever.
    this.animationFrameId = null

    // Sentinel returned by the `current` getter when the playlist is empty.
    this.emptyNode = {
      id: null,
      offset: 0,
      start: null,
      source: null,
      buffer: null
    }

    this.onPlay = new Dispatcher()
    this.onPause = new Dispatcher()
    this.onChange = new Dispatcher()
    this.onReady = new Dispatcher()
  }

  /**
   * Continuously draw `analyser`'s frequency data as a line graph onto the
   * #oscilloscope canvas. Cancels the render loop started by any previous
   * call so at most one loop runs at a time (the original leaked one
   * requestAnimationFrame loop per play()).
   * @param {AnalyserNode} analyser - node already connected to the source.
   */
  spectrogram(analyser) {
    const canvas = document.getElementById('oscilloscope')
    if (!canvas) {
      // No visualization target on this page; playback still works.
      return
    }
    const canvasCtx = canvas.getContext('2d')
    const WIDTH = canvas.width
    const HEIGHT = canvas.height
    const bufferLength = analyser.frequencyBinCount
    const dataArray = new Uint8Array(bufferLength)

    // Stop the loop left over from an earlier play()/spectrogram() call.
    if (this.animationFrameId !== null) {
      cancelAnimationFrame(this.animationFrameId)
    }

    const renderFrame = () => {
      this.animationFrameId = requestAnimationFrame(renderFrame)

      // Refresh the frequency snapshot for this frame.
      analyser.getByteFrequencyData(dataArray)

      // Clear the canvas before drawing the new frame.
      canvasCtx.fillStyle = 'rgb(200, 200, 200)'
      canvasCtx.fillRect(0, 0, WIDTH, HEIGHT)

      canvasCtx.lineWidth = 2
      canvasCtx.strokeStyle = 'rgb(0, 0, 0)'
      canvasCtx.beginPath()

      const sliceWidth = WIDTH / bufferLength
      let x = 0

      for (let i = 0; i < bufferLength; i++) {
        // Byte values are 0-255; /128 normalizes around 1.0.
        const v = dataArray[i] / 128.0
        const y = (v * HEIGHT) / 2

        if (i === 0) {
          canvasCtx.moveTo(x, y)
        } else {
          canvasCtx.lineTo(x, y)
        }
        x += sliceWidth
      }

      canvasCtx.lineTo(WIDTH, HEIGHT / 2)
      canvasCtx.stroke()
    }
    renderFrame()
  }

  /**
   * Fetch `url` as an ArrayBuffer and decode it into an AudioBuffer.
   * Unlike the original, rejects on network or decode failure instead of
   * leaving the returned promise pending forever.
   * @param {AudioContext} audioContext
   * @param {string} url
   * @returns {Promise<AudioBuffer>}
   */
  decodeAudioData(audioContext, url) {
    return axios({
      method: 'get',
      url: url,
      responseType: 'arraybuffer'
    }).then(
      (res) =>
        new Promise((resolve, reject) => {
          // Callback form kept for broader browser compatibility; the
          // error callback propagates decode failures to the caller.
          audioContext.decodeAudioData(res.data, resolve, reject)
        })
    )
  }

  /**
   * Append a track to the playlist. `model` is a packed string of the form
   * "<url>^$<id>". Emits onReady once the first track becomes playable.
   * Rejects if the track cannot be fetched or decoded.
   * @param {string} model
   */
  async append(model) {
    const [url, id] = model.split('^$')
    // Capture emptiness before the (async) push so onReady fires only for
    // the transition from empty to non-empty.
    const wasEmpty = this.isEmpty
    this.playList.push({
      id,
      offset: 0,
      start: null,
      source: null,
      buffer: await this.decodeAudioData(this.audioContext, url)
    })
    if (wasEmpty) {
      this.onReady.emit(this)
    }
  }

  /** Start (or resume) playback of the current track from its stored offset. */
  play() {
    // Nothing to play, or already playing.
    if (!this.playList.length || this.current.source) {
      return
    }
    const source = this.audioContext.createBufferSource()
    // Analyser node feeding the spectrogram visualization.
    const analyser = this.audioContext.createAnalyser()
    // A small FFT is sufficient; the source data is not fine-grained.
    analyser.fftSize = 512

    source.buffer = this.current.buffer
    // Auto-advance when the track plays to its end.
    source.onended = this.next.bind(this)
    source.connect(analyser)
    source.connect(this.audioContext.destination)
    source.start(0, this.current.offset)
    this.current.source = source
    this.current.start = this.audioContext.currentTime

    this.spectrogram(analyser)
    this.onPlay.emit(this)
  }

  /** Pause playback, remembering the playhead position in `offset`. */
  pause() {
    if (!this.playList.length || !this.current.source) {
      return
    }
    this.current.source.stop(0)
    this.current.source.disconnect(0)
    // Detach the auto-advance handler so stopping doesn't trigger next().
    this.current.source.onended = null
    this.current.source = null
    // Must read `position` before clearing `start` — it uses both.
    this.current.offset = this.position
    this.current.start = null

    this.onPause.emit(this)
  }

  /** Stop playback and rewind the current track to the beginning. */
  stop() {
    this.pause()
    this.current.offset = 0
    this.current.start = null
  }

  /** Advance to the next track (wraps to the first) and start playing. */
  next() {
    this.stop()
    this.playIndex++
    if (this.playIndex >= this.playList.length) {
      this.playIndex = 0
    }
    this.play()
    this.onChange.emit(this)
  }

  /** Go back to the previous track (wraps to the last) and start playing. */
  prev() {
    this.stop()
    this.playIndex--
    if (this.playIndex < 0) {
      this.playIndex = Math.max(this.playList.length - 1, 0)
    }
    this.play()
    this.onChange.emit(this)
  }

  /** True when the playlist has no playable entry. */
  get isEmpty() {
    return this.current === this.emptyNode
  }

  /** The active playlist entry, or the empty sentinel when none exists. */
  get current() {
    return this.playList[this.playIndex] || this.emptyNode
  }

  /** Playhead position in seconds within the current track. */
  get position() {
    if (!this.playList.length) {
      return 0
    }
    return (
      this.current.offset +
      (this.current.start !== null
        ? this.audioContext.currentTime - this.current.start
        : 0)
    )
  }

  /** Seek to `val` seconds: stop, move the offset, then restart playback. */
  set position(val) {
    if (!this.playList.length) {
      return
    }
    this.stop()
    this.current.offset = val
    this.current.start = null
    this.play()
  }

  /**
   * Duration of the current track in seconds. Falls back to 0.001 when no
   * buffer is loaded — presumably to avoid division by zero in progress
   * bars; confirm against UI callers before changing.
   */
  get duration() {
    return this.current.buffer ? this.current.buffer.duration : 0.001
  }
}

// Shared singleton player instance; constructing it creates the app's AudioContext.
export const player = new Player()
