// 组合函数
import { reactive, onMounted } from 'vue'
import { encodeWAV } from './lib/transform'
import Recorder from 'js-audio-recorder' 
import { ElMessage } from 'element-plus'

// By convention, composable function names should start with "use" (note: this one is named `videoFun`, which breaks that convention — consider renaming to `useVideoRecorder` when callers can be updated)
export function videoFun() {
  // js-audio-recorder instance; created lazily on the first recording.
  let recorder = null

  // Canvas elements and 2D contexts for the waveform displays.
  // NOTE(review): pCanvas/pCtx ('playChart') are initialized but never drawn
  // on below — confirm whether drawPlay() was meant to use them.
  let oCanvas = null
  let pCanvas = null
  let ctx = null
  let pCtx = null
  // requestAnimationFrame ids for the record / playback draw loops.
  let drawRecordId = null
  let drawPlayId = null
  // Reactive recording/playback state exposed to the component.
  let videoState = reactive({
    playTimer: null,   // interval id for compiling-mode live playback
    duration: 0,       // recording duration (string, 2 decimals)
    fileSize: 0,       // recorded data size in bytes
    vol: 0,            // current volume (string, 2 decimals)
    play:false // false: not playing; 'pause': paused; 'play': playing
  })

  // Look up the canvases once the component's DOM exists.
  onMounted(() => {
    componentDidMount()
  })
  // Start (or restart) recording; lazily creates the Recorder and wires its
  // callbacks, then kicks off the recording waveform animation.
  const startVideoRecord = () => {
    clearPlay()
    // BUG FIX: was `videoState.play == false` — a comparison with no effect;
    // the intent is to reset the playback state flag.
    videoState.play = false
    const config = {
      sampleBits: 16,                 // sample size: 8 or 16 bits, default 16
      sampleRate: 16000,              // sample rate: 11025/16000/22050/24000/44100/48000; browser default varies (Chrome: 48000)
      numChannels: 1,                 // channels: 1 or 2, default 1
      // compiling: false,            // convert while recording (0.x feature, being re-added in 1.x), default false
    }

    if (!recorder) {
      recorder = new Recorder(config)
      // Mirror recording progress into the reactive state for the UI.
      // (The original chained these with the comma operator.)
      recorder.onprogress = (params) => {
        videoState.duration = params.duration.toFixed(2)
        videoState.fileSize = params.fileSize
        videoState.vol = params.vol.toFixed(2)
        // params.data is only collected in compiling mode.
        if (config.compiling) {
          console.log('音频总数据：', params.data)
        }
      }

      recorder.onplay = () => {
        console.log('%c回调监听，开始播放音频', 'color: #2196f3')
      }
      recorder.onpauseplay = () => {
        console.log('%c回调监听，暂停播放音频', 'color: #2196f3')
      }
      recorder.onresumeplay = () => {
        console.log('%c回调监听，恢复播放音频', 'color: #2196f3')
      }
      recorder.onstopplay = () => {
        console.log('%c回调监听，停止播放音频', 'color: #2196f3')
      }
      recorder.onplayend = () => {
        console.log('%c回调监听，音频已经完成播放', 'color: #2196f3')
        // Playback finished: stop drawing and reset the play flag.
        stopDrawPlay()
        videoState.play = false
        console.log(videoState.play)
      }

      // In compiling mode, periodically pull newly recorded data and play it.
      if (config.compiling) {
        videoState.playTimer = setInterval(() => {
          if (!recorder) {
            return
          }

          const newData = recorder.getNextData()
          if (!newData.length) {
            return
          }
          // Merge the DataView chunks into one contiguous buffer.
          const byteLength = newData[0].byteLength
          const buffer = new ArrayBuffer(newData.length * byteLength)
          const dataView = new DataView(buffer)
          for (let i = 0, iLen = newData.length; i < iLen; ++i) {
            for (let j = 0, jLen = newData[i].byteLength; j < jLen; ++j) {
              dataView.setInt8(i * byteLength + j, newData[i].getInt8(j))
            }
          }

          // Wrap the raw PCM in a WAV container and play it.
          // NOTE(review): config.sampleRate is passed twice — verify against
          // encodeWAV's signature; the second argument may be wrong.
          const wavData = encodeWAV(dataView, config.sampleRate, config.sampleRate, config.numChannels, config.sampleBits)
          const blob = new Blob([wavData], { type: 'audio/wav' })

          blob.arrayBuffer().then((arraybuffer) => {
            // FIXME(review): `Player` is not defined or imported anywhere in
            // this file — this throws if compiling mode is ever enabled.
            Player.play(arraybuffer)
          })
        }, 3000)
      }
    } else {
      // Recorder already exists: stop any in-flight recording before restarting.
      recorder.stop()
    }

    recorder.start().then(() => {
      console.log('开始录音')
    }, (error) => {
      console.log(`异常了,${error.name}:${error.message}`)
    })
    // Start drawing the recording waveform.
    drawRecord()
  }
  // Stop recording and halt the recording waveform animation.
  const endVideoRecord = () => {
    if (recorder) {
      recorder.stop()
    }
    console.log('结束录音')
    if (drawRecordId) {
      cancelAnimationFrame(drawRecordId)
    }
    drawRecordId = null
  }
  // Stop audio playback, clear timers, and stop the playback waveform.
  const stopVideoPlay = () => {
    // BUG FIX: was `videoState.play == false` — a comparison with no effect;
    // assign to actually reset the playback state flag.
    videoState.play = false
    clearPlay()
    recorder && recorder.stopPlay()
    console.log('停止播放')
    stopDrawPlay()
  }
  // Play back the recording and switch from the record loop to the play loop.
  const playVideoRecord = () => {
    videoState.play = 'play'
    if (recorder) {
      recorder.play()
    }
    // Make sure the recording waveform loop is no longer running.
    if (drawRecordId) {
      cancelAnimationFrame(drawRecordId)
    }
    drawRecordId = null
    console.log('播放录音')
    if (recorder) {
      drawPlay()
    }
  }
  // Draw the playback waveform, re-scheduling itself each frame (~60fps).
  // NOTE(review): this draws on the *record* canvas (ctx/oCanvas); the unused
  // pCtx/pCanvas ('playChart') were presumably intended here — confirm.
  const drawPlay = () => {
    // Schedule the next frame before drawing the current one.
    drawPlayId = requestAnimationFrame(drawPlay)
    // Live playback analyser data (cleaned up: the original declared these
    // with a dangling `let` plus comma-operator assignments).
    const dataArray = recorder.getPlayAnalyseData()
    const bufferLength = dataArray.length

    // Fill the background.
    ctx.fillStyle = '  rgba(59, 130, 246, 0.5)'
    ctx.fillRect(0, 0, oCanvas.width, oCanvas.height)

    // Waveform stroke settings.
    ctx.lineWidth = 2
    ctx.strokeStyle = 'rgb(0, 0, 0)'

    ctx.beginPath()

    // Horizontal space per sample point; bufferLength points to draw in total.
    const sliceWidth = oCanvas.width * 1.0 / bufferLength
    let x = 0 // current x position

    for (let i = 0; i < bufferLength; i++) {
      const v = dataArray[i] / 128.0
      const y = v * oCanvas.height / 2

      if (i === 0) {
        // First point.
        ctx.moveTo(x, y)
      } else {
        // Remaining points.
        ctx.lineTo(x, y)
      }
      // Shift right for the next point.
      x += sliceWidth
    }

    // Finish the trace at the vertical midpoint of the right edge.
    ctx.lineTo(oCanvas.width, oCanvas.height / 2)
    ctx.stroke()
  }


  // Cancel the playback waveform animation loop, if running.
  const stopDrawPlay = () => {
    if (drawPlayId) {
      cancelAnimationFrame(drawPlayId)
    }
    drawPlayId = null
  }
  // Download the recording as a PCM file.
  // Consistency fix: like downloadVideoWAV, warn when nothing was recorded
  // (the original silently did nothing).
  const downloadVideoPCM = () => {
    if (recorder) {
      console.log('pcm: ', recorder.getPCMBlob())
      recorder.downloadPCM()
    } else {
      ElMessage.error('请先录音！')
    }
  }
  // Download the recording as a WAV file, or warn if nothing was recorded.
  const downloadVideoWAV = () => {
    if (!recorder) {
      ElMessage.error('请先录音！')
      return
    }
    recorder.downloadWAV()
  }
  // Return the recording as a WAV Blob, or undefined when nothing is recorded.
  const getVideoWAV = () => {
    if (!recorder) {
      return undefined
    }
    console.log('wav: ', recorder.getWAVBlob())
    return recorder.getWAVBlob()
  }
  // Convert the recording to MP3 and trigger a download, or warn if empty.
  // NOTE(review): relies on recorder.getWAV() returning the WAV DataView that
  // convertToMp3 expects — confirm against the js-audio-recorder API.
  const downloadVideoMP3 = async () => {
    if (!recorder) {
      ElMessage.error('请先录音！')
      return
    }
    const mp3Blob = await convertToMp3(recorder.getWAV())
    recorder.download(mp3Blob, 'recorder', 'mp3')
  }

  // Tear down the live-play interval and both waveform animation loops.
  const clearPlay = () => {
    if (videoState.playTimer) {
      clearInterval(videoState.playTimer)
      videoState.playTimer = null
    }
    if (drawRecordId) {
      cancelAnimationFrame(drawRecordId)
      drawRecordId = null
    }
    stopDrawPlay()
  }

  // Pause audio playback and freeze the playback waveform.
  const pauseVideoPlay = () => {
    videoState.play = 'pause'
    stopDrawPlay();
    if (recorder) {
      recorder.pausePlay();
    }
    console.log('暂停播放');
  }

  // Resume audio playback and restart the playback waveform loop.
  const resumeVideoPlay = () => {
    videoState.play = 'play'
    if (recorder) {
      recorder.resumePlay();
    }
    console.log('恢复播放');
    drawPlay();
  }
  // Look up the waveform canvases and cache their 2D contexts.
  // Called from onMounted; the elements may be absent, hence optional chaining.
  const componentDidMount = () => {
    oCanvas = document.getElementById('canvas')
    ctx = oCanvas?.getContext("2d")
    pCanvas = document.getElementById('playChart')
    // Consistency: same optional-chaining style as above (was
    // `pCanvas && pCanvas.getContext("2d")`, which stored `null` instead of
    // `undefined` when the element is missing; both are falsy).
    pCtx = pCanvas?.getContext("2d")
  }
  // Draw the live recording waveform; re-schedules itself each frame (~60fps).
  const drawRecord = () => {
    drawRecordId = requestAnimationFrame(drawRecord)

    // Current analyser snapshot from the recorder.
    const samples = recorder.getRecordAnalyseData()
    const total = samples.length

    // Paint the background.
    ctx.fillStyle = ' rgba(59, 130, 246, 0.5)'
    ctx.fillRect(0, 0, oCanvas.width, oCanvas.height)

    // Waveform stroke settings.
    ctx.lineWidth = 2
    ctx.strokeStyle = 'rgb(0, 0, 0)'

    ctx.beginPath()

    // Horizontal room for each of the `total` sample points.
    const step = oCanvas.width * 1.0 / total
    let x = 0

    for (let i = 0; i < total; i++) {
      const y = (samples[i] / 128.0) * oCanvas.height / 2
      if (i === 0) {
        // First point starts the path.
        ctx.moveTo(x, y)
      } else {
        // Subsequent points extend it.
        ctx.lineTo(x, y)
      }
      x += step
    }

    // Close the trace at the vertical midpoint of the right edge.
    ctx.lineTo(oCanvas.width, oCanvas.height / 2)
    ctx.stroke()
  }
 
  // Encode the recorded channel data into an MP3 Blob using lamejs.
  // FIXME(review): `lamejs` is referenced but never imported in this file —
  // it must be available as a global (e.g. via a <script> tag) or this throws.
  const convertToMp3 = async (wavDataView) => {
    // Read channel count / sample rate from the WAV header (these could also
    // be taken from the recorder config).
    const wav = lamejs.WavHeader.readHeader(wavDataView);
    const { channels, sampleRate } = wav;
    console.log('wav', wav)
    const mp3enc = new lamejs.Mp3Encoder(channels, sampleRate, 128); // 128 kbps
    // Left/right PCM channel data from the recorder.
    const result = recorder.getChannelData()
    const buffer = [];

    // BUG FIX: the original dereferenced `leftData.length` even when
    // `result.left` was falsy; fall back to empty typed arrays instead.
    const leftData = result.left
      ? new Int16Array(result.left.buffer, 0, result.left.byteLength / 2)
      : new Int16Array(0);
    const rightData = result.right
      ? new Int16Array(result.right.buffer, 0, result.right.byteLength / 2)
      : null;

    // Encode in lame's preferred 1152-sample chunks.
    // BUG FIX: the original looped to leftData.length + rightData.length,
    // over-iterating past the end for stereo input (harmless but wasteful);
    // each channel has leftData.length samples.
    const maxSamples = 1152;
    for (let i = 0; i < leftData.length; i += maxSamples) {
        const left = leftData.subarray(i, i + maxSamples);
        let mp3buf = null;

        if (channels === 2 && rightData) {
            const right = rightData.subarray(i, i + maxSamples);
            mp3buf = mp3enc.encodeBuffer(left, right);
        } else {
            mp3buf = mp3enc.encodeBuffer(left);
        }

        if (mp3buf.length > 0) {
            buffer.push(mp3buf);
        }
    }

    // Flush any samples still buffered inside the encoder.
    const enc = mp3enc.flush();

    if (enc.length > 0) {
        buffer.push(enc);
    }

    return new Blob(buffer, { type: 'audio/mp3' });
  }
 


  return {
    startVideoRecord,
    endVideoRecord,
    stopVideoPlay,
    playVideoRecord,
    downloadVideoPCM,pauseVideoPlay,resumeVideoPlay,downloadVideoMP3,
    downloadVideoWAV,videoState,getVideoWAV
  }
}