<template>
  <svg class="icon voice" aria-hidden="true" v-if="!handleIng"><use xlink:href="#icon-yuyin" @click="startHandle"></use></svg>
  <img class="voice" @click="startHandle" src="../../assets/images/voice.gif" v-if="handleIng">
</template>

<script setup>
import {onMounted,onBeforeUnmount, ref} from "vue";
import eventBus from "src/utils/eventBus.js";
import {useSysStore} from "stores/sysStore.js";
import {tips} from "src/utils/util.js";
import {event, useQuasar} from "quasar";

// Quasar instance, used for toast notifications via tips.warning().
let $q = useQuasar()
// Whether a voice-recognition session is active; drives the icon <-> gif swap in the template.
let handleIng = ref(false)
// Aliyun NLS credentials (appkey + token) loaded from the system store.
let aliConfig = useSysStore().aliConfig;

/**
 * Toggle the voice-recognition session.
 * Aborts with a warning toast when the Aliyun appkey is missing.
 * When already recording: tears down the socket and the microphone.
 * When idle: opens the socket first, then starts streaming audio.
 */
const startHandle = async () => {
  if (!aliConfig.appkey) {
    return tips.warning($q, '未获取到相关配置，请联系管理员检查！')
  }

  if (handleIng.value) {
    // Active session — stop everything.
    handleIng.value = false
    disconnectWebSocket()
    stopRecording()
  } else {
    // Idle — connect, then capture.
    handleIng.value = true
    await connectWebSocket()
    await startRecording()
    // eventBus.$emit('sendCommand','voice');
  }
}

// Allow other components to trigger voice recognition over the event bus.
// A named handler is used so it can be detached on unmount — the previous
// anonymous listener was never removed, so it outlived the component and
// stacked up a duplicate handler on every remount.
const onReceiveCommand = (command) => {
  if (command == 'voice') {
    startHandle();
  }
}
eventBus.$on('receiveCommand', onReceiveCommand)

// Clean up all resources when the component unmounts: detach the bus
// listener, stop the microphone pipeline, and close the NLS socket.
onBeforeUnmount(() => {
  eventBus.$off('receiveCommand', onReceiveCommand)
  stopRecording()
  disconnectWebSocket()
})

// ---- Module-level session state ----
// Created by connectWebSocket() / startRecording(); released by
// disconnectWebSocket() / stopRecording().
let websocket = null       // WebSocket to the Aliyun NLS gateway
let audioContext = null    // Web Audio context (created at 16 kHz)
let scriptProcessor = null // AudioWorkletNode that forwards PCM chunks
let audioInput = null      // MediaStreamSource wrapping the microphone
let audioStream = null     // Raw getUserMedia() stream
/**
 * Open a WebSocket to the Aliyun NLS gateway and send the
 * StartTranscription command.
 *
 * Resolves only once the server acknowledges with 'TranscriptionStarted'.
 * (The previous version resolved on the first message of ANY type other
 * than 'SentenceEnd' — including 'TaskFailed' — so recording would start
 * even when the task was rejected.) Rejects on socket error or TaskFailed.
 * Finalized sentences are broadcast on the event bus as 'voiceText'.
 */
function connectWebSocket() {
  return new Promise((resolve, reject) => {
    // NOTE(review): the token travels in the URL query string and may land
    // in proxy/server logs — confirm this matches Aliyun's guidance.
    const socketUrl = `wss://nls-gateway.cn-shanghai.aliyuncs.com/ws/v1?token=${aliConfig.token}`
    websocket = new WebSocket(socketUrl)

    websocket.onopen = () => {
      const startTranscriptionMessage = {
        header: {
          appkey: aliConfig.appkey,
          namespace: 'SpeechTranscriber',
          name: 'StartTranscription',
          task_id: generateUUID(),
          message_id: generateUUID(),
        },
        payload: {
          format: 'pcm',
          sample_rate: 16000,
          enable_intermediate_result: true,
          enable_punctuation_prediction: true,
          enable_inverse_text_normalization: true
        }
      }
      websocket.send(JSON.stringify(startTranscriptionMessage))
    }

    websocket.onmessage = (event) => {
      let msg
      try {
        msg = JSON.parse(event.data)
      } catch (e) {
        console.error('Unparseable NLS message', event.data)
        return
      }
      switch (msg.header?.name) {
        case 'TranscriptionStarted':
          // Server accepted the task — safe to start streaming audio.
          resolve()
          break
        case 'SentenceEnd': {
          const text = msg.payload?.result
          if (text) {
            console.log(text)
            eventBus.$emit('voiceText', text)
          }
          break
        }
        case 'TaskFailed':
          reject(new Error(msg.header?.status_text || 'TaskFailed'))
          break
        default:
          // Intermediate results and other events — nothing to do.
          break
      }
    }

    websocket.onerror = (err) => reject(err)
    websocket.onclose = () => {
      // Nothing to clean up here; disconnectWebSocket() owns teardown.
    }
  })
}
/**
 * 生成唯一 ID
 */
function generateUUID() {
  return ([1e7] + -1e3 + -4e3 + -8e3 + -1e11).replace(/[018]/g, c =>
    (c ^ crypto.getRandomValues(new Uint8Array(1))[0] & 15 >> c / 4).toString(16)
  ).replace(/-/g, '')
}
/**
 * Close the NLS WebSocket (if one is open) and drop the reference.
 * Safe to call when no socket exists.
 */
function disconnectWebSocket() {
  if (websocket !== null) {
    websocket.close()
  }
  websocket = null
}
/**
 * Start capturing microphone audio and stream it to the NLS socket.
 *
 * Captures at 16 kHz via an inline AudioWorklet processor, converts each
 * Float32 chunk to 16-bit signed PCM, and sends it while the socket is OPEN.
 *
 * Fix: the previous catch only logged, leaving handleIng stuck at `true`
 * (the recording gif kept spinning) and leaking whatever was acquired
 * before the failure (mic stream, AudioContext, open socket). On error we
 * now release everything and restore the idle UI state.
 */
async function startRecording() {
  try {
    audioStream = await navigator.mediaDevices.getUserMedia({audio: true})
    audioContext = new (window.AudioContext || window.webkitAudioContext)({sampleRate: 16000})

    // Inline AudioWorklet processor shipped as base64: it copies input to
    // output and forwards each input chunk to the main thread via its port.
    const base64AudioProcessor = 'Y2xhc3MgQXVkaW9Qcm9jZXNzb3IgZXh0ZW5kcyBBdWRpb1dvcmtsZXRQcm9jZXNzb3IgewogIGNvbnN0cnVjdG9yKG9wdGlvbnMpIHsKICAgIHN1cGVyKG9wdGlvbnMpOwogICAgdGhpcy5idWZmZXJTaXplID0gb3B0aW9ucy5wcm9jZXNzb3JPcHRpb25zPy5idWZmZXJTaXplIHx8IDIwNDg7CiAgICB0aGlzLnBvcnQub25tZXNzYWdlID0gKGV2ZW50KSA9PiB7CiAgICAgIC8vIEhhbmRsZSBtZXNzYWdlcyBmcm9tIHRoZSBtYWluIHRocmVhZCBpZiBuZWVkZWQKICAgIH07CiAgfQoKICBwcm9jZXNzKGlucHV0cywgb3V0cHV0cywgcGFyYW1ldGVycykgewogICAgY29uc3QgaW5wdXQgPSBpbnB1dHNbMF07CiAgICBjb25zdCBvdXRwdXQgPSBvdXRwdXRzWzBdOwogICAgCiAgICBpZiAoaW5wdXQubGVuZ3RoID4gMCkgewogICAgICAvLyBDb3B5IGlucHV0IHRvIG91dHB1dAogICAgICBmb3IgKGxldCBjaGFubmVsID0gMDsgY2hhbm5lbCA8IG91dHB1dC5sZW5ndGg7IGNoYW5uZWwrKykgewogICAgICAgIGlmIChpbnB1dFtjaGFubmVsXSAmJiBvdXRwdXRbY2hhbm5lbF0pIHsKICAgICAgICAgIG91dHB1dFtjaGFubmVsXS5zZXQoaW5wdXRbY2hhbm5lbF0pOwogICAgICAgIH0KICAgICAgfQogICAgICAKICAgICAgLy8gU2VuZCBkYXRhIHRvIG1haW4gdGhyZWFkCiAgICAgIHRoaXMucG9ydC5wb3N0TWVzc2FnZShpbnB1dFswXSk7CiAgICB9CiAgICAKICAgIHJldHVybiB0cnVlOyAvLyBLZWVwIHByb2Nlc3NvciBhbGl2ZQogIH0KfQoKcmVnaXN0ZXJQcm9jZXNzb3IoJ2F1ZGlvLXByb2Nlc3NvcicsIEF1ZGlvUHJvY2Vzc29yKTs=';
    const audioProcessorCode = atob(base64AudioProcessor);
    const blob = new Blob([audioProcessorCode], { type: 'application/javascript' });
    const audioWorkletUrl = URL.createObjectURL(blob);
    await audioContext.audioWorklet.addModule(audioWorkletUrl);
    // The module is compiled by addModule; the blob URL can be released.
    URL.revokeObjectURL(audioWorkletUrl);

    audioInput = audioContext.createMediaStreamSource(audioStream)
    // AudioWorkletNode replaces the deprecated ScriptProcessorNode.
    scriptProcessor = new AudioWorkletNode(audioContext, 'audio-processor', {
      numberOfInputs: 1,
      numberOfOutputs: 1,
      channelCount: 1,
      processorOptions: {
        bufferSize: 2048
      }
    })

    // Each message carries a Float32Array chunk from the worklet thread.
    scriptProcessor.port.onmessage = function (event) {
      const inputData = event.data;
      // Clamp to [-1, 1] and scale to signed 16-bit PCM, as required by
      // the 'pcm' format declared in StartTranscription.
      const inputData16 = new Int16Array(inputData.length)
      for (let i = 0; i < inputData.length; ++i) {
        inputData16[i] = Math.max(-1, Math.min(1, inputData[i])) * 0x7FFF
      }
      // Only push audio while the socket is actually open.
      if (websocket && websocket.readyState === WebSocket.OPEN) {
        websocket.send(inputData16.buffer)
      }
    }

    audioInput.connect(scriptProcessor)
    scriptProcessor.connect(audioContext.destination)
  } catch (e) {
    console.error('录音失败', e)
    // Release anything acquired before the failure and restore idle state.
    stopRecording()
    disconnectWebSocket()
    handleIng.value = false
  }
}
/**
 * Tear down the audio capture pipeline and release every handle.
 * Idempotent: safe to call repeatedly or before anything was started —
 * every reference is null-checked and nulled afterwards.
 */
function stopRecording() {
  // Detach the graph nodes first, then stop the underlying tracks.
  if (scriptProcessor) scriptProcessor.disconnect()
  if (audioInput) audioInput.disconnect()
  if (audioStream) {
    for (const track of audioStream.getTracks()) {
      track.stop()
    }
  }
  if (audioContext && typeof audioContext.close === 'function') {
    audioContext.close()
  }
  scriptProcessor = null
  audioInput = null
  audioStream = null
  audioContext = null
}
</script>

<style scoped lang="scss">
.voice{
  width: 30px;
  height: 30px;
  margin-right: 10px;
  cursor: pointer;
  transition: all .2s ease;
  color: #494949;
}

.voice:hover {
  transform: scale(1.2);
  transition: all .2s ease;
}
</style>
