<template>
  <!-- Voice-assistant chat page: navbar, scrollable conversation, bottom input bar -->
  <div class="ai">
    <Navbar title="语音助手" />
    <div class="content">
      <!-- Full-screen overlay shown while the user is holding the talk button
           and the recognizer has confirmed it started -->
      <div v-show="voiceStatus && !recognizeNotFinished" class="loading-bar">
        <div class="anim-wrapper">
          <img src="@/assets/ai/yvyinload.gif" />
        </div>
      </div>
      <!-- Conversation list; scrolled to bottom on every incoming message -->
      <div ref="refContent" class="wrapper">
        <EmptyUi v-if="!conversation.length" @select="handlePresetSelect" />
        <ChatItem
          v-for="(item, idx) in conversation"
          :key="idx"
          :item="item"
          @menu="handleChatMenu(item)"
        />

        <!-- Dead code kept for reference: previous inline bubble rendering,
             superseded by <ChatItem> above -->
        <!-- <div
          v-for="(item, idx) in conversation"
          :key="idx"
          class="conversion"
          :class="['type-' + item.type]"
        >
          <div class="bubble">
            <span>
              {{ item.content }}
            </span>
            <span v-if="item.ellipsis" class="expand" @click="expandItem(item, idx)"> 展开</span>
          </div>
        </div> -->
        <!-- Shown while the last conversation item is a question awaiting its answer -->
        <div v-if="requesting" class="requesting">正在请求中，请稍等</div>
      </div>
    </div>
    <!-- Bottom bar: toggle between keyboard input and push-to-talk voice -->
    <div class="operation-bar">
      <div class="method-toggle" @click="handleMethodChange">
        <img v-if="method === METHODS.VOICE" src="@/assets/ai/jianpan.png" />
        <img v-if="method === METHODS.INPUT" src="@/assets/ai/yvyin.png" />
      </div>
      <div class="input-area">
        <div v-if="method === METHODS.INPUT" class="input-wrapper">
          <input v-model="inputText" placeholder="点击输入文字" />
          <div class="generate-report" @click="sendQuestion(true)">提问</div>
        </div>
        <button
          v-else
          @touchstart="handleVoiceStart"
          @touchcancel="handleVoiceEnd"
          @touchend="handleVoiceEnd"
        >
          按住 说话
        </button>
      </div>
    </div>
  </div>
  <!-- Report-export overlay; shown when a type-8 (report) answer sets exportId -->
  <ExportPage
    class="export-view"
    v-if="exportId"
    :content="exportContent"
    :id="exportId"
    @close="exportId = ''"
  />
</template>

<script lang="ts" setup>
import { ref, onMounted, onUnmounted, nextTick, computed } from 'vue'
import { useRouter } from 'vue-router'
import { showToast } from 'vant'
import Navbar from '@/components/Navbar/index.vue'
import WebAudioSpeechRecognizer from '@/utils/webaudiospeechrecognizer.js'
import { signCallback, config } from '@/utils/tecentsign.js'
import ChatItem from './comp/chatItem.vue'
import EmptyUi from './comp/EmptyUi.vue'
import ExportPage from './comp/export.vue'

import { useUserStore } from '@/store'
import AiServe from '@/api/ai'

// Parameters for the Tencent Cloud real-time speech recognizer.
const params = {
  signCallback: signCallback, // auth signing callback
  // User credentials
  secretid: config.secretId,
  appid: config.appId,
  // Real-time recognition parameters
  engine_model_type: '16k_zh', // the built-in WebRecorder samples at 16k, so the 16k Chinese engine ('16k_zh') is required
  // The parameters below are optional and can be adjusted per business needs
  voice_format: 1,
  hotword_id: '08003a00000000000000000000000000',
  needvad: 1,
  filter_dirty: 1,
  filter_modal: 2,
  filter_punc: 0,
  convert_num_mode: 1,
  word_info: 2
}

// Input method toggle states: typed text vs. push-to-talk voice.
const METHODS = {
  INPUT: 1,
  VOICE: 2
}

// Conversation item roles: user question, server answer, server directive.
const CONVERSION_TYPE = {
  SENT: 1,
  RECEIVED: 2,
  DIRECTIVE: 10
}
// Current text in the input box; also accumulates voice-recognition results.
const inputText = ref<string>('')

/**
 * Send the current input text as a question via the QA API, then clear
 * the input box. A no-op (apart from an optional toast) when empty.
 *
 * @param isInput - true when triggered by the manual "提问" button; only
 *   then is the empty-input toast shown. Defaults to false so programmatic
 *   callers (voice flow, preset selection) may omit it — the original
 *   required parameter made those zero-arg call sites a compile error.
 */
const sendQuestion = (isInput = false) => {
  if (inputText.value) {
    AiServe.sendQaMessage({
      deviceId: 'foo', // NOTE(review): hard-coded device id — confirm whether a real device id should be passed
      text: inputText.value
    }).then(() => {
      console.log('sendqamsg finish')
    })
    // Clear immediately; the echoed question arrives back over the WebSocket.
    inputText.value = ''
  } else if (isInput) {
    showToast('问题不能为空')
  }
}

/**
 * Handle a preset question chosen from the empty-state UI: fill the input
 * box and send it immediately. Passes `false` explicitly (this is not the
 * manual input button), which also satisfies sendQuestion's declared
 * parameter — the original zero-argument call did not type-check.
 */
const handlePresetSelect = (q: string) => {
  inputText.value = q
  sendQuestion(false)
}
const router = useRouter()
// Active input method; defaults to keyboard input.
const method = ref(METHODS.INPUT)
// Chat history rendered in the template; items are appended by the
// WebSocket onmessage handler. (Commented entries below are sample data.)
const conversation = ref<TypeConversationItem[]>([
  // {
  //   type: CONVERSION_TYPE.SENT,
  //   content: '青岛市GDP是多少',
  //   ellipsis: false
  // },
  // {
  //   type: CONVERSION_TYPE.RECEIVED,
  //   rawContent: '2023年青岛市GDP为***',
  //   content: '2023年青岛市GDP为***'
  //   // ellipsis: true
  // }
])

/**
 * True while a question is awaiting its answer: i.e. the most recent
 * conversation entry is still the user's own (SENT) message.
 */
const requesting = computed(() => {
  const last = conversation.value[conversation.value.length - 1]
  if (!last) return false
  return last.type === CONVERSION_TYPE.SENT
})

/** Toggle the input method between keyboard entry and push-to-talk voice. */
const handleMethodChange = () => {
  method.value = method.value === METHODS.INPUT ? METHODS.VOICE : METHODS.INPUT
}

/**
 * Open the export view for a chat item by navigating to the export route.
 * NOTE(review): the inline <ExportPage> overlay (driven by exportId) is a
 * separate export mechanism — confirm which of the two is intended.
 */
const handleChatMenu = (item: TypeConversationItem) => {
  const { id, content } = item
  router.push({ path: '/ai/export', query: { id, content } })
}

// Report export state: id of the answer being exported ('' = overlay hidden).
const exportId = ref('')
// Content of the exported report, looked up from the conversation by id.
const exportContent = computed(() => {
  const id = exportId.value
  if (!id) return ''
  const match = conversation.value.find(
    c => c.id === id && c.type === CONVERSION_TYPE.RECEIVED
  )
  return match?.content
})

// True while the user is holding the talk button.
const voiceStatus = ref(false)
// Recognizer instance; recreated on every button press.
let Recognizer: any
// True from button press until the recognizer confirms it has started;
// used to detect presses that were too short.
const recognizeNotFinished = ref(false)

/**
 * Begin a push-to-talk speech-recognition session. Each recognized
 * sentence is appended to inputText; once recognition completes, the
 * accumulated text is sent as a question.
 */
const handleVoiceStart = (e: TouchEvent) => {
  e.preventDefault() // keep the long-press from triggering scroll/selection
  voiceStatus.value = true
  recognizeNotFinished.value = true
  Recognizer = new WebAudioSpeechRecognizer(params)

  Recognizer.OnRecognitionStart = res => {
    console.log('开始识别', res)
    recognizeNotFinished.value = false
  }
  Recognizer.OnSentenceBegin = res => {
    console.log('一句话开始', res)
  }
  Recognizer.OnSentenceEnd = res => {
    console.log('一句话结束', res)
    const result = res.result.voice_text_str
    inputText.value += result
  }
  Recognizer.OnRecognitionComplete = res => {
    console.log('识别结束', res)
    if (inputText.value) {
      // Not user-typed input, so pass false (no empty-input toast); the
      // original zero-argument call did not match sendQuestion's signature.
      sendQuestion(false)
    }
  }
  Recognizer.OnError = res => {
    console.log('识别失败', res, res.type)
    // showToast('语音识别失败，请重试！')
  }
  Recognizer.start()
}

/**
 * End the push-to-talk session. The 200ms delay lets the last audio
 * frames flush before stopping; if the recognizer never confirmed it
 * started, the press is treated as too short and a toast is shown.
 */
const handleVoiceEnd = () => {
  setTimeout(() => {
    if (recognizeNotFinished.value) {
      showToast('说话时间太短！')
    }
    recognizeNotFinished.value = false
    voiceStatus.value = false
    Recognizer?.stop()
  }, 200)
}

// Scroll container for the conversation list.
const refContent = ref<HTMLElement | null>(null)
let ws: NullAble<WebSocket> = null
let rid: number | undefined // reconnect timer id
let pid: number | undefined // keep-alive ping interval id

/**
 * Open the chat WebSocket, wire up message handling and a 10s keep-alive
 * ping, and schedule a reconnect 3s after an error.
 *
 * Message types observed here: 9 = echoed question, 10 = directive,
 * 0 = streamed answer chunk, 8 = report answer (opens the export overlay).
 */
const initWebsocket = () => {
  const url = import.meta.env.VITE_APP_WS_URL
  const { token } = useUserStore()

  ws = new WebSocket(`${url}?token=${token}`)
  // "{\"data\":{\"answer\":\"{\\\"data\\\":{\\\"classId\\\":1,\\\"domain\\\":4,\\\"textAnswer\\\":\\\"。\\\"},\\\"statusCode\\\":0}\",\"content\":\"1231231\",\"id\":\"bd57b337861c4e44a15581398d31aac1\"},\"type\":0}"
  ws.onopen = () => {
    pid = window.setInterval(() => {
      // Guard readyState: send() on a CONNECTING/CLOSING socket throws.
      if (ws && ws.readyState === WebSocket.OPEN) {
        ws.send('ping')
      }
    }, 1000 * 10)
  }
  ws.onmessage = evt => {
    let resp: any
    try {
      resp = JSON.parse(evt.data)
    } catch {
      // Ignore non-JSON frames (e.g. a plain-text pong reply to our ping)
      // instead of letting the handler throw.
      return
    }
    const { data, type } = resp
    console.log('onmessage', data, type)
    if (type == 9) {
      // Server echoes the question back so it appears in the conversation.
      conversation.value.push({
        type: CONVERSION_TYPE.SENT,
        id: data.id,
        content: data.content
      })
    }
    if (type == 10) {
      conversation.value.push({
        type: CONVERSION_TYPE.DIRECTIVE,
        content: '',
        isReport: false,
        id: data.id
      })
    }
    if (type == 0 || type == 8) {
      // Answer chunk: append to an existing RECEIVED item or create one.
      const conversationItem = conversation.value.find(
        c => c.id == data.id && c.type == CONVERSION_TYPE.RECEIVED
      )
      // data.answer is itself a JSON string (see sample payload above).
      const answer = JSON.parse(data.answer)
      if (conversationItem) {
        conversationItem.content += answer.data.textAnswer
      } else {
        conversation.value.push({
          type: CONVERSION_TYPE.RECEIVED,
          content: answer.data.textAnswer,
          isReport: type == 8,
          id: data.id
        })
        if (type == 8) {
          // Report answers open the export overlay immediately.
          exportId.value = data.id
        }
      }
    }
    // Keep the newest message visible.
    nextTick(() => {
      if (refContent.value) {
        refContent.value.scrollTop = refContent.value.scrollHeight
      }
    })
  }
  ws.onclose = e => {
    console.log('onclose', e)
    clearInterval(pid)
  }
  ws.onerror = e => {
    console.log('onerror', e)
    ws = null
    // Clear any pending reconnect so repeated errors don't stack timers.
    clearTimeout(rid)
    rid = window.setTimeout(() => {
      initWebsocket()
    }, 3000)
  }
}

// Connect on mount; tear everything down on unmount.
onMounted(initWebsocket)

onUnmounted(() => {
  try {
    clearInterval(pid)
    clearTimeout(rid)
    ws?.close()
    ws = null
  } catch (err) {
    console.error(err)
  }
})
</script>

<style lang="less" scoped>
.export-view {
  position: absolute;
  width: 100%;
  top: 0;
}
.ai {
  display: flex;
  flex-direction: column;
  height: 100vh;
  height: 100dvh;

  .requesting {
    color: #00000066;
    font-weight: medium;
    font-size: 12px;
    text-align: center;
    margin: 16px 0px;
  }

  .content {
    position: relative;
    flex-grow: 1;
    background: #f4f8fb;
    display: flex;
    flex-direction: column;
    .wrapper {
      flex-grow: 1;
      width: 100%;
      height: 0;
      overflow-y: scroll;
    }

    .loading-bar {
      position: absolute;
      z-index: 99;
      width: 100%;
      height: 100%;
      bottom: 0;
      background: rgba(0, 0, 0, 0.4);
      display: flex;
      align-items: center;
      justify-content: center;
      .anim-wrapper {
        background: #fff;
        border-radius: 7.5px;
        padding: 12px;
        img {
          width: 304px;
          height: 48px;
        }
      }
    }
  }

  .operation-bar {
    background-color: #edf5ff;
    padding: 12px;
    flex-shrink: 0;
    box-sizing: border-box;
    display: flex;
    height: 79.5px;
    > div {
      flex-shrink: 0;
    }

    .method-toggle {
      img {
        width: 35px;
        height: 35px;
      }
    }
    .input-area {
      flex-grow: 1;
      height: 36px;
      padding: 0px 10px;
      display: flex;

      .input-wrapper {
        display: flex;
        width: 100%;
        background: #fff;
        border-radius: 7.5px;
      }
      button {
        user-select: none;
      }
      input {
        width: 0;
      }
      input,
      button {
        outline: none;
        border: none;
        background-color: #fff;
        border-radius: 7.5px;
        flex-grow: 1;
        padding: 0px 10px;
      }
    }
    .generate-report {
      width: 84px;
      background: #2c84fc;
      color: #fff;
      height: 36px;
      border-radius: 7.5px;
      line-height: 36px;
      text-align: center;
    }
  }
}
</style>
