<template>
  <div>
    <!-- Outer card: whole chat panel -->
    <n-card :bordered="false">
      <!-- Message history; height = viewport minus fixed chrome (header + tabs + input area) -->
      <n-card :bordered="false" style="margin-left: 10px">
        <n-scrollbar
            id="scrollRef"
            ref="scrollRef"
            :style="{ height: clientHeight - (layoutHeaderHeight + tabsHeight + 232) + 'px' }"
        >
          <n-empty v-if="!dataSources.length" description="暂无信息" />
          <Message
              v-for="(item, index) of dataSources"
              :key="index"
              :date-time="item.dateTime"
              :text="item.text"
              :inversion="item.inversion"
              :error="item.error"
              :loading="item.loading"
              style="margin-right: 20px;"
              @regenerate="onRegenerate(index)"
              @delete="handleDelete(index)"
          />
        </n-scrollbar>
      </n-card>
      <!-- Input area. FIX: `padding-top:5` was unitless (invalid CSS, ignored) — now 5px. -->
      <n-card content-style="padding-top:5px;padding-bottom:0;width:98%">
        <n-space>
          <n-tag v-if="llmInfo" size="large">{{ llmInfo.type }}</n-tag>

          <!-- Model picker, shown per backend type -->
          <n-select
              v-if="llmInfo && llmInfo.type === 'ChatGPT'"
              v-model:value="gptSetting.model"
              size="medium"
              :options="modelOptions"
              style="width: 200px"
          />

          <n-select
              v-if="llmInfo && llmInfo.type === 'KimiAI'"
              v-model:value="gptSetting.model"
              size="medium"
              :options="kimiModelOption"
              style="width: 200px"
          />
        </n-space>

        <n-form-item>
          <!-- Prompt input with "/"-triggered template autocomplete -->
          <n-auto-complete v-model:value="prompt" :options="searchOptions" @keydown.enter="handleEnter">
            <template #default="{ handleInput, handleBlur, handleFocus }">
              <n-input
                  ref="inputRef"
                  v-model:value="prompt"
                  type="textarea"
                  :placeholder="placeholder"
                  :autosize="{ minRows: 1, maxRows: 3 }"
                  @input="handleInput"
                  @focus="handleFocus"
                  @blur="handleBlur"
              />
            </template>
          </n-auto-complete>
          <NButton :disabled="buttonDisabled" style="margin-left: 5px" @click="handleSubmit">
            <template #icon>
              <span class="dark:text-white">
                <SvgIcon icon="ri:send-plane-fill"/>
              </span>
            </template>
          </NButton>
        </n-form-item>
      </n-card>
    </n-card>
  </div>
</template>


<script setup lang='ts'>
import type {Ref} from 'vue'
import {computed, onMounted, onUnmounted, ref, watch} from 'vue'
import {storeToRefs} from 'pinia'
import {NAutoComplete, NButton, NInput, useDialog, useMessage} from 'naive-ui'
import html2canvas from 'html2canvas'
import {Message} from './components'
import {useScroll} from './hooks/useScroll'
import {useChat} from './hooks/useChat'
import {useCopyCode} from './hooks/useCopyCode'
import {useUsingContext} from './hooks/useUsingContext'
import {HoverButton, SvgIcon} from '@/components/chat'
import {useChatStore, usePromptStore} from '@/store'
import {fetchChatAPIProcess} from '@/api/chat/chat'
//@ts-ignore
import Bus from "@/components/file/box/GlobalUploader/utils/bus";

import {t} from '@/locales'
import {chatMessageApi} from '@/api/chat/chatMessage.api'
import GptSetting = Chat.GptSetting;
import {useSystemStore} from "@/store/modules/useSystemStore";
import {resourceController} from "@/api/resource/resourceController.api";
let controller = new AbortController()  // aborts the in-flight streaming request
const dialog = useDialog()
const ms = useMessage()
const chatStore = useChatStore()
useCopyCode()
const {updateChat, updateChatSome, getChatByUuidAndIndex} = useChat()
const {scrollRef, scrollToBottom, scrollToBottomIfAtBottom} = useScroll()
const {usingContext, toggleUsingContext} = useUsingContext()

// Layout metrics from the system store; used in the template to size the scroll area.
const {
  clientHeight,
  layoutHeaderHeight,
  logoHeight,
  darkTheme,
  tabsHeight
} = storeToRefs(useSystemStore());

// Info about the configured LLM backend (null until getLlmInfo() resolves).
const llmInfo = ref(null)

const props = defineProps({
  uuid: String,
  title: String,
  gptSetting: Object
})
// NOTE(review): destructuring props captures the initial values and loses reactivity;
// later uuid changes are only observed via the watch(() => props.uuid) below — confirm
// gptSetting is never replaced wholesale by the parent.
const {uuid} = props as { uuid: string }
const {gptSetting} = props as { gptSetting: GptSetting }
const dataSources = ref<Chat.Chat[]>([]);  // rendered message list for this chat room
const prompt = ref<string>('')             // current input-box content
const loading = ref<boolean>(false)        // true while an answer is streaming
const inputRef = ref<Ref | null>(null)
// PromptStore supplies the "/" autocomplete templates
const promptStore = usePromptStore()
// storeToRefs keeps the autocomplete list reactive to store updates
const {promptList: promptTemplate} = storeToRefs<any>(promptStore)

// Selectable ChatGPT models; each option's label mirrors its model id.
const modelOptions = ['gpt-3.5-turbo', 'gpt-3.5-turbo-16k'].map(model => ({
  label: model,
  value: model
}))

// Selectable Kimi (Moonshot) models; kept as a ref to preserve the original shape.
const kimiModelOption = ref(
  ['moonshot-v1-8k', 'moonshot-v1-32k', 'moonshot-v1-128k'].map(model => ({
    label: model,
    value: model
  }))
)

// Initial load: fetch messages and backend info, subscribe to prompt-template
// loads from the global bus, and focus the input box.
onMounted(() => {
  getChatMessage()
  getLlmInfo()
  // FIX: the bus payload is an object carrying the template text — the old
  // `promptTemplate: string` annotation contradicted the `.promptDescription` access.
  Bus.on('loadPrompt', (promptTemplate: { promptDescription: string }) => {
    prompt.value = promptTemplate.promptDescription
  })
  // `?.` already guards against null; the wrapping `if` was redundant.
  inputRef.value?.focus()
})

// Fetch which LLM backend is configured; Kimi defaults to its smallest model.
const getLlmInfo = () => {
  resourceController.getLLMInfo().then((res) => {
    if (res.code === 200) {
      llmInfo.value = res.data
      // FIX: the old guard `llmInfo && llmInfo.value.type === …` tested the ref
      // object itself (always truthy) and crashed when res.data was null;
      // guard the unwrapped value instead.
      if (llmInfo.value?.type === 'KimiAI') {
        gptSetting.model = 'moonshot-v1-8k'
      }
    }
  })
}

// Abort any in-flight streaming request when the component is torn down.
onUnmounted(() => {
  if (!loading.value)
    return
  controller.abort()
})

// Reload the message list whenever the chat-room uuid prop changes.
watch(() => props.uuid, () => {
  getChatMessage()
})


/**
 * Load the message history for the current chat room into dataSources and
 * scroll to the newest entry.
 *
 * FIX: removed leftover debug console.log calls, the dead commented-out
 * uuid guard, and the misleading header comment (it claimed this fetched
 * the logged-in user's info).
 */
function getChatMessage() {
  chatMessageApi.listMessage(props.uuid).then(res => {
    dataSources.value = res.data;
    scrollToBottom()
  })
}


/**
 * Return the conversationOptions of the most recent successful assistant
 * reply (non-inverted, non-error), or undefined when there is none.
 * Used to thread follow-up questions onto the previous exchange.
 */
function findLastContext() {
  const replies = dataSources.value.filter((chat: Chat.Chat) => !chat.inversion && !chat.error)
  const latest = replies[replies.length - 1]
  return latest?.conversationOptions
}

/**
 * Append one chat entry to the rendered message list.
 */
function addChat(chat: Chat.Chat) {
  const list = dataSources.value
  list.push(chat)
}

/**
 * Submit the content of the input box as a new conversation turn.
 * The returned promise is intentionally not awaited.
 */
function handleSubmit() {
  void onConversation()
}

/**
 * Send the current prompt to the LLM and stream the reply back.
 *
 * Flow: push the user's message, push an empty "loading" placeholder reply,
 * then progressively fill the placeholder from onDownloadProgress chunks.
 * On failure the placeholder is replaced with the error text.
 */
async function onConversation() {
  const message = prompt.value
  // Ignore submits while a previous answer is still streaming.
  if (loading.value) {
    return
  }
  // Ignore empty / whitespace-only prompts.
  if (!message || message.trim() === '') {
    return
  }
  // Fresh controller so this request can be aborted independently.
  controller = new AbortController()
  // The user's own message (inversion: true renders it on the user side).
  let userChat: Chat.Chat =
  {
    dateTime: new Date().toLocaleString(),
    text: message,
    inversion: true,
    error: false,
    conversationOptions: null,
    requestOptions: {
        prompt: message,
        temperature: gptSetting.temperature,
        topP: gptSetting.topP,
        presencePenalty: gptSetting.presencePenalty,
        frequencyPenalty: gptSetting.frequencyPenalty,
        modelName: gptSetting.model,
        options: null},
  }
  addChat(userChat);
  scrollToBottom()

  loading.value = true
  prompt.value = ''

  let options: Chat.ConversationRequest = {}
  const lastContext = findLastContext();

  // Thread onto the previous exchange only when the context toggle is on.
  if (lastContext && usingContext.value)
    options = {...lastContext}

  // Placeholder assistant reply, filled in as chunks arrive.
  let replaceChat: Chat.Chat =
  {
    dateTime: new Date().toLocaleString(),
    text: '',
    loading: true,
    inversion: false,
    error: false,
    conversationOptions: null,
    requestOptions: {
        prompt: message,
        temperature: gptSetting.temperature,
        topP: gptSetting.topP,
        presencePenalty: gptSetting.presencePenalty,
        frequencyPenalty: gptSetting.frequencyPenalty,
        modelName: gptSetting.model, options: {...options}}
  }
  addChat(replaceChat);
  scrollToBottom()

  try {
    const lastText = ''
    const fetchChatAPIOnce = async () => {
      await fetchChatAPIProcess<Chat.ConversationResponse>({
        prompt: message,
        temperature: gptSetting.temperature,
        topP: gptSetting.topP,
        presencePenalty: gptSetting.presencePenalty,
        frequencyPenalty: gptSetting.frequencyPenalty,
        modelName: gptSetting.model,
        options,
        signal: controller.signal,
        onDownloadProgress: ({event}) => {
          const xhr = event.target
          const {responseText} = xhr
          // Always process the final line: responseText accumulates the whole
          // stream, so take everything after the last newline (skipping a
          // possible trailing one) as the current JSON chunk.
          const lastIndex = responseText.lastIndexOf('\n', responseText.length - 2)
          let chunk = responseText
          if (lastIndex !== -1) {
            chunk = responseText.substring(lastIndex)
          }
          try {
            const data = JSON.parse(chunk)
            // Overwrite the placeholder (always the last entry) with the
            // accumulated text and the ids needed to continue the thread.
            dataSources.value[dataSources.value.length - 1].text = lastText + (data.text ?? '');
            dataSources.value[dataSources.value.length - 1].conversationOptions = {
              conversationId: data.conversationId,
              parentMessageId: data.id
            };
            // Update the chat entry
            // replaceChat.text = lastText + (data.text ?? '');
            // replaceChat.conversationOptions = {conversationId: data.conversationId, parentMessageId: data.id};
            scrollToBottomIfAtBottom()
            // // Refresh chat-room info
            // if(props.title==='New Chat'){
            //   Bus.emit('reloadChatRoom')
            // }
          } catch (error) {
            console.log("loading:" + dataSources.value[dataSources.value.length - 1].loading);
            // A chunk that fails to parse is treated as the end-of-stream
            // marker. NOTE(review): this also swallows genuinely malformed
            // chunks — confirm the backend's stream framing.
            // replaceChat.loading = false;
            dataSources.value[dataSources.value.length - 1].loading = false;
          }
        },
      })
      // Streaming finished for this turn: clear the loading flag both in the
      // local list and in the store (+uuid coerces the string prop to number).
      // replaceChat.loading = false;
      dataSources.value[dataSources.value.length - 1].loading = false;
      updateChatSome(+uuid, dataSources.value.length - 1, {loading: false})
    }
    await fetchChatAPIOnce()
    // A first message names the room, so tell listeners to reload the room list.
    if(props.title==='New Chat'){
      Bus.emit('reloadChatRoom')
    }
  } catch (error: any) {
    const errorMessage = error?.message ?? t('common.wrong')

    // User-initiated abort: just stop the spinner, keep whatever arrived.
    if (error.message === 'canceled') {
      updateChatSome(
          +uuid,
          dataSources.value.length - 1,
          {
            loading: false,
          },
      )
      scrollToBottomIfAtBottom()
      return
    }

    const currentChat = getChatByUuidAndIndex(+uuid, dataSources.value.length - 1)

    // Partial answer already shown: append the error text instead of replacing it.
    if (currentChat?.text && currentChat.text !== '') {
      updateChatSome(
          +uuid,
          dataSources.value.length - 1,
          {
            text: `${currentChat.text}\n[${errorMessage}]`,
            error: false,
            loading: false,
          },
      )
      return
    }

    // Nothing arrived at all: replace the placeholder with an error entry.
    updateChat(
        +uuid,
        dataSources.value.length - 1,
        {
          dateTime: new Date().toLocaleString(),
          text: errorMessage,
          inversion: false,
          error: true,
          loading: false,
          conversationOptions: null,
          requestOptions: {
              prompt: message,
              temperature: gptSetting.temperature,
              topP: gptSetting.topP,
              presencePenalty: gptSetting.presencePenalty,
              frequencyPenalty: gptSetting.frequencyPenalty,
              modelName: gptSetting.model,
              options: {...options}},
        },
    )
    scrollToBottomIfAtBottom()
  } finally {
    loading.value = false
  }
}

/**
 * Re-run the request that produced the message at `index`, streaming the new
 * answer into that same slot. Reuses the prompt and conversation options
 * stored on the message's requestOptions.
 *
 * @param index position of the message to regenerate in dataSources
 */
async function onRegenerate(index: number) {
  // Only one stream at a time.
  if (loading.value)
    return

  // Fresh controller so this request can be aborted independently.
  controller = new AbortController()

  const {requestOptions} = dataSources.value[index]

  const message = requestOptions?.prompt ?? ''

  let options: Chat.ConversationRequest = {}

  if (requestOptions.options)
    options = {...requestOptions.options}

  loading.value = true

  // Reset the slot to an empty "loading" entry before streaming into it.
  updateChat(
      +uuid,
      index,
      {
        dateTime: new Date().toLocaleString(),
        text: '',
        inversion: false,
        error: false,
        loading: true,
        conversationOptions: null,
        requestOptions: {prompt: message, options: {...options}},
      },
  )

  try {
    const lastText = ''
    const fetchChatAPIOnce = async () => {
      await fetchChatAPIProcess<Chat.ConversationResponse>({
        prompt: message,
       temperature: gptSetting.temperature,
       topP: gptSetting.topP,
       presencePenalty: gptSetting.presencePenalty,
       frequencyPenalty: gptSetting.frequencyPenalty,
       modelName: gptSetting.model,

        options,
        signal: controller.signal,
        onDownloadProgress: ({event}) => {
          const xhr = event.target
          const {responseText} = xhr
          // Always process the final line: responseText accumulates the whole
          // stream, so take everything after the last newline (skipping a
          // possible trailing one) as the current JSON chunk.
          const lastIndex = responseText.lastIndexOf('\n', responseText.length - 2)
          let chunk = responseText
          if (lastIndex !== -1)
            chunk = responseText.substring(lastIndex)
          try {
            const data = JSON.parse(chunk)
            updateChat(
                +uuid,
                index,
                {
                  dateTime: new Date().toLocaleString(),
                  text: lastText + (data.text ?? ''),
                  inversion: false,
                  error: false,
                  loading: true,
                  conversationOptions: {conversationId: data.conversationId, parentMessageId: data.id},
                  requestOptions: {prompt: message, options: {...options}},
                },
            )
          } catch (error) {
            // Non-JSON chunk: ignore and wait for the next progress event.
          }
        },
      })
      updateChatSome(+uuid, index, {loading: false})
    }
    await fetchChatAPIOnce()
  } catch (error: any) {
    // User-initiated abort: just stop the spinner.
    if (error.message === 'canceled') {
      updateChatSome(
          +uuid,
          index,
          {
            loading: false,
          },
      )
      return
    }

    const errorMessage = error?.message ?? t('common.wrong')

    // Replace the slot with an error entry.
    updateChat(
        +uuid,
        index,
        {
          dateTime: new Date().toLocaleString(),
          text: errorMessage,
          inversion: false,
          error: true,
          loading: false,
          conversationOptions: null,
          requestOptions: {prompt: message, options: {...options}},
        },
    )
  } finally {
    loading.value = false
  }
}

/**
 * Export the current conversation as a PNG screenshot after user confirmation.
 *
 * FIX: removed the stray no-op `Promise.resolve()`, the redundant
 * `d.loading = false` (the finally block already clears it), and
 * `window.URL.revokeObjectURL(imgUrl)` — revokeObjectURL only applies to
 * blob: URLs, so calling it on a data: URL was a no-op.
 */
function handleExport() {
  if (loading.value)
    return

  const d = dialog.warning({
    title: t('chat.exportImage'),
    content: t('chat.exportImageConfirm'),
    positiveText: t('common.yes'),
    negativeText: t('common.no'),
    onPositiveClick: async () => {
      try {
        d.loading = true
        // NOTE(review): no element with id "image-wrapper" is visible in this
        // component's template — confirm it exists in a parent component.
        const ele = document.getElementById('image-wrapper')
        const canvas = await html2canvas(ele as HTMLDivElement, {
          useCORS: true,
        })
        const imgUrl = canvas.toDataURL('image/png')
        // Trigger a download via a temporary anchor element.
        const tempLink = document.createElement('a')
        tempLink.style.display = 'none'
        tempLink.href = imgUrl
        tempLink.setAttribute('download', 'chat-shot.png')
        if (typeof tempLink.download === 'undefined')
          tempLink.setAttribute('target', '_blank')

        document.body.appendChild(tempLink)
        tempLink.click()
        document.body.removeChild(tempLink)
        ms.success(t('chat.exportSuccess'))
      } catch (error: any) {
        ms.error(t('chat.exportFailed'))
      } finally {
        d.loading = false
      }
    },
  })
}

/**
 * Ask for confirmation, then delete the message at `index` from the store.
 *
 * @param index position of the message to remove
 */
function handleDelete(index: number) {
  if (loading.value)
    return

  dialog.warning({
    title: t('chat.deleteMessage'),
    content: t('chat.deleteMessageConfirm'),
    positiveText: t('common.yes'),
    negativeText: t('common.no'),
    onPositiveClick: () => chatStore.deleteChatByUuid(+uuid, index),
  })
}

/**
 * Ask for confirmation, then clear the whole chat history for this room.
 */
function handleClear() {
  if (loading.value)
    return

  dialog.warning({
    title: t('chat.clearChat'),
    content: t('chat.clearChatConfirm'),
    positiveText: t('common.yes'),
    negativeText: t('common.no'),
    onPositiveClick: () => chatStore.clearChatByUuid(+uuid),
  })
}

/**
 * Submit on a plain Enter (no Ctrl) when the "send on enter" setting is on;
 * otherwise let the textarea insert a newline as usual.
 */
function handleEnter(event: KeyboardEvent) {
  const sendOnEnter = event.key === 'Enter' && !event.ctrlKey && gptSetting.enter
  if (!sendOnEnter)
    return
  event.preventDefault()
  handleSubmit()
}

/**
 * Abort the in-flight answer stream, if any, and clear the loading flag.
 */
function handleStop() {
  if (!loading.value)
    return
  controller.abort()
  loading.value = false
}

/**
 * Autocomplete options for the prompt box: when the input starts with "/",
 * match the remainder (case-insensitively) against the prompt-template keys
 * and offer the matching template bodies as options.
 */
const searchOptions = computed(() => {
  if (!prompt.value.startsWith('/'))
    return []
  const needle = prompt.value.substring(1).toLowerCase()
  return promptTemplate.value
    .filter((item: { key: string }) => item.key.toLowerCase().includes(needle))
    .map((obj: { value: any }) => ({
      label: obj.value,
      value: obj.value,
    }))
})

// Map an option's value (the template body) back to its template key for display.
const renderOption = (option: { label: string }) => {
  const match = promptTemplate.value.find((item: { value: any }) => item.value === option.label)
  return match ? [match.key] : []
}

// Localised placeholder text for the prompt input.
const placeholder = computed(() => t('chat.placeholder'))

// The send button is disabled while streaming or when the prompt is blank.
const buttonDisabled = computed(() => loading.value || prompt.value.trim() === '')




</script>


