<template>
  <!-- Application shell: header, chat area, shortcut prompts, API/MCP settings, input sender, debug panel -->
  <McLayout class="container">
    <McHeader :title="'联搜AI'" :logoImg="'https://matechat.gitcode.com/logo.svg'">
      <template #operationArea>
        <div class="operations">
          <i class="icon-helping"></i>
        </div>
      </template>
    </McHeader>
    <!-- Start page: intro banner plus clickable example prompts (clicking submits the prompt's label) -->
    <McLayoutContent
      v-if="startPage"
      style="display: flex; flex-direction: column; align-items: center; justify-content: center; gap: 12px"
    >
      <McIntroduction
        :logoImg="'https://matechat.gitcode.com/logo.svg'"
        :title="'联搜AI'"
        :subTitle="'Hi，欢迎使用 联搜AI'"
        :description="description"
      ></McIntroduction>
      <McPrompt
        :list="introPrompt.list"
        :direction="introPrompt.direction"
        class="intro-prompt"
        @itemClick="onSubmit($event.label)"
      ></McPrompt>
    </McLayoutContent>
    <!-- Conversation transcript: user bubbles right-aligned, model bubbles left with a loading state -->
    <McLayoutContent class="content-container" v-else>
      <template v-for="(msg, idx) in messages" :key="idx">
        <McBubble
          v-if="msg.from === 'user'"
          :content="msg.content"
          :align="'right'"
          :avatarConfig="{ imgSrc: 'https://matechat.gitcode.com/png/demo/userAvatar.svg' }"
        >
        </McBubble>
        <McBubble v-else :content="msg.content" :avatarConfig="{ imgSrc: 'https://matechat.gitcode.com/logo.svg' }" :loading="msg.loading"> </McBubble>
      </template>
    </McLayoutContent>
    <!-- Compact prompt shortcuts (hidden on the start page) plus "new conversation" button -->
    <div class="shortcut" style="display: flex; align-items: center; gap: 8px">
      <McPrompt
        v-if="!startPage"
        :list="simplePrompt"
        :direction="'horizontal'"
        style="flex: 1"
        @itemClick="onSubmit($event.label)"
      ></McPrompt>
      <Button
        style="margin-left: auto"
        icon="add"
        shape="circle"
        title="新建对话"
        size="md"
        @click="newConversation"
      />
    </div>
    <!-- Model endpoint configuration (key, base URL, model name) and MCP toggle -->
    <div class="api-config" style="display:flex;gap:8px;align-items:center;margin:8px 0">
      <input class="apikey" v-model="apiKey" placeholder="API Key (optional)" />
      <input class="apikey" v-model="baseUrl" placeholder="Base URL (e.g. https://api.example.com)" />
      <input class="apikey" v-model="modelName" placeholder="Model (e.g. gpt-4o-mini)" />
      <label style="display:flex;align-items:center;gap:6px;margin-left:8px"><input type="checkbox" v-model="useMcp" /> Use MCP</label>
    </div>
    <!-- MCP server details, only shown when the MCP toggle is on -->
    <div v-if="useMcp" style="display:flex;gap:8px;align-items:center;margin-bottom:8px">
      <input class="apikey" v-model="mcpLabel" placeholder="MCP label (eg. BrightData)" />
      <input class="apikey" v-model="mcpServerUrl" placeholder="MCP server URL (eg. https://mcp.example.com/sse?token=API_TOKEN)" />
      <input class="apikey" v-model="mcpToken" placeholder="MCP API_TOKEN (frontend input)" />
    </div>
    <!-- Message input with footer icons, live character count, and a clear button -->
    <McLayoutSender>
      <McInput :value="inputValue" :maxLength="2000" @change="(e) => (inputValue = e)" @submit="onSubmit">
        <template #extra>
          <div class="input-foot-wrapper">
            <div class="input-foot-left">
              <span v-for="(item, index) in inputFootIcons" :key="index">
                <i :class="item.icon"></i>
                {{ item.text }}
              </span>
              <span class="input-foot-dividing-line"></span>
              <span class="input-foot-maxlength">{{ inputValue.length }}/2000</span>
            </div>
            <div class="input-foot-right">
              <Button icon="op-clearup" shape="round" :disabled="!inputValue" @click="inputValue = ''"><span class="demo-button-content">清空输入</span></Button>
            </div>
          </div>
        </template>
      </McInput>
    </McLayoutSender>
    <!-- Collapsible debug panel: the truncated request history and raw MCP/API traffic -->
    <div style="margin-top:12px;">
      <button style="margin-bottom:8px;" @click="showDebug = !showDebug">{{ showDebug ? '隐藏调试信息' : '显示调试信息' }}</button>
      <div v-if="showDebug">
        <div style="margin-top:8px;font-weight:600">已截断的消息 (用于请求):</div>
        <pre style="max-height:140px;overflow:auto;background:#f6f8fa;padding:8px;border-radius:6px">{{ JSON.stringify(lastTrimmedMessages, null, 2) }}</pre>
        <div style="margin-top:8px;font-weight:600">最近的 MCP / 原始请求与响应 (调试用):</div>
        <textarea readonly style="width:100%;height:160px;background:#111827;color:#e6eef6;padding:8px;border-radius:6px">{{ lastMcpRaw }}</textarea>
      </div>
    </div>
  </McLayout>
</template>

<script setup lang="ts">
import { ref } from 'vue';
import { Button } from 'vue-devui/button';
import 'vue-devui/button/style.css';

// Intro-screen description paragraphs rendered by <McIntroduction>.
const description = [
  '联搜AI 可以辅助研发人员编码、查询知识和相关作业信息、编写文档等。',
  '作为AI模型，联搜AI 提供的答案可能不总是确定或准确的，但您的反馈可以帮助联搜AI 做得更好。',
];
// Prompt cards shown on the start page; clicking one submits its `label` via onSubmit.
const introPrompt = {
  direction: 'horizontal',
  list: [
    {
      value: 'quickSort',
      label: '帮我写一个快速排序',
      iconConfig: { name: 'icon-info-o', color: '#5e7ce0' },
      desc: '使用 js 实现一个快速排序',
    },
    {
      value: 'helpMd',
      label: '你可以帮我做些什么？',
      iconConfig: { name: 'icon-star', color: 'rgb(255, 215, 0)' },
      desc: '了解当前大模型可以帮你做的事',
    },
    {
      value: 'bindProjectSpace',
      label: '怎么绑定项目空间',
      iconConfig: { name: 'icon-priority', color: '#3ac295' },
      desc: '如何绑定云空间中的项目',
    },
  ],
};
// Compact prompt list shown above the input once a conversation has started.
const simplePrompt = [
  {
    value: 'quickSort',
    iconConfig: { name: 'icon-info-o', color: '#5e7ce0' },
    label: '帮我写一个快速排序',
  },
  {
    value: 'helpMd',
    iconConfig: { name: 'icon-star', color: 'rgb(255, 215, 0)' },
    label: '你可以帮我做些什么？',
  },
];
// true → show the intro/start page; false → show the transcript view.
const startPage = ref(true);
// Current text of the input box (bound to <McInput> via :value/@change).
const inputValue = ref('');
// Decorative footer entries rendered inside the input's #extra slot.
const inputFootIcons = [
  { icon: 'icon-at', text: '智能体' },
  { icon: 'icon-standard', text: '词库' },
  { icon: 'icon-add', text: '附件' },
];

// Conversation transcript: entries look like { from: 'user' | 'model', content, loading? }.
const messages = ref<any[]>([]);

// Debug / persistence for MCP and truncation results
const lastMcpRaw = ref('') // raw request/response text shown in the debug textarea
const lastTrimmedMessages = ref<any[]>([]) // truncated history actually sent to the API
const showDebug = ref(false) // toggles the debug panel visibility

// API & MCP controls
const apiKey = ref('') // empty → local echo mock is used instead of a real request
const baseUrl = ref('https://api.chatanywhere.tech')
const modelName = ref('gpt-4o-mini')
const useMcp = ref(false) // when true, an MCP tool definition is attached to requests
const mcpLabel = ref('BrightData')
const mcpServerUrl = ref('https://mcp.brightdata.com/sse?token=API_TOKEN') // API_TOKEN is substituted at request time
const mcpToken = ref('')

// Reset the UI to the intro screen and wipe the current transcript.
const newConversation = () => {
  messages.value = [];
  startPage.value = true;
};

// --- Token estimation & truncation helpers ---
// Rough heuristic: one token covers roughly four characters, rounded up.
// Empty / falsy input counts as zero tokens.
const estimateTokens = (text: string) => (text ? Math.ceil(text.length / 4) : 0)

// Conservative context-window sizes per model family. The returned values are
// deliberately smaller than the advertised windows to leave safety headroom;
// unknown models fall back to a small 8192-token budget.
const modelWindowSize = (model: string) => {
  const name = (model || '').toLowerCase()
  switch (true) {
    case name.includes('gpt-4o-mini'):
      return 100000
    case name.includes('gpt-4o'):
    case name.includes('gpt-4'):
      return 30000
    default:
      return 8192
  }
}

// Truncate oldest messages until the estimated token count fits within the
// model's context window, minus `reserve` tokens kept for the reply.
//
// Fix: the previous code's comment promised "remove oldest non-system messages",
// but it removed system messages like any other entry. Removal now skips
// role === 'system' entries so a system prompt survives truncation. At least
// one message is always kept. Returns { messages, truncated }.
const truncateHistoryForModel = (history: Array<{role:string,content:string}>, model: string, reserve = 1024) => {
  const maxWindow = modelWindowSize(model)
  const limit = Math.max(0, maxWindow - reserve)
  let estimated = history.reduce((s, m) => s + estimateTokens(m.content || ''), 0)
  if (estimated <= limit) return { messages: history, truncated: false }

  // Repeatedly drop the oldest non-system message until we fit (keep >= 1 message).
  const copy = history.slice()
  while (copy.length > 1 && estimated > limit) {
    const idx = copy.findIndex((m) => m.role !== 'system')
    if (idx === -1) break // only system messages remain; nothing removable
    const [removed] = copy.splice(idx, 1)
    estimated -= estimateTokens(removed.content || '')
  }
  return { messages: copy, truncated: true }
}

// Handle a prompt submission, coming either from the input box (@submit) or
// from a McPrompt item click (label string).
//
// Fix: empty or whitespace-only submissions are now ignored instead of pushing
// an empty user bubble and firing a pointless request.
const onSubmit = (evt) => {
  const text = (typeof evt === 'string' ? evt : inputValue.value)
  if (!text || !text.trim()) return // nothing to send; keep current input state

  inputValue.value = '';
  startPage.value = false;

  // Record the user's message in the transcript.
  messages.value.push({ from: 'user', content: text })

  // Append a loading placeholder the model reply will be written into.
  messages.value.push({ from: 'model', content: '', loading: true })
  const modelIndex = messages.value.length - 1

  // Without an API key, fall back to a local echo mock (previous demo behavior).
  if (!apiKey.value) {
    const reply = text
    setTimeout(() => {
      messages.value[modelIndex].content = reply
      messages.value[modelIndex].loading = false
    }, 200)
    return
  }

  // Otherwise call the remote model endpoint (optionally with MCP tools).
  fetchData(text, modelIndex)
}

// fetchData: call the chat-completions endpoint for `ques`, writing the reply
// into messages[modelIndex]. Supports optional MCP tools and streaming (SSE)
// responses, with a non-stream retry to capture full error details.
//
// Fixes vs. the previous revision:
//  - When history was truncated, a notice bubble was spliced in just before the
//    loading placeholder, shifting the placeholder one slot to the right; every
//    later write to messages[modelIndex] then hit the notice instead of the
//    reply bubble. modelIndex is now advanced after the splice.
//  - The trailing-slash regex contained a duplicated alternation (/\/+$|\/+$/).
const fetchData = async (ques: string, modelIndex: number) => {
  if (modelIndex == null || modelIndex < 0) return

  // Fall back to defaults when the config inputs are blank.
  const used_api_key = apiKey.value && apiKey.value.trim() ? apiKey.value.trim() : ''
  const used_base = baseUrl.value && baseUrl.value.trim() ? baseUrl.value.trim() : 'https://api.chatanywhere.tech'

  try {
    // Strip trailing slashes before appending the endpoint path.
    const url = used_base.replace(/\/+$/, '') + '/chat/completions'

    // Convert the local transcript ({from, content}) to API {role, content}
    // pairs, excluding the placeholder model entry we just pushed.
    const history = messages.value
      .slice(0, -1)
      .map((m: any) => ({ role: m.from === 'user' ? 'user' : 'assistant', content: m.content || '' }))

    // Append the current user question.
    history.push({ role: 'user', content: ques })

    // Truncate history to fit the model's context window, reserving an
    // estimated 1024 tokens for the reply.
    const { messages: trimmedMessages, truncated } = truncateHistoryForModel(history, modelName.value, 1024)
    lastTrimmedMessages.value = trimmedMessages.slice() // persist for the debug panel
    if (truncated) {
      // Insert a short, non-intrusive notice just before the loading placeholder.
      messages.value.splice(messages.value.length - 1, 0, { from: 'model', content: '（已截断早期对话以适配上下文窗口）', loading: false })
      // The splice shifted the placeholder one slot to the right — follow it.
      modelIndex += 1
    }

    const bodyObj: any = {
      model: modelName.value,
      messages: trimmedMessages,
      stream: true,
    }

    if (useMcp.value) {
      // Resolve the MCP server URL: substitute an API_TOKEN placeholder when
      // present, otherwise append the token as a query parameter.
      let server_url = (mcpServerUrl && mcpServerUrl.value) ? mcpServerUrl.value : ''
      if (server_url.includes('API_TOKEN')) {
        server_url = server_url.replace(/API_TOKEN/g, encodeURIComponent(mcpToken.value || ''))
      } else if (mcpToken && mcpToken.value) {
        server_url += (server_url.includes('?') ? '&' : '?') + 'token=' + encodeURIComponent(mcpToken.value)
      }

      bodyObj.tools = [
        { type: 'mcp', server_label: mcpLabel.value || 'MCP', server_url, require_approval: 'never' },
      ]
    }

    // Persist the outgoing request body for the debug panel.
    if (useMcp.value) {
      lastMcpRaw.value = `--REQUEST (tools included) --\n${JSON.stringify(bodyObj, null, 2)}`
    } else {
      lastMcpRaw.value = `--REQUEST --\n${JSON.stringify(bodyObj, null, 2)}`
    }

    const resp = await fetch(url, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json', 'Authorization': `Bearer ${used_api_key}` },
      body: JSON.stringify(bodyObj),
    })

    // Best-effort persistence of the request across page reloads.
    try { localStorage.setItem('lastMcpRaw_request', JSON.stringify(bodyObj)) } catch (e) {}

    if (!resp.ok) {
      const t = await resp.text()
      // Persist the error response, then retry once with stream:false so the
      // full error body can be captured and shown.
      lastMcpRaw.value = `[HTTP ${resp.status}]\n${t}`
      try { localStorage.setItem('lastMcpRaw_response', lastMcpRaw.value) } catch (e) {}
      console.warn('Primary request failed, retrying with non-stream fallback... HTTP', resp.status)
      try {
        const fallback = await fetch(url, {
          method: 'POST',
          headers: { 'Content-Type': 'application/json', 'Authorization': `Bearer ${used_api_key}` },
          body: JSON.stringify({ ...bodyObj, stream: false }),
        })
        const ftext = await fallback.text()
        lastMcpRaw.value += `\n--FALLBACK RESPONSE (stream:false)--\n${ftext}`
        try { localStorage.setItem('lastMcpRaw_response_fallback', lastMcpRaw.value) } catch (e) {}
        messages.value[modelIndex].content = '[Error] ' + ftext
      } catch (ef) {
        console.error('Fallback request also failed', ef)
        messages.value[modelIndex].content = '[Error] request failed and fallback failed: ' + String(ef)
      }
      messages.value[modelIndex].loading = false
      return
    }

    const ct = resp.headers.get('content-type') || ''
    if (ct.includes('application/json')) {
      // Non-streaming JSON response.
      const data = await resp.json()
      try { lastMcpRaw.value = `--RESPONSE JSON--\n${JSON.stringify(data, null, 2)}`; localStorage.setItem('lastMcpRaw_response', lastMcpRaw.value) } catch(e){}
      messages.value[modelIndex].content = data.choices?.[0]?.message?.content || data.text || JSON.stringify(data)
      // If the JSON carried no content, retry without streaming to surface the
      // server's actual message/details.
      if (!messages.value[modelIndex].content) {
        console.warn('JSON response contained no content; attempting non-stream fallback to capture server message')
        try {
          const fb = await fetch(url, { method: 'POST', headers: { 'Content-Type': 'application/json', 'Authorization': `Bearer ${used_api_key}` }, body: JSON.stringify({ ...bodyObj, stream: false }) })
          const fbText = await fb.text()
          lastMcpRaw.value += `\n--FALLBACK RESPONSE (stream:false)--\n${fbText}`
          try { localStorage.setItem('lastMcpRaw_response_fallback', lastMcpRaw.value) } catch (e) {}
          messages.value[modelIndex].content = fbText || '模型无返回结果，服务端返回了空响应。'
        } catch (ef) {
          console.error('Fallback failed', ef)
          messages.value[modelIndex].content = '模型无返回结果，且尝试回退获取详情时失败。'
        }
      }
    } else if (resp.body) {
      // Streaming (SSE-style) response: read chunks, split into lines, and
      // parse each "data: {...}" payload as it arrives.
      const reader = resp.body.getReader()
      const decoder = new TextDecoder()
      let buffer = ''
      let rawBuffer = ''
      while (true) {
        const { value, done } = await reader.read()
        if (done) break
        const chunk = decoder.decode(value, { stream: true })
        rawBuffer += chunk
        buffer += chunk
        const lines = buffer.split(/\r?\n/)
        buffer = lines.pop() || '' // keep the trailing partial line for the next read
        for (const rawLine of lines) {
          const line = rawLine.trim()
          if (!line) continue
          const dataLine = line.startsWith('data:') ? line.replace(/^data:\s*/, '') : line
          if (dataLine === '[DONE]') continue
          try {
            const obj = JSON.parse(dataLine)
            const content = obj.choices?.[0]?.delta?.content ?? obj.choices?.[0]?.message?.content ?? obj.choices?.[0]?.text ?? ''
            if (content) {
              messages.value[modelIndex].content += content
            }
          } catch (e) {
            // Not JSON — surface the raw line so nothing is silently dropped.
            messages.value[modelIndex].content += dataLine
          }
        }
      }
      // Persist the raw stream buffer for debugging.
      lastMcpRaw.value = rawBuffer || buffer || ''
      try { localStorage.setItem('lastMcpRaw_stream', lastMcpRaw.value) } catch (e) {}
      // Flush whatever is left in the buffer after the stream closed.
      if (buffer) {
        const maybe = buffer.trim()
        if (maybe && maybe !== '[DONE]') {
          try {
            const obj = JSON.parse(maybe)
            const content = obj.choices?.[0]?.delta?.content ?? obj.choices?.[0]?.message?.content ?? obj.choices?.[0]?.text ?? ''
            if (content) messages.value[modelIndex].content += content
          } catch (e) {
            messages.value[modelIndex].content += maybe
          }
        }
      }
    } else {
      // Not JSON and no readable body — show the plain text response.
      const text = await resp.text()
      messages.value[modelIndex].content = text
    }
  } catch (err) {
    console.error(err)
    messages.value[modelIndex].content = '[Request failed] ' + String(err)
  } finally {
    messages.value[modelIndex].loading = false
  }
}
</script>

<style>
/* Fixed-width card centered on the page; height leaves room for page margins. */
.container {
  width: 1000px;
  margin: 20px auto;
  height: calc(100vh - 82px);
  padding: 20px;
  gap: 8px;
  background: #fff;
  border: 1px solid #ddd;
  border-radius: 16px;
}

/* Scrollable vertical stack of chat bubbles. */
.content-container {
  display: flex;
  flex-direction: column;
  gap: 8px;
  overflow: auto;
}

/* Footer row inside the input: icon shortcuts on the left, actions on the right.
   Uses CSS nesting — requires a preprocessor or native-nesting browser support. */
.input-foot-wrapper {
  display: flex;
  justify-content: space-between;
  align-items: center;
  width: 100%;
  height: 100%;
  margin-right: 8px;

  .input-foot-left {
    display: flex;
    align-items: center;
    gap: 8px;

    span {
      font-size: 14px;
      line-height: 18px;
      color: #252b3a;
      cursor: pointer;
    }

    /* Thin vertical separator between the icon group and the char counter. */
    .input-foot-dividing-line {
      width: 1px;
      height: 14px;
      background-color: #d7d8da;
    }

    /* "n/2000" character counter. */
    .input-foot-maxlength {
      font-size: 14px;
      color: #71757f;
    }
  }

  .input-foot-right {
    .demo-button-content {
      font-size: 14px;
    }

    /* Space out adjacent action buttons. */
    & > *:not(:first-child) {
      margin-left: 8px;
    }
  }
}
</style>