// AI服务配置 - 有道智云DeepSeek模型
// DeepSeek 接口最小封装
class AIService {
  /**
   * Minimal client for the Youdao Zhiyun DeepSeek chat-completions API.
   * NOTE(review): credentials are hard-coded in source; they should be
   * moved to environment/config before production use.
   */
  constructor() {
    const apiKey = '46e95637e9341e95'
    this.config = {
      id: '1424eb75e906d248',
      apiKey,
      apiSecret: 'HeWhvz6zehreZ2QjHrw6m9ME7Wg888uk',
      apiUrl: 'https://openapi.youdao.com/llmgateway/api/v1/chat/completions',
      model: 'deepseek-r1-250120',
      // Authorization is derived from apiKey so the two can never drift apart.
      headers: {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${apiKey}`
      },
      // Single source of truth for the default system prompt (previously
      // duplicated verbatim in sendMessage and streamMessage).
      systemPrompt: '你是医院AI智能客服，提供挂号、缴费、报告、就诊指导等咨询，回答简洁、专业、友好，并在需要时建议及时线下就医。',
      defaults: {
        stream: false,
        max_tokens: 1000,
        presence_penalty: 0,
        frequency_penalty: 0,
        temperature: 0,
        top_p: 0,
        stream_options: { include_usage: true }
      }
    }
  }

  /**
   * Build the OpenAI-style messages array.
   *
   * @param {string} userContent - The new user message, appended last.
   * @param {Array<{type: string, content: string, tool_call_id?: string}>} [context]
   *   Prior turns; `type: 'user'` → user role, `type: 'tool'` → tool role
   *   (with `tool_call_id`), anything else → assistant role.
   * @param {string} [systemPrompt] - Prepended as a system message when truthy.
   * @returns {Array<object>} Messages in [system?, ...history, user] order.
   */
  buildMessages(userContent, context = [], systemPrompt) {
    const base = []
    if (systemPrompt) {
      base.push({ role: 'system', content: systemPrompt })
    }
    const history = Array.isArray(context) ? context.map(item => {
      if (item.type === 'tool') {
        return { role: 'tool', tool_call_id: item.tool_call_id, content: item.content }
      }
      return { role: item.type === 'user' ? 'user' : 'assistant', content: item.content }
    }) : []
    return [
      ...base,
      ...history,
      { role: 'user', content: userContent }
    ]
  }

  /**
   * Assemble the chat-completions request body shared by sendMessage and
   * streamMessage (previously duplicated in both).
   *
   * @param {string} message - Current user message.
   * @param {Array} context - Conversation history (see buildMessages).
   * @param {string} systemPrompt - System prompt to prepend.
   * @param {boolean} stream - Whether to request an SSE stream.
   * @returns {object} JSON-serializable request body.
   */
  buildRequestBody(message, context, systemPrompt, stream) {
    const d = this.config.defaults
    return {
      model: this.config.model,
      messages: this.buildMessages(message, context, systemPrompt),
      stream,
      max_tokens: d.max_tokens,
      stream_options: d.stream_options,
      presence_penalty: d.presence_penalty,
      frequency_penalty: d.frequency_penalty,
      temperature: d.temperature,
      top_p: d.top_p
    }
  }

  /**
   * Send a chat message and return a normalized response object.
   * Network/HTTP failures never throw: a fallback response is returned.
   *
   * @param {string} message - User message.
   * @param {Array} [context] - Conversation history.
   * @param {{stream?: boolean, systemPrompt?: string}} [options]
   * @returns {Promise<object|ReadableStream>} Normalized response for
   *   non-stream calls; the raw body stream when `stream` is true.
   */
  async sendMessage(message, context = [], options = {}) {
    const {
      stream = this.config.defaults.stream,
      systemPrompt = this.config.systemPrompt
    } = options

    const body = this.buildRequestBody(message, context, systemPrompt, stream)

    try {
      const resp = await fetch(this.config.apiUrl, {
        method: 'POST',
        headers: this.config.headers,
        body: JSON.stringify(body)
      })
      if (!resp.ok) {
        const text = await resp.text()
        throw new Error(`HTTP ${resp.status}: ${text}`)
      }
      if (!stream) {
        const data = await resp.json()
        return this.processResponse(data)
      }
      // Streaming callers should prefer streamMessage, which parses SSE safely.
      return resp.body
    } catch (err) {
      console.error('DeepSeek 接口调用失败:', err)
      return this.getFallback(message)
    }
  }

  /**
   * Streaming chat: parses the SSE response, invokes `onChunk` for every
   * `data:` event, and returns the aggregated text.
   *
   * Fixes vs. the previous revision:
   * - `[DONE]` is now recognized with or without a space after `data:`
   *   (`data:[DONE]` and `data: [DONE]`); previously the spaced form fell
   *   through to JSON.parse and surfaced as a bogus error chunk.
   * - The reader is cancelled when we return early on `[DONE]`.
   * - The early `done` callback now includes `aggregated`, matching the
   *   end-of-stream `done` callback.
   *
   * @param {string} message - User message.
   * @param {Array} [context] - Conversation history.
   * @param {(chunk: object) => void} [onChunk] - Receives
   *   `{json, text, aggregated}` per event, `{error, raw}` on parse failure,
   *   and finally `{done: true, aggregated}`.
   * @param {{systemPrompt?: string}} [options]
   * @returns {Promise<string>} The concatenated content + reasoning text.
   * @throws {Error} On non-OK HTTP status or missing response body.
   */
  async streamMessage(message, context = [], onChunk, options = {}) {
    const { systemPrompt } = options
    const body = this.buildRequestBody(
      message,
      context,
      systemPrompt || this.config.systemPrompt,
      true
    )

    const resp = await fetch(this.config.apiUrl, {
      method: 'POST',
      headers: this.config.headers,
      body: JSON.stringify(body)
    })
    if (!resp.ok || !resp.body) {
      const text = await resp.text().catch(() => '')
      throw new Error(`HTTP ${resp.status}: ${text}`)
    }

    const reader = resp.body.getReader()
    const decoder = new TextDecoder('utf-8')
    let buffer = ''
    let aggregated = ''

    // Parse one JSON event payload, accumulate its delta text, notify caller.
    const emitPayload = (jsonStr) => {
      try {
        const json = JSON.parse(jsonStr)
        const delta = json?.choices?.[0]?.delta
        const text = (delta?.content || '') + (delta?.reasoning_content || '')
        if (text) aggregated += text
        onChunk && onChunk({ json, text, aggregated })
      } catch {
        onChunk && onChunk({ error: true, raw: jsonStr })
      }
    }

    try {
      while (true) {
        const { done, value } = await reader.read()
        if (done) break
        buffer += decoder.decode(value, { stream: true })
        let index
        // SSE events are separated by a blank line ("\n\n").
        while ((index = buffer.indexOf('\n\n')) !== -1) {
          const chunk = buffer.slice(0, index).trim()
          buffer = buffer.slice(index + 2)
          if (!chunk) continue
          for (const line of chunk.split('\n')) {
            const trimmed = line.trim()
            if (!trimmed.startsWith('data:')) continue
            const payload = trimmed.slice(5).trim()
            if (payload === '[DONE]') {
              onChunk && onChunk({ done: true, aggregated })
              return aggregated
            }
            emitPayload(payload)
          }
        }
      }
      // Flush a trailing event that arrived without its "\n\n" terminator.
      // Parse errors here are ignored: the tail may be a truncated fragment.
      const tail = buffer.trim()
      if (tail) {
        try {
          const json = JSON.parse(tail.replace(/^data:/, '').trim())
          const delta = json?.choices?.[0]?.delta
          const text = (delta?.content || '') + (delta?.reasoning_content || '')
          if (text) aggregated += text
          onChunk && onChunk({ json, text, aggregated })
        } catch {
          // ignore incomplete trailing fragment
        }
      }
    } finally {
      // Release the stream whether we finished normally or returned on [DONE].
      reader.cancel().catch(() => {})
    }
    onChunk && onChunk({ done: true, aggregated })
    return aggregated
  }

  /**
   * Normalize a non-streaming API response into {id, created, content, raw}.
   * Falls back to a synthetic envelope when the payload has no choices.
   *
   * @param {object} data - Raw JSON body from the API.
   * @returns {{id: string, created: number, content: string, raw: object|null}}
   */
  processResponse(data) {
    if (data && Array.isArray(data.choices) && data.choices.length > 0) {
      const choice = data.choices[0]
      const content = choice?.message?.content || choice?.delta?.content || ''
      return {
        id: data.id,
        created: data.created,
        content: content || '（无内容）',
        raw: data
      }
    }
    return {
      id: `chatcmpl-${Date.now()}`,
      created: Math.floor(Date.now() / 1000),
      content: '响应格式异常',
      raw: data || null
    }
  }

  /**
   * Build the apology response returned when the API is unreachable.
   *
   * @param {string} message - The user message that failed, echoed back.
   * @returns {{id: string, created: number, content: string, raw: null}}
   */
  getFallback(message) {
    return {
      id: `chatcmpl-${Date.now()}`,
      created: Math.floor(Date.now() / 1000),
      content: `抱歉，服务暂不可用。（问题：${message}）`,
      raw: null
    }
  }

  /**
   * Liveness probe: sends a tiny "ping" completion request.
   * Never throws; always resolves to a status envelope.
   *
   * @returns {Promise<{status: 'ok'|'error', timestamp: number,
   *   response?: object, error?: string}>}
   */
  async heartbeat() {
    try {
      const response = await fetch(this.config.apiUrl, {
        method: 'POST',
        headers: this.config.headers,
        body: JSON.stringify({
          model: this.config.model,
          messages: [{ role: 'user', content: 'ping' }],
          stream: false,
          max_tokens: 10
        })
      })

      if (response.ok) {
        const data = await response.json()
        return {
          status: 'ok',
          timestamp: Date.now(),
          response: data
        }
      }
      return {
        status: 'error',
        timestamp: Date.now(),
        error: `HTTP ${response.status}`
      }
    } catch (error) {
      return {
        status: 'error',
        timestamp: Date.now(),
        error: error.message
      }
    }
  }
}

// Module-wide singleton: all importers share one configured client.
export default new AIService()
