// 简易 AI 代理服务（开发环境用）
// 支持两种提供方：
// 1) 千帆文心一言（AI_PROVIDER=qianfan，默认）需环境变量：BAIDU_API_KEY, BAIDU_SECRET_KEY, 可选 BAIDU_WENXIN_MODEL
// 2) OpenAI 兼容接口（AI_PROVIDER=openai）需环境变量：OPENAI_API_BASE, OPENAI_API_KEY, OPENAI_MODEL

const express = require('express')
const cors = require('cors')
const https = require('https')
const http = require('http')
const { URL } = require('url')

// Express app wiring: permissive CORS (dev use) and JSON bodies capped at 1 MB.
const app = express()
app.use(cors())
app.use(express.json({ limit: '1mb' }))

/**
 * GET a URL and resolve with the response body parsed as JSON.
 * Fix: forward the URL's explicit port — the original dropped it, so any
 * URL on a non-default port (e.g. http://localhost:3001) silently went to
 * the protocol default (80/443).
 * @param {string} urlStr absolute http(s) URL
 * @returns {Promise<object>} parsed JSON body; rejects on network error or invalid JSON
 */
function httpGetJson(urlStr) {
  return new Promise((resolve, reject) => {
    const urlObj = new URL(urlStr)
    const mod = urlObj.protocol === 'http:' ? http : https
    const req = mod.get(
      {
        hostname: urlObj.hostname,
        port: urlObj.port || undefined, // empty string → protocol default
        path: urlObj.pathname + urlObj.search,
        protocol: urlObj.protocol,
      },
      (res) => {
        res.setEncoding('utf8')
        let data = ''
        res.on('data', (chunk) => (data += chunk))
        res.on('end', () => {
          try {
            resolve(JSON.parse(data))
          } catch (err) {
            reject(err)
          }
        })
        // Response-stream errors were previously unhandled (silent hang).
        res.on('error', reject)
      }
    )
    req.on('error', reject)
  })
}

/**
 * POST a JSON body and resolve with the response parsed as JSON.
 * Fix: forward the URL's explicit port — the original dropped it, so any
 * URL on a non-default port silently went to the protocol default (80/443).
 * @param {string} urlStr absolute http(s) URL
 * @param {object} [bodyObj] request payload, serialized as JSON ({} when falsy)
 * @param {object} [extraHeaders] extra headers merged over the defaults
 * @returns {Promise<object>} parsed JSON body; rejects on network error or invalid JSON
 */
function httpPostJson(urlStr, bodyObj, extraHeaders) {
  return new Promise((resolve, reject) => {
    const urlObj = new URL(urlStr)
    const body = JSON.stringify(bodyObj || {})
    const mod = urlObj.protocol === 'http:' ? http : https
    const req = mod.request(
      {
        hostname: urlObj.hostname,
        port: urlObj.port || undefined, // empty string → protocol default
        path: urlObj.pathname + urlObj.search,
        protocol: urlObj.protocol,
        method: 'POST',
        headers: Object.assign({
          'Content-Type': 'application/json',
          'Content-Length': Buffer.byteLength(body),
        }, extraHeaders || {}),
      },
      (res) => {
        res.setEncoding('utf8')
        let data = ''
        res.on('data', (chunk) => (data += chunk))
        res.on('end', () => {
          try {
            resolve(JSON.parse(data))
          } catch (err) {
            reject(err)
          }
        })
        // Response-stream errors were previously unhandled (silent hang).
        res.on('error', reject)
      }
    )
    req.on('error', reject)
    req.write(body)
    req.end()
  })
}

/**
 * POST a JSON body and resolve with the RAW response text, so callers can
 * fall back to SSE / plain-text parsing when the upstream does not return JSON.
 * Fix: forward the URL's explicit port — the original dropped it, so any
 * URL on a non-default port silently went to the protocol default (80/443).
 * @param {string} urlStr absolute http(s) URL
 * @param {object} [bodyObj] request payload, serialized as JSON ({} when falsy)
 * @param {object} [extraHeaders] extra headers merged over the defaults
 * @returns {Promise<{statusCode: number, text: string}>}
 */
function httpPostText(urlStr, bodyObj, extraHeaders) {
  return new Promise((resolve, reject) => {
    const urlObj = new URL(urlStr)
    const body = JSON.stringify(bodyObj || {})
    const mod = urlObj.protocol === 'http:' ? http : https
    const req = mod.request(
      {
        hostname: urlObj.hostname,
        port: urlObj.port || undefined, // empty string → protocol default
        path: urlObj.pathname + urlObj.search,
        protocol: urlObj.protocol,
        method: 'POST',
        headers: Object.assign(
          {
            'Content-Type': 'application/json',
            'Content-Length': Buffer.byteLength(body),
          },
          extraHeaders || {}
        ),
      },
      (res) => {
        res.setEncoding('utf8')
        let data = ''
        res.on('data', (chunk) => (data += chunk))
        res.on('end', () => {
          resolve({ statusCode: res.statusCode || 0, text: data })
        })
        // Response-stream errors were previously unhandled (silent hang).
        res.on('error', reject)
      }
    )
    req.on('error', reject)
    req.write(body)
    req.end()
  })
}

// Best-effort extraction of reply text from a raw SSE stream body.
// Understands OpenAI-style delta chunks and Wenxin-style result/output
// fields; non-JSON payloads are concatenated verbatim.
function extractTextFromSse(raw) {
  if (!raw || typeof raw !== 'string') return ''
  let collected = ''
  for (const rawLine of raw.split(/\r?\n/)) {
    const line = rawLine.trim()
    if (!line || !line.startsWith('data:')) continue
    const payload = line.slice(5).trim()
    if (!payload || payload === '[DONE]') continue
    try {
      const evt = JSON.parse(payload)
      // OpenAI streaming shape: choices[0].delta.content
      const choice = evt && evt.choices && evt.choices[0]
      const delta = choice && choice.delta
      let piece = delta && typeof delta.content === 'string' ? delta.content : ''
      // Wenxin may answer with `result` or `output` instead.
      if (!piece && typeof evt.result === 'string') piece = evt.result
      if (!piece && typeof evt.output === 'string') piece = evt.output
      if (piece) collected += piece
    } catch (_) {
      // Not JSON — append the payload text as-is.
      collected += payload
    }
  }
  return collected.trim()
}

/**
 * Exchange BAIDU_API_KEY / BAIDU_SECRET_KEY for a Qianfan OAuth access token.
 * Fix: surface the token endpoint's own error detail (`error_description`,
 * `error_msg`, or `error`) in the thrown message — the original discarded it,
 * making misconfigured credentials hard to diagnose.
 * @returns {Promise<string>} short-lived access_token
 * @throws {Error} when env vars are missing or the exchange fails
 */
async function getAccessToken() {
  const apiKey = process.env.BAIDU_API_KEY
  const secretKey = process.env.BAIDU_SECRET_KEY
  if (!apiKey || !secretKey) {
    throw new Error('Missing BAIDU_API_KEY or BAIDU_SECRET_KEY env')
  }
  const tokenUrl =
    'https://aip.baidubce.com/oauth/2.0/token?grant_type=client_credentials' +
    `&client_id=${encodeURIComponent(apiKey)}` +
    `&client_secret=${encodeURIComponent(secretKey)}`
  const resp = await httpGetJson(tokenUrl)
  if (!resp || !resp.access_token) {
    const detail = resp && (resp.error_description || resp.error_msg || resp.error)
    throw new Error(
      detail ? `Failed to obtain access_token: ${detail}` : 'Failed to obtain access_token'
    )
  }
  return resp.access_token
}

/**
 * Send a chat request to Baidu Wenxin (ERNIE) via the Qianfan REST API and
 * return the reply text. Falls back to SSE parsing and finally a raw-text
 * snippet when the response is not in the standard JSON shape.
 * @param {Array<object>} messages chat history in {role, content} form
 * @returns {Promise<string>} the model's reply text
 * @throws {Error} on Wenxin in-band errors (error_code / error_msg)
 */
async function chatWithErnie(messages) {
  const accessToken = await getAccessToken()
  const model = process.env.BAIDU_WENXIN_MODEL || 'eb-instant'
  const endpoint =
    `https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/${encodeURIComponent(model)}` +
    `?access_token=${encodeURIComponent(accessToken)}`

  // Fetch the body as raw text first, then try several parsing strategies.
  const { text } = await httpPostText(endpoint, {
    messages: messages || [],
    temperature: 0.7,
    stream: false,
  })

  let parsed = null
  try {
    parsed = JSON.parse(text)
  } catch (_) {
    // Not JSON — fall through to the SSE / raw-text fallbacks below.
  }

  // Wenxin reports failures in-band via error_code / error_msg.
  if (parsed && (parsed.error_code || parsed.error_msg)) {
    const code = parsed.error_code || 'UNKNOWN'
    const msg = parsed.error_msg || 'unknown error'
    const desc = parsed.error_description || ''
    throw new Error(`Wenxin error ${code}: ${msg} ${desc}`.trim())
  }

  if (parsed) {
    // Standard Wenxin shape: { result: "..." }
    if (typeof parsed.result === 'string') return parsed.result
    // Some deployments answer in the OpenAI choices shape instead.
    const content = Array.isArray(parsed.choices)
      ? parsed.choices[0] && parsed.choices[0].message && parsed.choices[0].message.content
      : undefined
    if (typeof content === 'string') return content
  }

  // Fallback 1: the upstream streamed SSE despite stream=false.
  const sseText = extractTextFromSse(text)
  if (sseText) return sseText
  // Fallback 2: return a raw snippet so the caller sees something useful.
  if (text && typeof text === 'string') return text.slice(0, 4000)
  return '（未能解析模型返回结果）'
}

/**
 * Send a chat completion request to an OpenAI-compatible endpoint and return
 * the reply text. Requires OPENAI_API_BASE and OPENAI_API_KEY; OPENAI_MODEL
 * defaults to gpt-3.5-turbo. Falls back to SSE parsing and finally a
 * raw-text snippet when the response is not in the standard JSON shape.
 * @param {Array<object>} messages chat history in {role, content} form
 * @returns {Promise<string>} the model's reply text
 * @throws {Error} when env vars are missing or the API returns an error object
 */
async function chatWithOpenAI(messages) {
  const base = process.env.OPENAI_API_BASE
  const apiKey = process.env.OPENAI_API_KEY
  const model = process.env.OPENAI_MODEL || 'gpt-3.5-turbo'
  if (!base || !apiKey) {
    throw new Error('Missing OPENAI_API_BASE or OPENAI_API_KEY env')
  }

  // Normalize the base URL so both "…/v1" and bare hosts work.
  const trimmedBase = base.replace(/\/$/, '')
  const endpoint = trimmedBase.endsWith('/v1')
    ? `${trimmedBase}/chat/completions`
    : `${trimmedBase}/v1/chat/completions`

  // Fetch the body as raw text to tolerate streaming / non-JSON upstreams.
  const { text } = await httpPostText(
    endpoint,
    { model, messages: messages || [], temperature: 0.7, stream: false },
    { Authorization: `Bearer ${apiKey}` }
  )

  let parsed = null
  try {
    parsed = JSON.parse(text)
  } catch (_) {
    // Not JSON — fall through to the SSE / raw-text fallbacks below.
  }

  if (parsed && parsed.error) {
    const { code, message, type } = parsed.error
    throw new Error(`OpenAI compatible error ${code || type || ''}: ${message || 'unknown'}`.trim())
  }

  const content = Array.isArray(parsed && parsed.choices)
    ? parsed.choices[0] && parsed.choices[0].message && parsed.choices[0].message.content
    : undefined
  if (typeof content === 'string') return content

  // Fallback 1: the upstream streamed SSE despite stream=false.
  const sseText = extractTextFromSse(text)
  if (sseText) return sseText
  // Fallback 2: return a raw snippet so the caller sees something useful.
  if (text && typeof text === 'string') return text.slice(0, 4000)
  return '（未能解析模型返回结果）'
}

// Health probe — reports the configured provider and the current server time.
app.get('/api/health', (req, res) => {
  res.json({ ok: true, provider: (process.env.AI_PROVIDER || 'qianfan'), time: Date.now() })
})

// Chat endpoint — validates that `messages` is an array, then forwards it to
// the provider selected by AI_PROVIDER (openai → OpenAI-compatible API,
// anything else → Baidu Wenxin). Provider failures surface as HTTP 500.
app.post('/api/ai/chat', async (req, res) => {
  try {
    const { messages } = req.body || {}
    if (!messages || !Array.isArray(messages)) {
      return res.status(400).json({ error: 'messages 必须是数组' })
    }
    const provider = (process.env.AI_PROVIDER || 'qianfan').toLowerCase()
    const reply = provider === 'openai' ? await chatWithOpenAI(messages) : await chatWithErnie(messages)
    res.json({ reply })
  } catch (err) {
    console.error('chat error:', err)
    res.status(500).json({ error: err.message || '服务器错误' })
  }
})

// Listen on PORT (default 3001).
const port = process.env.PORT || 3001
app.listen(port, () => {
  console.log(`[AI Proxy] 服务已启动: http://localhost:${port}`)
})


