import { getSystemPrompt, getRefinementPrompt } from './promptTemplates'
import type { LLMResponse, ComponentSchema } from '@/types'

/**
 * Mock LLM response (swap in a real API call in production).
 *
 * @param userInput - The user's natural-language request; keyword matching
 *   on it selects which canned response to return.
 * @param currentSchema - The schema already rendered, if any. When present
 *   and non-empty the request is treated as a multi-turn refinement.
 * @returns A canned {@link LLMResponse} after a simulated network delay.
 */
export async function mockLLMResponse(
  userInput: string, 
  currentSchema?: ComponentSchema[]
): Promise<LLMResponse> {
  // Simulate network latency.
  await new Promise(resolve => setTimeout(resolve, 1000))

  // Build the prompt exactly as the real integration would: a refinement
  // prompt for follow-up edits (the multi-turn flow from the article),
  // otherwise the system prompt for a first request. The mock sends it
  // nowhere; `void` documents that it is intentionally unused here.
  const prompt =
    currentSchema && currentSchema.length > 0
      ? getRefinementPrompt(JSON.stringify(currentSchema), userInput)
      : getSystemPrompt()
  void prompt

  // Return a different canned response depending on keywords in the input.
  if (userInput.includes('销售') || userInput.includes('报表')) {
    return {
      components: [
        {
          component: 'd-card',
          props: { title: '销售数据报表', shadow: true },
          children: [
            {
              component: 'd-table',
              props: {
                columns: [
                  { field: 'region', header: '地区' },
                  { field: 'q1', header: 'Q1销售额' },
                  { field: 'q2', header: 'Q2销售额' },
                  { field: 'q3', header: 'Q3销售额' },
                  { field: 'total', header: '总计' }
                ],
                data: [
                  { region: '华东', q1: 1000, q2: 1200, q3: 1500, total: 3700 },
                  { region: '华南', q1: 800, q2: 900, q3: 1100, total: 2800 },
                  { region: '华北', q1: 600, q2: 700, q3: 900, total: 2200 }
                ],
                stripe: true,
                headerBg: '#2c3e50'
              }
            }
          ]
        }
      ]
    }
  } else if (userInput.includes('用户') || userInput.includes('统计')) {
    return {
      components: [
        {
          component: 'd-card',
          props: { title: '用户统计概览' },
          children: [
            {
              component: 'd-table',
              props: {
                columns: [
                  { field: 'metric', header: '指标' },
                  { field: 'value', header: '数值' },
                  { field: 'growth', header: '增长率' }
                ],
                data: [
                  { metric: '总用户数', value: '125,430', growth: '+12.5%' },
                  { metric: '活跃用户', value: '89,210', growth: '+8.3%' },
                  { metric: '新增用户', value: '5,432', growth: '+15.2%' }
                ]
              }
            },
            {
              component: 'd-button',
              props: { 
                text: '查看详细报告',
                bsStyle: 'primary',
                onClick: 'emit:view_report'
              }
            }
          ]
        }
      ]
    }
  } else {
    // Default response when no keyword matches.
    return {
      components: [
        {
          component: 'd-card',
          props: { title: '自定义面板' },
          children: [
            {
              component: 'd-button',
              props: { 
                text: '开始构建',
                bsStyle: 'primary',
                onClick: 'emit:start_building'
              }
            }
          ]
        }
      ]
    }
  }
}

// 实际API调用函数（文章中的OpenAI集成示例）
export async function callOpenAI(userInput: string): Promise<LLMResponse> {
  // 实际项目中取消注释并配置正确的API密钥
  /*
  import OpenAI from 'openai'

  const client = new OpenAI({
    apiKey: import.meta.env.VITE_OPENAI_API_KEY,
    baseURL: import.meta.env.VITE_OPENAI_BASE_URL,
    dangerouslyAllowBrowser: true,
  })

  const completion = await client.chat.completions.create({
    model: 'gpt-4',
    messages: [
      { role: 'system', content: getSystemPrompt() },
      { role: 'user', content: userInput }
    ],
    temperature: 0.7,
    max_tokens: 2000
  })

  const content = completion.choices[0]?.message?.content
  if (!content) {
    throw new Error('No response from AI')
  }

  try {
    return JSON.parse(content)
  } catch (error) {
    throw new Error('Invalid JSON response from AI')
  }
  */

  // 暂时使用模拟响应
  return mockLLMResponse(userInput)
}