<template>
  <div class="llm-assistant-container">
    <el-row :gutter="20">
      <!-- Left column: main interaction area (workflow generation + Q&A) -->
      <el-col :span="16">
        <el-card class="box-card">
          <template #header>
            <div class="card-header">
              <span>通过自然语言生成工作流</span>
            </div>
          </template>
          <!-- Natural-language input for workflow generation -->
          <LLMInput />
        </el-card>
        
        <el-card class="box-card" style="margin-top: 20px;">
           <template #header>
            <div class="card-header">
              <span>智能问答助手</span>
            </div>
          </template>
          <!-- Conversational Q&A assistant -->
          <LLMQA />
        </el-card>
      </el-col>

      <!-- Right column: history of previous instructions -->
      <el-col :span="8">
        <el-card class="box-card history-card">
          <template #header>
            <div class="card-header">
              <span>历史指令</span>
            </div>
          </template>
          <LLMHistory />
        </el-card>
      </el-col>
    </el-row>
  </div>
</template>

<script setup>
import LLMInput from '../components/llm/LLMInput.vue';
import LLMHistory from '../components/llm/LLMHistory.vue';
import LLMQA from '../components/llm/LLMQA.vue';

import { ref, reactive } from 'vue'
import { ElMessage, ElMessageBox } from 'element-plus'
import aiInteraction from '@/utils/ai-interaction'

// Reactive state. Chinese string literals are user-facing UI text and are
// deliberately left untranslated.
const userInput = ref('')           // current natural-language query text
const isProcessing = ref(false)     // true while a query is being handled
const showHistory = ref(false)      // history panel toggle (not referenced in this file's template)
const showResultDialog = ref(false) // visibility of the generated-workflow dialog
const generatedWorkflow = ref(null) // last workflow returned by the AI layer
const progress = ref(0)             // progress value; indeterminate mode is the default
const progressIndeterminate = ref(true)
const progressText = ref('正在处理您的请求...')

// Conversation log, seeded with the assistant's greeting message.
// Each entry: { role: 'user' | 'assistant', content, time (locale string) }.
const chatHistory = ref([
  {
    role: 'assistant',
    content: '您好！我是您的AI助手，请告诉我您需要进行什么数据分析？',
    time: new Date().toLocaleTimeString()
  }
])

// Handle a user query end-to-end: log the message, ask the AI layer to
// translate it into a workflow, then surface either the result dialog or
// the failure, mirroring each outcome into the chat history.
async function processUserQuery() {
  // Ignore blank / whitespace-only submissions.
  if (!userInput.value.trim()) {
    ElMessage.warning('请输入您的需求')
    return
  }

  isProcessing.value = true
  progress.value = 0
  progressIndeterminate.value = true
  progressText.value = '正在分析您的需求...'

  // Record the user's message in the conversation log.
  appendChatMessage('user', userInput.value)

  try {
    // Placeholder context — no live data or operation history is wired in yet.
    const context = {
      currentData: null,
      historyOperations: []
    }

    progressText.value = '正在生成工作流...'
    // Translate the natural-language query into a workflow definition.
    const workflow = await aiInteraction.translateUserQuery(userInput.value, context)

    // Stash the result and open the confirmation dialog.
    generatedWorkflow.value = workflow
    showResultDialog.value = true

    appendChatMessage('assistant', '已为您生成工作流，请确认是否使用')
    ElMessage.success('工作流生成成功！')
  } catch (error) {
    ElMessage.error(error.message || '处理失败，请重试')

    // Mirror the failure into the conversation log.
    const reason = error.message || '未知错误'
    appendChatMessage('assistant', '处理失败：' + reason)
  } finally {
    isProcessing.value = false
  }
}

// Append one timestamped entry to the chat history.
function appendChatMessage(role, content) {
  chatHistory.value.push({
    role,
    content,
    time: new Date().toLocaleTimeString()
  })
}

// Emitted events. 'workflow-generated' carries the accepted workflow object
// to the parent. Declared here, BEFORE any function that calls `emit`: the
// original file declared it at the bottom, which only worked because the
// handlers run after setup completes — fragile and against convention.
const emit = defineEmits(['workflow-generated'])

// Accept the generated workflow: close the result dialog and notify the
// parent so it can load the workflow onto the canvas.
function confirmWorkflow() {
  ElMessage.success('已确认使用该工作流')
  showResultDialog.value = false
  emit('workflow-generated', generatedWorkflow.value)
}

// Reset the query input field.
function clearInput() {
  userInput.value = ''
}

// before-close handler for the result dialog: the generated workflow is
// discarded on close, so ask the user to confirm first. `done()` is only
// invoked when the user confirms; rejection (cancel) leaves the dialog open.
function handleResultDialogClose(done) {
  ElMessageBox.confirm('确认关闭？生成的工作流将丢失')
    .then(() => {
      done()
    })
    .catch(() => {
      // User cancelled — keep the dialog open.
    })
}
</script>

<style scoped>
/* Page wrapper: light grey backdrop filling the viewport below the header. */
.llm-assistant-container {
  padding: 20px;
  background-color: #f0f2f5;
  min-height: calc(100vh - 110px);
}
.box-card {
  border: none;
}
.card-header {
  font-weight: bold;
  font-size: 16px;
}
.history-card {
  height: calc(100vh - 150px); /* adjusted to fit the layout */
}
/* .el-card__body is rendered inside the ElCard child component, so Vue's
   scoped-CSS attribute rewriting drops a plain descendant selector before
   it can match; :deep() is required for this rule to take effect. */
.history-card :deep(.el-card__body) {
  height: calc(100% - 60px);
  padding: 0;
}
</style>