import readline from 'readline';
import { TextProcessor } from './utils/textProcessor';
import { VectorStoreService } from './services/vectorStore';
import { Document } from '@langchain/core/documents';
import { config } from './config/config';
import OpenAI from 'openai';
import path from 'path';
import { fileURLToPath } from 'url';

// Recreate CommonJS-style __filename/__dirname, which do not exist in ES modules.
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

/**
 * RAG pipeline entry point: chunk a sample file, index the chunks in the
 * vector store, prompt the user for one question on stdin, retrieve the
 * most relevant chunks, and have the LLM answer using them as context.
 *
 * @returns {Promise<void>} resolves after the answer is printed (or an
 *   error is logged) and the readline interface is closed.
 */
async function main() {
  // 1. Split the source text into chunks.
  const textProcessor = new TextProcessor();
  const filePath = path.resolve(__dirname, './examples/sample.txt');
  const chunks = await textProcessor.processFile(filePath);
  const documents = chunks.map(
    (chunk, idx) =>
      new Document({ pageContent: chunk, metadata: { chunk: idx } })
  );

  // 2. Embed the chunks and add them to the vector store.
  const vectorStoreService = new VectorStoreService();
  await vectorStoreService.addDocuments(documents);

  // 3. Ask the user for a question.
  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout
  });
  rl.question('请输入您的问题：', async (userQuestion) => {
    // The callback's returned promise is ignored by readline, so any
    // rejection here would be an unhandled rejection and rl.close()
    // would never run (hanging the process). Guard with try/catch/finally.
    try {
      // 4. Retrieve the top-k chunks most similar to the question.
      const relatedDocs = await vectorStoreService.similaritySearch(
        userQuestion,
        4
      );
      const context = relatedDocs.map((doc) => doc.pageContent).join('\n');

      // 5. Generate the answer with the LLM, grounded on the retrieved context.
      const openai = new OpenAI({
        apiKey: config.dashscope.apiKey,
        baseURL: config.dashscope.baseURL
      });
      const completion = await openai.chat.completions.create({
        model: 'qwen-plus',
        messages: [
          {
            role: 'system',
            content: '你是一个专业的中文AI助手，请结合给定的上下文回答用户问题。'
          },
          {
            role: 'user',
            content: `已知内容：${context}\n\n问题：${userQuestion}`
          }
        ]
      });
      // choices may be empty or content null; don't crash on a bad response.
      const answer = completion.choices[0]?.message?.content ?? '（未获得回答）';
      console.log('\nAI回答：', answer);
    } catch (err) {
      console.error('\n回答生成失败：', err);
    } finally {
      rl.close(); // always release stdin so the process can exit
    }
  });
}

main().catch(console.error);
