const { Ollama } = require('@langchain/ollama');
const { StringOutputParser } = require('@langchain/core/output_parsers');
const { RunnableSequence } = require('@langchain/core/runnables');
const { PromptTemplate } = require('@langchain/core/prompts');
const VectorStoreService = require('../core/vectorStore');
const ResultWrapper = require('../utils/resultWrapper');

// Connection settings for the local Ollama LLM; both the endpoint and the
// model name can be overridden through environment variables.
const modelConfig = {
  baseUrl: process.env.OLLAMA_BASE_URL || 'http://localhost:11434',
  model: process.env.OLLAMA_MAIN_MODEL || 'deepseek-r1:8b',
  temperature: 0.7,
};

// LLM client used to generate answers from retrieved context.
const ollama = new Ollama(modelConfig);

// Converts the raw model output into a plain string.
const outputParser = new StringOutputParser();

// Prompt that instructs the model to answer strictly from the supplied
// context and to admit when it does not know the answer.
const qaPrompt = PromptTemplate.fromTemplate(`使用以下上下文来回答问题。如果你不知道答案，就说你不知道，不要试图编造答案。

上下文：{context}

问题：{question}

答案：`);

/**
 * Retrieval-augmented question answering (RAG) service.
 *
 * Pipeline: retrieve the top-K most similar documents from the vector
 * store, join them into one context string, then run the
 * prompt -> LLM -> string-parser chain to produce an answer.
 */
class QAService {
  // Name of the vector-store collection to search.
  static COLLECTION_NAME = 'documents';

  // The QA chain is stateless, so build it once at load time and reuse it
  // for every call instead of reconstructing it per request.
  static #qaChain = RunnableSequence.from([
    {
      context: (input) => input.context,
      question: (input) => input.question,
    },
    qaPrompt,
    ollama,
    outputParser,
  ]);

  /**
   * Answer a question using documents retrieved from the vector store.
   *
   * @param {string} question - The user's question.
   * @param {number} [topK=5] - How many similar documents to retrieve as context.
   * @returns {Promise<*>} ResultWrapper.success(answer) on success, or
   *   ResultWrapper.error(message) on invalid input or processing failure.
   */
  static async qa(question, topK = 5) {
    try {
      // Reject empty or non-string questions up front instead of sending
      // a meaningless query to the vector store and the model.
      if (typeof question !== 'string' || question.trim() === '') {
        return ResultWrapper.error('问题不能为空');
      }

      // Retrieve the top-K documents most similar to the question.
      const relevantDocs = await VectorStoreService.similaritySearch(
        this.COLLECTION_NAME,
        question,
        topK
      );

      if (!relevantDocs || relevantDocs.length === 0) {
        return ResultWrapper.success('抱歉，我没有找到相关的信息来回答这个问题。');
      }

      // Merge the retrieved document bodies into a single context string.
      // NOTE(review): assumes each search result exposes `.content` —
      // confirm against VectorStoreService's return shape.
      const context = relevantDocs
        .map((doc) => doc.content)
        .join('\n\n');

      // Run the chain: fill the prompt, invoke the model, parse to string.
      const response = await QAService.#qaChain.invoke({
        context,
        question,
      });

      return ResultWrapper.success(response);
    } catch (error) {
      console.error('QA处理失败:', error);
      return ResultWrapper.error(error.message);
    }
  }
}

// Expose the service class (consumers call the static methods directly).
module.exports = QAService;