const { Ollama } = require('@langchain/ollama');
const { StringOutputParser } = require('@langchain/core/output_parsers');
const ResultWrapper = require('../utils/resultWrapper');
const VectorStoreService = require('../core/vectorStore');

// Initialize the Ollama LLM client. Endpoint and model name are configurable
// through environment variables, falling back to a local default server and
// a default model when unset.
const ollama = new Ollama({
  baseUrl: process.env.OLLAMA_BASE_URL || 'http://localhost:11434',
  model: process.env.OLLAMA_MAIN_MODEL || "deepseek-r1:8b",
});

// Output parser that turns the raw model response into a plain string.
const outputParser = new StringOutputParser();

/**
 * Send a prompt through the Ollama model and parse the reply into a plain
 * string. Shared by DocumentService.qa() and DocumentService.summarize()
 * so the model pipeline is built in exactly one place.
 * @param {string} prompt - Fully assembled prompt text.
 * @returns {Promise<string>} The model's answer as a string.
 */
const runPrompt = (prompt) => ollama.pipe(outputParser).invoke(prompt);

/**
 * DocumentService — vector-store backed document management,
 * retrieval-augmented question answering (RAG), and summarization.
 */
class DocumentService {
  /**
   * Add a single document to the named vector-store collection,
   * ensuring the collection exists first.
   * @param {string} collectionName - Target collection.
   * @param {string} text - Document body to embed and store.
   * @param {Object} [metadata={}] - Arbitrary metadata stored with the text.
   * @returns {Promise<*>} The VectorStoreService result on success
   *   (presumably already a ResultWrapper — verify against that service),
   *   or ResultWrapper.error(message) on failure.
   */
  static async addDocument(collectionName, text, metadata = {}) {
    try {
      await VectorStoreService.initCollection(collectionName);
      // Pass the store's result through unchanged.
      return await VectorStoreService.addDocuments(collectionName, [
        { text, metadata },
      ]);
    } catch (error) {
      return ResultWrapper.error(error.message);
    }
  }

  /**
   * Answer a question with retrieval-augmented generation: similar documents
   * are fetched from the vector store and injected into the prompt context.
   * @param {string} question - The user's question.
   * @param {string} [context] - Optional caller-supplied context, prepended
   *   to the retrieved documents when present.
   * @param {string} [collectionName='documents'] - Collection to search.
   *   New optional parameter; defaults to the previously hard-coded value,
   *   so existing two-argument callers are unaffected.
   * @returns {Promise<*>} ResultWrapper.success(answer) or
   *   ResultWrapper.error(message).
   */
  static async qa(question, context, collectionName = 'documents') {
    try {
      const searchResults = await VectorStoreService.similaritySearch(
        collectionName,
        question,
      );

      // Guard against a missing/empty result set so .map cannot throw a
      // TypeError; an empty retrieved context degrades gracefully instead.
      const retrievedContext = (searchResults?.data ?? [])
        .map((result) => result.content)
        .join('\n\n');

      // Merge caller-supplied context (if any) with the retrieved documents.
      const finalContext = context
        ? `${context}\n\n相关文档：\n${retrievedContext}`
        : retrievedContext;

      const prompt = `上下文: ${finalContext}\n\n问题: ${question}\n\n请根据上下文回答问题。`;

      const response = await runPrompt(prompt);
      return ResultWrapper.success(response);
    } catch (error) {
      return ResultWrapper.error(error.message);
    }
  }

  /**
   * Generate a concise summary of the given text.
   * @param {string} text - Text to summarize.
   * @returns {Promise<*>} ResultWrapper.success(summary) or
   *   ResultWrapper.error(message).
   */
  static async summarize(text) {
    try {
      const prompt = `请为以下文本生成一个简洁的摘要：\n\n${text}`;
      const response = await runPrompt(prompt);
      return ResultWrapper.success(response);
    } catch (error) {
      return ResultWrapper.error(error.message);
    }
  }
}

module.exports = DocumentService;
