const { FaissStore } = require("@langchain/community/vectorstores/faiss");
const { OllamaEmbeddings } = require("@langchain/ollama");
const { RecursiveCharacterTextSplitter } = require("@langchain/textsplitters")
const { TextLoader } = require("@langchain/community/document_loaders/fs/text");

/**
 * Builds an in-memory FAISS knowledge base from local text files.
 *
 * Pipeline: load documents -> split into overlapping chunks -> embed via a
 * local Ollama model -> index into a FaissStore.
 *
 * @returns {Promise<FaissStore>} the populated in-memory vector store
 *   (NOT persisted to disk — call `vectorStore.save(dir)` if needed).
 * @throws {Error} when no document could be loaded at all.
 */
async function buildKnowledgeBase() {
    // 1. Load documents (currently plain text; add more loaders as needed).
    const loaders = [
        new TextLoader("./test/1.txt"),
    ];
    const docs = [];
    for (const loader of loaders) {
        try {
            const docsPart = await loader.load();
            docs.push(...docsPart);
            console.log(`成功加载 ${docsPart.length} 篇 ${loader.constructor.name}`);
        } catch (error) {
            // BUG FIX: this previously referenced an undefined `e`, which
            // threw a ReferenceError and masked the real load failure.
            console.error(`加载${loader.constructor.name}失败:`, error.message);
        }
    }

    // Fail fast instead of embedding/indexing an empty corpus, which would
    // only surface as a confusing error further down the pipeline.
    if (docs.length === 0) {
        throw new Error("没有成功加载任何文档，无法构建知识库");
    }

    // 2. Split documents into overlapping chunks. Sizes are measured in
    // UTF-8 bytes (see lengthFunction), so 500 bytes ≈ 166 Chinese
    // characters (3 bytes each in UTF-8).
    const textSplitter = new RecursiveCharacterTextSplitter({
        chunkSize: 500,
        chunkOverlap: 50,
        lengthFunction: (text) => Buffer.byteLength(text, "utf8"),
        separators: ["\n\n", "\n", " ", ""],
        // NOTE: removed `isChinese: true` — RecursiveCharacterTextSplitter
        // has no such option; it was silently ignored. Chinese-friendly
        // splitting is already handled by the byte-based lengthFunction
        // and the separator list above.
    });
    const chunks = await textSplitter.splitDocuments(docs);
    console.log(`生成 ${chunks.length} 个知识块`);

    // Embeddings are served by a local Ollama instance
    // (default endpoint http://localhost:11434 — TODO confirm deployment).
    const embeddings = new OllamaEmbeddings({
        model: "smartcreation/bge-large-zh-v1.5:latest",
    });

    // 3. Index the chunks into an in-memory FAISS store, using the chunk
    // position as a stable string id.
    const vectorStore = new FaissStore(embeddings, {});
    await vectorStore.addDocuments(chunks, { ids: chunks.map((_, i) => i.toString()) });
    // BUG FIX: the old message claimed the store was "saved", but nothing
    // is written to disk here (FaissStore.save() is never called).
    console.log("向量数据库已构建（内存中）");

    return vectorStore;
}

// Public API: async factory that builds the knowledge base.
module.exports = { buildKnowledgeBase };