
import { OllamaEmbeddings } from "@langchain/community/embeddings/ollama";
import { PDFLoader } from "langchain/document_loaders/fs/pdf";
import { Chroma } from "@langchain/community/vectorstores/chroma";
import { RecursiveCharacterTextSplitter } from "langchain/text_splitter";
import { PromptTemplate } from "@langchain/core/prompts";
import { ChatOllama } from "@langchain/community/chat_models/ollama";
import { createStuffDocumentsChain } from "langchain/chains/combine_documents";
import { StringOutputParser } from "@langchain/core/output_parsers";

/**
 * Load a PDF from disk and split it into overlapping text chunks
 * suitable for embedding.
 *
 * @param {string} [pdfPath="./doc/愚公移山.pdf"] - Path of the PDF to load.
 * @param {{chunkSize?: number, chunkOverlap?: number}} [options] - Splitter
 *   settings; overlap keeps context continuous across chunk boundaries.
 * @returns {Promise<import("@langchain/core/documents").Document[]>} The split chunks.
 */
const splitPDF = async (
    pdfPath = "./doc/愚公移山.pdf",
    { chunkSize = 500, chunkOverlap = 200 } = {},
) => {
    const loader = new PDFLoader(pdfPath);
    const docs = await loader.load();
    console.log(docs.length);
    const textSplitter = new RecursiveCharacterTextSplitter({
        chunkSize,
        chunkOverlap,
    });
    const splitDocs = await textSplitter.splitDocuments(docs);
    console.log(splitDocs.length);
    return splitDocs;
};

/**
 * Embed the given document chunks with a local Ollama embedding model
 * and persist them into a remote Chroma collection.
 *
 * @param {import("@langchain/core/documents").Document[]} docs - Pre-split chunks to embed and store.
 * @returns {Promise<void>}
 */
const saveVector = async (docs) => {
    const embeddings = new OllamaEmbeddings({
        model: "nomic-embed-text",
        baseUrl: "http://117.72.38.226:11434",
        requestOptions: {
            numThread: 6,
        },
    });
    // Chroma.fromDocuments embeds every chunk itself and writes the
    // vectors into the named collection on the remote Chroma server.
    await Chroma.fromDocuments(docs, embeddings, {
        collectionName: "test-hcy-3",
        url: "http://117.72.38.226:8000",
    });
};

/**
 * Open a handle to the already-populated Chroma collection, using the
 * same Ollama embedding model that produced the stored vectors.
 *
 * @returns {Promise<Chroma>} Vector store bound to the existing collection.
 */
const getVectorPoint = async () => {
    const embeddingModel = new OllamaEmbeddings({
        model: "nomic-embed-text",
        baseUrl: "http://117.72.38.226:11434",
        requestOptions: {
            numThread: 6,
        },
    });
    return Chroma.fromExistingCollection(embeddingModel, {
        collectionName: "test-hcy-3",
        url: "http://117.72.38.226:8000",
    });
};

/**
 * Answer a question with retrieval-augmented generation: find the chunks
 * most similar to the question, stuff them into a prompt, and have the
 * chat model respond.
 *
 * @param {Chroma} vectorStore - Handle to the populated Chroma collection.
 * @param {string} [question="是谁把问题上报给了天帝?"] - Query used for the
 *   similarity search.
 * @returns {Promise<string>} The model's answer text.
 */
const ragSupport = async (vectorStore, question = "是谁把问题上报给了天帝?") => {
    // Prompt that asks the model to summarise the retrieved context.
    const prompt = PromptTemplate.fromTemplate(
        "对这个文档内容进行总结: {context}"
    );
    // Local chat model served by Ollama.
    const qwen = new ChatOllama({
        baseUrl: "http://117.72.38.226:11434",
        model: "qwen:4b",
    });
    // "Stuff" chain: all retrieved documents are inserted into one prompt.
    const chain = await createStuffDocumentsChain({
        llm: qwen,
        outputParser: new StringOutputParser(),
        prompt,
    });
    const docs = await vectorStore.similaritySearch(question);
    console.log(docs);
    const response = await chain.invoke({
        context: docs,
    });
    console.log(response);
    return response;
};

// One-time ingestion (uncomment and run once to populate the collection):
// const docs = await splitPDF();
// await saveVector(docs);

// Open a handle to the existing vector collection.
const store = await getVectorPoint();

// Await the RAG call so a failure surfaces here instead of becoming an
// unhandled promise rejection.
await ragSupport(store);


