// web-llm-embed/src/utils/make-chain.ts
import { CallbackManager } from 'langchain/callbacks';
import { ConversationalRetrievalQAChain } from 'langchain/chains';
import { OpenAIChat } from 'langchain/llms';
import { PromptTemplate } from 'langchain/prompts';
import { BufferMemory } from "langchain/memory";
import { HNSWLib } from 'langchain/vectorstores/hnswlib';
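
// Default prompt templates for the retrieval chain: CONDENSE_PROMPT rewrites a
// follow-up question into a standalone question, and QA_PROMPT answers strictly
// from the retrieved document context.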
export const defaultPrompts = {
CONDENSE_PROMPT: `Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question.
Chat History:
{chat_history}
Follow Up Input: {question}
Standalone question:`,
QA_PROMPT: `You are an AI assistant providing helpful advice. You are given the following extracted parts of a long document and a question. Provide a conversational answer based on the context provided.
You should only provide hyperlinks that reference the context below. Do NOT make up hyperlinks.
If you can't find the answer in the context below, just say "Hmm, I'm not sure." Don't try to make up an answer.
If the question is not related to the context, politely respond that you are tuned to only answer questions that are related to the context.
Question: {question}
=========
{context}
=========
Answer:`,
};
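
// PromptTemplate forms of the defaults above. Note: makeChain below passes the
// raw template strings to the chain, so these compiled instances are currently unused.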
const CONDENSE_PROMPT = PromptTemplate.fromTemplate(
  defaultPrompts.CONDENSE_PROMPT,
);
const QA_PROMPT = PromptTemplate.fromTemplate(defaultPrompts.QA_PROMPT);
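
/**
 * Builds a ConversationalRetrievalQAChain over the given HNSWLib vector store.
 * When onTokenStream is provided, generated tokens are forwarded to it as they
 * arrive from the model.
 */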
export const makeChain = (
  vectorstore: HNSWLib,
  onTokenStream?: (token: string) => void,
) => {
  const model = new OpenAIChat({
    temperature: 0.8,
    modelName: "gpt-3.5-turbo",
    // Stream tokens only when a callback is supplied.
    streaming: Boolean(onTokenStream),
    callbackManager: onTokenStream
      ? CallbackManager.fromHandlers({
          async handleLLMNewToken(token) {
            onTokenStream(token);
          },
        })
      : undefined,
  });
  return ConversationalRetrievalQAChain.fromLLM(
    model,
    vectorstore.asRetriever(),
    {
      memory: new BufferMemory({
        memoryKey: "chat_history", // Must be set to "chat_history"
      }),
      qaTemplate: defaultPrompts.QA_PROMPT,
      questionGeneratorTemplate: defaultPrompts.CONDENSE_PROMPT,
    },
  );
};
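
// Usage sketch (illustrative, not part of this module): assumes a vector store
// has already been built elsewhere, e.g. with HNSWLib.fromTexts and an embeddings
// instance; the store construction and question below are hypothetical.
//
//   const chain = makeChain(vectorstore, (token) => process.stdout.write(token));
//   const res = await chain.call({ question: 'What is this document about?' });
//   console.log(res.text);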