import useLLM from "@react-llm/headless";
import Image from "next/image"; // NOTE(review): not referenced in the visible code — presumably used in the stripped JSX below; verify before removing
import { useCallback, useEffect, useState } from "react";
import MessageList from './MessageList';
import {FileLoader} from './FileLoader';
import Loader from "./Loader";
import { RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
import { XenovaTransformersEmbeddings } from '../embed/hf';
import { MemoryVectorStore } from "langchain/vectorstores/memory";
import { SEND_MESSAGE } from '../utils' // NOTE(review): unused in the visible code — confirm against the original JSX
import { Button, TextInput, } from "react95";

/**
 * Chat window that talks to an in-browser LLM (@react-llm/headless).
 *
 * Two modes, selected by whether a file has been loaded:
 *  - no file: the raw user input is sent straight to the model;
 *  - file loaded: the file is chunked, embedded into an in-memory vector
 *    store, and the user's question is answered RAG-style from the top
 *    matching chunks.
 *
 * @param {string[]} stopStrings - stop sequences forwarded to `send`
 * @param {number}   maxTokens   - generation limit forwarded to `send`
 */
function ChatWindow({ stopStrings, maxTokens, }) {
  const { loadingStatus, send, isGenerating, deleteMessages } = useLLM();
  // Text content of the loaded document; undefined until a file is loaded.
  const [fileText, setFileText] = useState();
  const [userInput, setUserInput] = useState("");
  // NOTE(review): isLoading is set in loadFile but never read in the visible
  // code — presumably consumed by the stripped JSX; confirm.
  const [isLoading, setIsLoading] = useState(false);

  // Controlled-input handler (presumably wired to the TextInput in the JSX).
  const handleChange = (event) => { setUserInput(event.target.value); };

  // Model is usable only once the weights are fully downloaded/compiled.
  const isReady = loadingStatus.progress === 1;

  const handleClearChat = () => { deleteMessages(); }
  const handleClearFile = () => { setFileText(null); }

  /**
   * Build a RAG prompt for `userInput` from `fileText`.
   * Splits the document into ~1000-char chunks, embeds them with a local
   * Transformers.js model, and retrieves the 2 most similar chunks.
   *
   * NOTE(review): the catch block only logs — on any embedding/search
   * failure this function falls through and implicitly returns `undefined`,
   * so callers end up doing `send(undefined, …)`. Consider rethrowing or
   * returning a fallback prompt.
   */
  const qaHandler = async (fileText, userInput) => {
    const textSplitter = new RecursiveCharacterTextSplitter({ chunkSize: 1000 });
    const docs = await textSplitter.createDocuments([fileText]);
    let qaPrompt;
    try {
      const vectorStore = await MemoryVectorStore.fromTexts(
        [...docs.map(doc => doc.pageContent)],
        [...docs.map((v, k) => k)], // chunk index used as the metadata entry
        new XenovaTransformersEmbeddings()
      )
      const queryResult = await vectorStore.similaritySearch(userInput, 2);
      // Runtime prompt text — do not edit: the model's behavior depends on
      // this exact wording.
      qaPrompt = `You are an AI assistant providing helpful advice. You are given the following extracted parts of a long document and a question. Provide a conversational answer based on the context provided. You should only provide hyperlinks that reference the context below. Do NOT make up hyperlinks. If you can't find the answer in the context below, just say "Hmm, I'm not sure." Don't try to make up an answer. 
If the question is not related to the context, politely respond that you are tuned to only answer questions that are related to the context. Question: ${userInput} ========= ${queryResult.map(result => result.pageContent).join('')} ========= Answer: `
      return qaPrompt;
    } catch (err) { console.log(err); }
  }

  // Submit the current input: RAG path when a file is loaded, plain chat
  // otherwise. No-op while generating or before the model is ready.
  const handleSubmit = useCallback(async () => {
    if (isGenerating || !isReady) { return; }
    if (fileText) {
      const qaPrompt = await qaHandler(fileText, userInput);
      send(qaPrompt, maxTokens, stopStrings);
    } else {
      send(userInput, maxTokens, stopStrings);
    }
    setUserInput("");
  }, [ userInput, send, isGenerating, isReady, maxTokens, stopStrings, fileText ]);

  // Submit on Enter. NOTE(review): this listener is attached to `window`,
  // so Enter anywhere on the page triggers a submit — likely intended only
  // for the chat input; consider scoping it to the TextInput.
  useEffect(() => {
    const handleKeyPress = (event) => {
      if (event.key === "Enter") {
        event.preventDefault();
        handleSubmit();
      }
    };
    window.addEventListener("keydown", handleKeyPress);
    return () => {
      window.removeEventListener("keydown", handleKeyPress);
    };
  }, [handleSubmit]);

  // When a file is loaded, immediately ask the model for a summary of it.
  // The string argument is a runtime prompt — do not edit.
  const loadFile = async (fileText) => {
    console.log('file loaded, demo mode');
    if (fileText) {
      setIsLoading(true);
      const qaPrompt = await qaHandler(fileText, "Based on the context provide a summary of the document as a helpful assistant");
      send(qaPrompt, maxTokens, stopStrings);
      setIsLoading(false);
    }
  }

  // Auto-summarize whenever fileText changes (set by the FileLoader,
  // presumably — the wiring lives in the stripped JSX).
  useEffect(() => { loadFile(fileText); }, [fileText])

  // NOTE(review): the JSX below was garbled by extraction — the element
  // tags were stripped, leaving invalid fragments. The original markup
  // (MessageList, FileLoader, TextInput, Button, Loader, etc.) must be
  // recovered from version control; do not attempt to reconstruct it here.
  return (
    {/* */}
    {isReady && (
    )} {!isReady && }
  );
}

export default ChatWindow;