matt HOFFNER committed
Commit · 70ff588
1 Parent(s): 80d4a52
add clear, tweaks to length, improve search
src/components/ChatV2.jsx CHANGED
@@ -7,7 +7,7 @@ export default function Chat() {
     <div>
       <div className="flex justify-center m-3 gap-2">
         <div className="sm:w-[500px] w-full">
-          <ChatWindow />
+          <ChatWindow maxTokens={1000}/>
         </div>
       </div>
     </div>
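Note on the length tweak: ChatV2.jsx now passes maxTokens={1000} down, and ChatWindow.jsx already declares maxTokens among its props, but the call that consumes it sits outside the hunks shown in this commit. The snippet below is only a sketch of the likely wiring; the send(text, maxTokens, stopStrings) argument order and the useLLM import are assumptions, not something this diff shows.

// Hypothetical sketch only: how the maxTokens prop could reach the model call.
// import { useLLM } from "..."; // the app's LLM provider; path not shown in this diff
function ChatWindowSketch({ stopStrings, maxTokens }) {
  const { loadingStatus, send, isGenerating } = useLLM();
  const isReady = loadingStatus.progress === 1;

  const ask = (text) => {
    if (isGenerating || !isReady) return;  // same guard as handleSubmit in the diff below
    send(text, maxTokens, stopStrings);    // assumed signature: cap replies at maxTokens (1000)
  };

  return null; // rendering omitted; this only illustrates the prop flow
}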
src/components/ChatWindow.jsx CHANGED
@@ -12,7 +12,7 @@ function ChatWindow({
   stopStrings,
   maxTokens,
 }) {
-  const { loadingStatus, send, isGenerating } = useLLM();
+  const { loadingStatus, send, isGenerating, deleteMessages } = useLLM();
   const [fileText, setFileText] = useState();
   const [userInput, setUserInput] = useState("");
 
@@ -22,6 +22,10 @@ function ChatWindow({
 
   const isReady = loadingStatus.progress === 1;
 
+  const handleClear = () => {
+    deleteMessages();
+  }
+
   const handleSubmit = useCallback(async () => {
     if (isGenerating || !isReady) {
       return;
@@ -37,7 +41,7 @@ function ChatWindow({
        [...docs.map((v, k) => k)],
        new XenovaTransformersEmbeddings()
      )
-     const queryResult = await vectorStore.similaritySearch(userInput,
+     const queryResult = await vectorStore.similaritySearch(userInput, 2);
      console.log("queryResult", queryResult);
      qaPrompt =
      `You are an AI assistant providing helpful advice. You are given the following extracted parts of a long document and a question. Provide a conversational answer based on the context provided.
@@ -46,7 +50,7 @@ function ChatWindow({
      If the question is not related to the context, politely respond that you are tuned to only answer questions that are related to the context.
      Question: ${userInput}
      =========
-     ${queryResult
+     ${queryResult.map(result => result.pageContent).join('')}
      =========
      Answer:
      `
@@ -142,6 +146,7 @@ function ChatWindow({
            height="40"
          />
        </button>
+       <button onClick={handleClear}>Clear</button>
        <FileLoader setFileText={setFileText} />
      </div>
 
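Putting the two search hunks together: similaritySearch is now limited to the two closest chunks, and their pageContent is actually interpolated into the QA prompt. Below is a standalone sketch of that retrieval step; MemoryVectorStore.fromTexts is an assumption (the store's construction call is cut off in this view), and XenovaTransformersEmbeddings is a project-local class whose import path is not shown.

// Hypothetical sketch of the retrieval step after this commit.
// MemoryVectorStore.fromTexts is assumed; the diff only shows its arguments
// (the doc texts, their indices, and a XenovaTransformersEmbeddings instance).
import { MemoryVectorStore } from "langchain/vectorstores/memory";
// import { XenovaTransformersEmbeddings } from "..."; // project-local wrapper, path not shown

async function buildQaPrompt(docs, userInput) {
  const vectorStore = await MemoryVectorStore.fromTexts(
    docs,
    [...docs.map((v, k) => k)],            // chunk indices as metadata, as in the diff
    new XenovaTransformersEmbeddings()
  );

  // "improve search": keep only the two closest chunks (k = 2).
  const queryResult = await vectorStore.similaritySearch(userInput, 2);
  console.log("queryResult", queryResult);

  // Join the matched chunk texts between the ========= markers of the prompt.
  const context = queryResult.map((result) => result.pageContent).join("");
  return `You are an AI assistant providing helpful advice. You are given the following extracted parts of a long document and a question. Provide a conversational answer based on the context provided.
If the question is not related to the context, politely respond that you are tuned to only answer questions that are related to the context.
Question: ${userInput}
=========
${context}
=========
Answer:
`;
}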