Commit
•
f00349e
1
Parent(s):
7482c01
🐛 Fix when model sends back prompt
Browse files
src/routes/conversation/[id]/+server.ts
CHANGED
@@ -25,8 +25,7 @@ export async function POST({ request, fetch, locals, params }) {
25       const json = await request.json();
26
27       const messages = [...conv.messages, { from: 'user', content: json.inputs }] satisfies Message[];
28   -
29   -   json.inputs = buildPrompt(messages);
30
31       const resp = await fetch(PUBLIC_MODEL_ENDPOINT, {
32         headers: {
@@ -34,13 +33,21 @@ export async function POST({ request, fetch, locals, params }) {
34           Authorization: `Basic ${HF_TOKEN}`
35         },
36         method: 'POST',
37   -     body: JSON.stringify(json)   [deleted line; tail truncated in the extraction — "json" inferred from the replacement hunk, which spreads `...json`]
38       });
39
40       const [stream1, stream2] = resp.body!.tee();
41
42       async function saveMessage() {
43   -     const generated_text = await parseGeneratedText(stream2);   [deleted line; content lost in the extraction — inferred from the replacement hunk's `let generated_text = await parseGeneratedText(stream2);`]
44
45       messages.push({ from: 'assistant', content: generated_text });
46
|
25       const json = await request.json();
26
27       const messages = [...conv.messages, { from: 'user', content: json.inputs }] satisfies Message[];
28   +   const prompt = buildPrompt(messages);
29
30       const resp = await fetch(PUBLIC_MODEL_ENDPOINT, {
31         headers: {
         [line 32 not shown in this diff view]
33           Authorization: `Basic ${HF_TOKEN}`
34         },
35         method: 'POST',
36   +     body: JSON.stringify({
37   +       ...json,
38   +       inputs: prompt
39   +     })
40       });
41
42       const [stream1, stream2] = resp.body!.tee();
43
44       async function saveMessage() {
45   +     let generated_text = await parseGeneratedText(stream2);
46   +
47   +     // We could also check if PUBLIC_ASSISTANT_MESSAGE_TOKEN is present and use it to slice the text
48   +     if (generated_text.startsWith(prompt)) {
49   +       generated_text = generated_text.slice(prompt.length);
50   +     }
51
52       messages.push({ from: 'assistant', content: generated_text });
53