<script lang="ts">
	import ChatWindow from "$lib/components/chat/ChatWindow.svelte";
	import { pendingMessage } from "$lib/stores/pendingMessage";
	import { pendingMessageIdToRetry } from "$lib/stores/pendingMessageIdToRetry";
	import { onMount } from "svelte";
	import { page } from "$app/stores";
	import { textGenerationStream, type Options } from "@huggingface/inference";
	import { invalidate } from "$app/navigation";
	import { base } from "$app/paths";
	import { shareConversation } from "$lib/shareConversation";
	import { UrlDependency } from "$lib/types/UrlDependency";
	import { ERROR_MESSAGES, error } from "$lib/stores/errors";
	import { randomUUID } from "$lib/utils/randomUuid";
	import { findCurrentModel } from "$lib/utils/models.js";

	export let data;

	let messages = data.messages;
	let lastLoadedMessages = data.messages;
	let isAborted = false;

	// Since we modify the messages array locally, we don't want to reset it if an old version is passed
	$: if (data.messages !== lastLoadedMessages) {
		messages = data.messages;
		lastLoadedMessages = data.messages;
	}

	let loading = false;
	let pending = false;
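
	// Stream the model response for `inputs`, updating the local messages array token by token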
	async function getTextGenerationStream(inputs: string, messageId: string, isRetry = false) {
		const conversationId = $page.params.id;

		const response = textGenerationStream(
			{
				model: $page.url.href,
				inputs,
				parameters: {
					...data.models.find((m) => m.id === data.model)?.parameters,
					return_full_text: false,
				},
			},
			{
				id: messageId,
				is_retry: isRetry,
				use_cache: false,
			} as Options
		);

		for await (const output of response) {
			pending = false;

			if (!output) {
				break;
			}
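
			// The user navigated to another conversation while this one was still streaming: tell the server to stop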
			if (conversationId !== $page.params.id) {
				fetch(`${base}/conversation/${conversationId}/stop-generating`, {
					method: "POST",
				}).catch(console.error);
				break;
			}
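
			// The user pressed stop: notify the server and stop consuming the stream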
			if (isAborted) {
				isAborted = false;
				fetch(`${base}/conversation/${conversationId}/stop-generating`, {
					method: "POST",
				}).catch(console.error);
				break;
			}

			// final message
			if (output.generated_text) {
				const lastMessage = messages[messages.length - 1];

				if (lastMessage) {
					lastMessage.content = output.generated_text;
					messages = [...messages];
				}
				break;
			}

			if (!output.token.special) {
				const lastMessage = messages[messages.length - 1];

				if (lastMessage?.from !== "assistant") {
					// First token has a space at the beginning, trim it
					messages = [
						...messages,
						// id doesn't match the backend id but it's not important for assistant messages
						{ from: "assistant", content: output.token.text.trimStart(), id: randomUUID() },
					];
				} else {
					lastMessage.content += output.token.text;
					messages = [...messages];
				}
			}
		}
	}
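
	// Ask the backend to summarize the conversation into a title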
	async function summarizeTitle(id: string) {
		await fetch(`${base}/conversation/${id}/summarize`, {
			method: "POST",
		});
	}
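
	// Append the user message (or replace it when retrying) and stream the assistant's answer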
	async function writeMessage(message: string, messageId = randomUUID()) {
		if (!message.trim()) return;

		try {
			isAborted = false;
			loading = true;
			pending = true;
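
			// When retrying, replace the existing user message and drop everything after it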
			let retryMessageIndex = messages.findIndex((msg) => msg.id === messageId);
			const isRetry = retryMessageIndex !== -1;
			if (!isRetry) {
				retryMessageIndex = messages.length;
			}

			messages = [
				...messages.slice(0, retryMessageIndex),
				{ from: "user", content: message, id: messageId },
			];

			await getTextGenerationStream(message, messageId, isRetry);
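
			// Summarize the title after the first user message, otherwise just refresh the conversation list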
			if (messages.filter((m) => m.from === "user").length === 1) {
				summarizeTitle($page.params.id)
					.then(() => invalidate(UrlDependency.ConversationList))
					.catch(console.error);
			} else {
				await invalidate(UrlDependency.ConversationList);
			}
		} catch (err) {
			if (err instanceof Error && err.message.includes("overloaded")) {
				$error = "Too much traffic, please try again.";
			} else if (err instanceof Error) {
				$error = err.message;
			} else {
				$error = ERROR_MESSAGES.default;
			}
			console.error(err);
		} finally {
			loading = false;
		}
	}
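
	// Send any message that was queued in the pendingMessage store before this page mounted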
	onMount(async () => {
		if ($pendingMessage) {
			const val = $pendingMessage;
			const messageId = $pendingMessageIdToRetry || undefined;
			$pendingMessage = "";
			$pendingMessageIdToRetry = null;

			writeMessage(val, messageId);
		}
	});
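
	// Use the conversation's own title when available, otherwise fall back to data.title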
	$: title = data.conversations.find((conv) => conv.id === $page.params.id)?.title ?? data.title;
</script>

<svelte:head>
	<title>{title}</title>
</svelte:head>

<ChatWindow
	{loading}
	{pending}
	{messages}
	on:message={(message) => writeMessage(message.detail)}
	on:retry={(message) => writeMessage(message.detail.content, message.detail.id)}
	on:share={() => shareConversation($page.params.id, data.title)}
	on:stop={() => (isAborted = true)}
	currentModel={findCurrentModel(data.models, data.model)}
	settings={data.settings}
/>