More refactor (#41)
- src/lib/components/InferencePlayground/InferencePlayground.svelte +10 -8
- src/lib/components/InferencePlayground/InferencePlaygroundCodeSnippets.svelte +16 -8
- src/lib/components/InferencePlayground/InferencePlaygroundConversation.svelte +5 -3
- src/lib/components/InferencePlayground/InferencePlaygroundGenerationConfig.svelte +1 -0
- src/lib/components/InferencePlayground/InferencePlaygroundHFTokenModal.svelte +5 -4
- src/lib/components/InferencePlayground/InferencePlaygroundMessage.svelte +1 -1
- src/lib/components/InferencePlayground/InferencePlaygroundModelSelector.svelte +3 -2
- src/lib/components/InferencePlayground/InferencePlaygroundModelSelectorModal.svelte +5 -3
- src/lib/components/InferencePlayground/inferencePlaygroundUtils.ts +2 -1
- src/lib/{types/index.d.ts → components/InferencePlayground/types.ts} +1 -1
- src/routes/+page.server.ts +2 -2
src/lib/components/InferencePlayground/InferencePlayground.svelte
CHANGED
@@ -1,18 +1,20 @@
 <script lang="ts">
+	import type { ModelEntryWithTokenizer } from "./types";
+	import { type ChatCompletionInputMessage } from "@huggingface/tasks";
+
+	import { defaultGenerationConfig } from "./generationConfigSettings";
 	import {
 		createHfInference,
 		handleStreamingResponse,
 		handleNonStreamingResponse,
 		isSystemPromptSupported,
 	} from "./inferencePlaygroundUtils";
+
+	import { onDestroy } from "svelte";
 	import GenerationConfig from "./InferencePlaygroundGenerationConfig.svelte";
 	import HFTokenModal from "./InferencePlaygroundHFTokenModal.svelte";
 	import ModelSelector from "./InferencePlaygroundModelSelector.svelte";
 	import Conversation from "./InferencePlaygroundConversation.svelte";
-	import { onDestroy } from "svelte";
-	import { type ChatCompletionInputMessage } from "@huggingface/tasks";
-	import type { ModelEntryWithTokenizer } from "$lib/types";
-	import { defaultGenerationConfig } from "./generationConfigSettings";
 	import IconShare from "../Icons/IconShare.svelte";
 	import IconDelete from "../Icons/IconDelete.svelte";
 	import IconCode from "../Icons/IconCode.svelte";
@@ -39,10 +41,6 @@
 
 	$: systemPromptSupported = isSystemPromptSupported(conversation.model);
 
-	onDestroy(() => {
-		abortController?.abort();
-	});
-
 	function addMessage() {
 		conversation.messages = [
 			...conversation.messages,
@@ -132,6 +130,10 @@
 			submit();
 		}
 	}
+
+	onDestroy(() => {
+		abortController?.abort();
+	});
 </script>
 
 {#if showTokenModal}

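
Note on the moved block above: the `onDestroy(() => abortController?.abort())` cleanup is unchanged in behavior, only relocated below the handlers. For reference, a minimal standalone TypeScript sketch of that cancel-on-teardown pattern (the `startStreaming` and `teardown` helpers are hypothetical stand-ins for the component's submit logic and Svelte's onDestroy callback):

// Sketch only: tie an AbortController to a teardown hook, as the component's
// onDestroy(() => abortController?.abort()) block does.
let abortController: AbortController | undefined;

async function startStreaming(url: string): Promise<Response> {
	const controller = new AbortController();
	abortController = controller;
	// Aborting the controller cancels this request mid-stream.
	return fetch(url, { signal: controller.signal });
}

function teardown(): void {
	// Called on component destroy; a no-op if nothing is in flight.
	abortController?.abort();
}

export { startStreaming, teardown };
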
src/lib/components/InferencePlayground/InferencePlaygroundCodeSnippets.svelte
CHANGED
@@ -1,11 +1,13 @@
 <script lang="ts">
+	import type { Conversation } from "./types";
+
+	import { onDestroy } from "svelte";
 	import hljs from "highlight.js/lib/core";
 	import javascript from "highlight.js/lib/languages/javascript";
 	import python from "highlight.js/lib/languages/python";
 	import http from "highlight.js/lib/languages/http";
-
+
 	import IconCopyCode from "../Icons/IconCopyCode.svelte";
-	import { onDestroy } from "svelte";
 
 	hljs.registerLanguage("javascript", javascript);
 	hljs.registerLanguage("python", python);
@@ -27,6 +29,12 @@
 		language?: Language;
 	}
 
+	interface MessagesJoiner {
+		sep: string;
+		start: string;
+		end: string;
+	}
+
 	$: snippetsByLanguage = {
 		javascript: getJavascriptSnippets(conversation),
 		python: getPythonSnippets(conversation),
@@ -50,14 +58,14 @@
 	}
 
 	function getJavascriptSnippets(conversation: Conversation) {
-		const formattedMessages = ({ sep, start, end }) =>
+		const formattedMessages = ({ sep, start, end }: MessagesJoiner) =>
 			start +
 			getMessages()
 				.map(({ role, content }) => `{ role: "${role}", content: "${content}" }`)
 				.join(sep) +
 			end;
 
-		const formattedConfig = ({ sep, start, end }) =>
+		const formattedConfig = ({ sep, start, end }: MessagesJoiner) =>
 			start +
 			Object.entries(conversation.config)
 				.map(([key, val]) => `${key}: ${val}`)
@@ -116,14 +124,14 @@ console.log(out.choices[0].message);`,
 	}
 
 	function getPythonSnippets(conversation: Conversation) {
-		const formattedMessages = ({ sep, start, end }) =>
+		const formattedMessages = ({ sep, start, end }: MessagesJoiner) =>
 			start +
 			getMessages()
 				.map(({ role, content }) => `{ "role": "${role}", "content": "${content}" }`)
 				.join(sep) +
 			end;
 
-		const formattedConfig = ({ sep, start, end }) =>
+		const formattedConfig = ({ sep, start, end }: MessagesJoiner) =>
 			start +
 			Object.entries(conversation.config)
 				.map(([key, val]) => `${key}: ${val}`)
@@ -176,14 +184,14 @@ print(output.choices[0].message)`,
 	}
 
 	function getHttpSnippets(conversation: Conversation) {
-		const formattedMessages = ({ sep, start, end }) =>
+		const formattedMessages = ({ sep, start, end }: MessagesJoiner) =>
 			start +
 			getMessages()
 				.map(({ role, content }) => `{ "role": "${role}", "content": "${content}" }`)
 				.join(sep) +
 			end;
 
-		const formattedConfig = ({ sep, start, end }) =>
+		const formattedConfig = ({ sep, start, end }: MessagesJoiner) =>
 			start +
 			Object.entries(conversation.config)
 				.map(([key, val]) => `"${key}": ${val}`)

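
The new `MessagesJoiner` interface just names the `{ sep, start, end }` shape that the snippet builders were already destructuring. A self-contained sketch of the same join pattern (the message list is made up for illustration):

interface MessagesJoiner {
	sep: string;
	start: string;
	end: string;
}

const messages = [
	{ role: "user", content: "Hello" },
	{ role: "assistant", content: "Hi!" },
];

// Mirrors the component's formattedMessages helper: wrap the joined entries
// in start/end and separate them with sep.
const formattedMessages = ({ sep, start, end }: MessagesJoiner) =>
	start +
	messages.map(({ role, content }) => `{ role: "${role}", content: "${content}" }`).join(sep) +
	end;

console.log(formattedMessages({ sep: ",\n\t", start: "[\n\t", end: "\n]" }));

The same typed parameter is reused by `formattedConfig`, which joins `Object.entries(conversation.config)` instead of the message list.
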
src/lib/components/InferencePlayground/InferencePlaygroundConversation.svelte
CHANGED
@@ -1,13 +1,15 @@
 <script lang="ts">
+	import type { Conversation } from "$lib/types";
+
 	import { createEventDispatcher } from "svelte";
+
 	import CodeSnippets from "./InferencePlaygroundCodeSnippets.svelte";
 	import Message from "./InferencePlaygroundMessage.svelte";
 	import IconPlus from "../Icons/IconPlus.svelte";
-	import type { Conversation } from "$lib/types";
 
-	export let loading;
 	export let conversation: Conversation;
-	export let viewCode;
+	export let loading: boolean;
+	export let viewCode: boolean;
 
 	const dispatch = createEventDispatcher<{
 		addMessage: void;

src/lib/components/InferencePlayground/InferencePlaygroundGenerationConfig.svelte
CHANGED
@@ -1,5 +1,6 @@
 <script lang="ts">
 	import type { Conversation } from "$lib/types";
+
 	import {
 		GENERATION_CONFIG_KEYS,
 		GENERATION_CONFIG_KEYS_ADVANCED,

src/lib/components/InferencePlayground/InferencePlaygroundHFTokenModal.svelte
CHANGED
@@ -1,7 +1,8 @@
-<!-- Main modal -->
 <script lang="ts">
-	import { createEventDispatcher, onDestroy, onMount } from "svelte";
 	import { browser } from "$app/environment";
+
+	import { createEventDispatcher, onDestroy, onMount } from "svelte";
+
 	import IconCross from "../Icons/IconCross.svelte";
 
 	let backdropEl: HTMLDivElement;
@@ -10,8 +11,8 @@
 	const dispatch = createEventDispatcher<{ close: void }>();
 
 	function handleKeydown(event: KeyboardEvent) {
-
-		if (event.key === "Escape") {
+		const { key } = event;
+		if (key === "Escape") {
 			event.preventDefault();
 			dispatch("close");
 		}

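
The keydown change here (and the identical one in InferencePlaygroundModelSelectorModal.svelte below) only destructures `key` before comparing. In isolation, the close-on-Escape handler looks roughly like this (dispatchClose is a placeholder for the component's `dispatch("close")`):

// Hedged sketch of the modal keydown handler; dispatchClose stands in for the
// createEventDispatcher-based close event used in the Svelte components.
function handleKeydown(event: KeyboardEvent, dispatchClose: () => void): void {
	const { key } = event;
	if (key === "Escape") {
		event.preventDefault();
		dispatchClose();
	}
}
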
src/lib/components/InferencePlayground/InferencePlaygroundMessage.svelte
CHANGED
@@ -1,6 +1,6 @@
 <script lang="ts">
-	import { createEventDispatcher } from "svelte";
 	import { type ChatCompletionInputMessage } from "@huggingface/tasks";
+	import { createEventDispatcher } from "svelte";
 
 	export let message: ChatCompletionInputMessage;
 	export let autofocus: boolean = false;

src/lib/components/InferencePlayground/InferencePlaygroundModelSelector.svelte
CHANGED
@@ -1,5 +1,6 @@
 <script lang="ts">
-	import type { Conversation, ModelEntryWithTokenizer } from "$lib/types";
+	import type { Conversation, ModelEntryWithTokenizer } from "./types";
+
 	import IconCaret from "../Icons/IconCaret.svelte";
 	import ModelSelectorModal from "./InferencePlaygroundModelSelectorModal.svelte";
 
@@ -20,7 +21,7 @@
 		return avatarUrl;
 	}
 
-	function changeModel(modelId:
+	function changeModel(modelId: ModelEntryWithTokenizer["id"]) {
 		const model = models.find(m => m.id === modelId);
 		if (!model) {
 			return;

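
Typing the parameter as `ModelEntryWithTokenizer["id"]` (an indexed access type) rather than a plain annotation keeps `changeModel` tied to whatever type the entry's `id` field has. A small illustration with a stand-in type (the real one lives in ./types):

// Stand-in type for illustration only.
interface ModelEntryWithTokenizer {
	id: string;
	tokenizerConfig: Record<string, unknown>;
}

const models: ModelEntryWithTokenizer[] = [{ id: "example-org/example-model", tokenizerConfig: {} }];

// ModelEntryWithTokenizer["id"] resolves to string today, but the signature
// follows automatically if the id field's type ever changes.
function changeModel(modelId: ModelEntryWithTokenizer["id"]): ModelEntryWithTokenizer | undefined {
	return models.find(m => m.id === modelId);
}

changeModel("example-org/example-model");
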
src/lib/components/InferencePlayground/InferencePlaygroundModelSelectorModal.svelte
CHANGED
@@ -1,6 +1,8 @@
 <script lang="ts">
-	import type { ModelEntryWithTokenizer } from "$lib/types";
+	import type { ModelEntryWithTokenizer } from "./types";
+
 	import { createEventDispatcher } from "svelte";
+
 	import IconSearch from "../Icons/IconSearch.svelte";
 	import IconStar from "../Icons/IconStar.svelte";
 
@@ -11,8 +13,8 @@
 	const dispatch = createEventDispatcher<{ modelSelected: string; close: void }>();
 
 	function handleKeydown(event: KeyboardEvent) {
-
-		if (event.key === "Escape") {
+		const { key } = event;
+		if (key === "Escape") {
 			event.preventDefault();
 			dispatch("close");
 		}

src/lib/components/InferencePlayground/inferencePlaygroundUtils.ts
CHANGED
@@ -1,6 +1,7 @@
 import { type ChatCompletionInputMessage } from "@huggingface/tasks";
+import type { Conversation, ModelEntryWithTokenizer } from "./types";
+
 import { HfInference } from "@huggingface/inference";
-import type { Conversation, ModelEntryWithTokenizer } from "$lib/types";
 
 export function createHfInference(token: string): HfInference {
 	return new HfInference(token);

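
`createHfInference` is a one-line wrapper around the `HfInference` client; the streaming and non-streaming handlers exported alongside it (unchanged in this PR) are what call it. A hedged usage sketch, assuming the chat-completion API of the installed @huggingface/inference version:

import { HfInference } from "@huggingface/inference";

function createHfInference(token: string): HfInference {
	return new HfInference(token);
}

// Assumption: chatCompletion is available with roughly this shape; verify
// against the @huggingface/inference version pinned in package.json.
async function demo(token: string): Promise<void> {
	const hf = createHfInference(token);
	const out = await hf.chatCompletion({
		model: "example-org/example-model", // placeholder model id
		messages: [{ role: "user", content: "Hello" }],
		max_tokens: 64,
	});
	console.log(out.choices[0]?.message);
}

export { createHfInference, demo };
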
src/lib/{types/index.d.ts → components/InferencePlayground/types.ts}
RENAMED
@@ -2,7 +2,7 @@ import type { GenerationConfig } from "$lib/components/InferencePlayground/generationConfigSettings";
 import type { ModelEntry } from "@huggingface/hub";
 import type { ChatCompletionInputMessage } from "@huggingface/tasks";
 
-type Conversation = {
+export type Conversation = {
 	model: ModelEntryWithTokenizer;
 	config: GenerationConfig;
 	messages: ChatCompletionInputMessage[];

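
With the definitions moving out of types/index.d.ts into an ordinary module, `Conversation` has to be exported explicitly, and consumers import it either relatively ("./types") or through the `$lib` alias, as the other diffs in this PR show. A sketch of the consumer side (SvelteKit's `$lib` alias assumed):

// Hypothetical consumer; mirrors the imports switched over elsewhere in this PR.
import type { Conversation, ModelEntryWithTokenizer } from "$lib/components/InferencePlayground/types";

function summarize(conversation: Conversation): string {
	return `${conversation.model.id}: ${conversation.messages.length} message(s)`;
}

export type { Conversation, ModelEntryWithTokenizer };
export { summarize };
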
src/routes/+page.server.ts
CHANGED
@@ -1,4 +1,4 @@
-import type { ModelEntryWithTokenizer } from "$lib/types";
+import type { ModelEntryWithTokenizer } from "$lib/components/InferencePlayground/types";
 import type { ModelEntry } from "@huggingface/hub";
 import type { PageServerLoad } from "./$types";
 import { env } from "$env/dynamic/private";
@@ -16,7 +16,7 @@ export const load: PageServerLoad = async ({ fetch }) => {
 	compatibleModels.sort((a, b) => a.id.toLowerCase().localeCompare(b.id.toLowerCase()));
 
 	const promises = compatibleModels.map(async model => {
-		const configUrl = `https://huggingface.co/${model.
+		const configUrl = `https://huggingface.co/${model.id}/raw/main/tokenizer_config.json`;
 		const res = await fetch(configUrl);
 		if (!res.ok) {
 			return null; // Ignore failed requests by returning null

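
The load function builds each model's raw tokenizer_config.json URL on the Hub and drops models whose request fails. A standalone sketch of that per-model lookup (plain fetch, no SvelteKit types):

// Fetch the raw tokenizer_config.json for a model id; return null on failure so
// the caller can filter the model out, as +page.server.ts does.
async function fetchTokenizerConfig(modelId: string): Promise<Record<string, unknown> | null> {
	const configUrl = `https://huggingface.co/${modelId}/raw/main/tokenizer_config.json`;
	const res = await fetch(configUrl);
	if (!res.ok) {
		return null; // Ignore failed requests by returning null
	}
	return (await res.json()) as Record<string, unknown>;
}

// Usage: keep only the models whose config could be read.
async function filterModels(modelIds: string[]): Promise<string[]> {
	const configs = await Promise.all(modelIds.map(fetchTokenizerConfig));
	return modelIds.filter((_, i) => configs[i] !== null);
}

export { fetchTokenizerConfig, filterModels };
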