wip

This commit moves the model picker modal out of InferencePlayground.svelte and into InferencePlaygroundModelSelector.svelte: the selector now owns the showModelPickerModal state, the changeModel handler, and the ModelPickerModal markup, and its trigger button handles on:click locally instead of forwarding the event to the parent.
src/lib/components/InferencePlayground/InferencePlayground.svelte
CHANGED
@@ -8,7 +8,6 @@
 	import GenerationConfig from './InferencePlaygroundGenerationConfig.svelte';
 	import HFTokenModal from './InferencePlaygroundHFTokenModal.svelte';
 	import ModelSelector from './InferencePlaygroundModelSelector.svelte';
-	import ModelPickerModal from './InferencePlaygroundModelPickerModal.svelte';
 	import Conversation from './InferencePlaygroundConversation.svelte';
 	import { onDestroy } from 'svelte';
 	import { type ChatCompletionInputMessage } from '@huggingface/tasks';
@@ -33,7 +32,6 @@
 	let hfToken: string | undefined = import.meta.env.VITE_HF_TOKEN;
 	let viewCode = false;
 	let showTokenModal = false;
-	let showModelPickerModal = false;
 	let loading = false;
 	let latency = 0;
 	let abortController: AbortController | undefined = undefined;
@@ -134,14 +132,6 @@
 			submit();
 		}
 	}
-
-	function changeModel(modelId: string) {
-		const model = models.find((m) => m.id === modelId);
-		if (!model) {
-			return;
-		}
-		conversation.model = model;
-	}
 </script>

 {#if showTokenModal}
@@ -156,14 +146,6 @@
 	/>
 {/if}

-{#if showModelPickerModal}
-	<ModelPickerModal
-		{models}
-		on:modelSelected={(e) => changeModel(e.detail)}
-		on:close={(e) => (showModelPickerModal = false)}
-	/>
-{/if}
-
 <!-- svelte-ignore a11y-no-static-element-interactions -->
 <div
 	class="w-dvh grid divide-gray-200 overflow-hidden bg-gray-100/50 max-md:divide-y md:h-dvh md:grid-cols-[clamp(220px,20%,350px),minmax(0,1fr),clamp(270px,25%,300px)] dark:divide-gray-800 dark:bg-gray-900 dark:text-gray-300 dark:[color-scheme:dark]"
@@ -270,7 +252,7 @@
 			<div
 				class="flex flex-1 flex-col gap-6 overflow-y-hidden rounded-xl border border-gray-200/80 bg-gradient-to-b from-white via-white p-3 shadow-sm dark:border-white/5 dark:from-gray-800/40 dark:via-gray-800/40"
 			>
-				<ModelSelector {models} bind:conversation
+				<ModelSelector {models} bind:conversation />

 				<GenerationConfig bind:conversation />
 				<div class="mt-auto">
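With the modal and changeModel removed from the parent, InferencePlayground.svelte only passes {models} and bind:conversation to the selector; the child updates conversation.model itself and the two-way binding carries that change back up. A minimal sketch of that bind pattern, using hypothetical Parent.svelte / Selector.svelte components that are not files from this repo:

<!-- Selector.svelte (hypothetical): reassigning the bound prop updates the parent -->
<script lang="ts">
	export let conversation: { model: { id: string } };

	function changeModel(id: string) {
		// Property assignment invalidates `conversation` here and,
		// through bind:conversation, in the parent as well.
		conversation.model = { id };
	}
</script>

<button on:click={() => changeModel('hypothetical-org/hypothetical-model')}>
	Current: {conversation.model.id}
</button>

<!-- Parent.svelte (hypothetical): stays in sync via bind:conversation -->
<script lang="ts">
	import Selector from './Selector.svelte';

	let conversation = { model: { id: 'hypothetical-org/another-model' } };

	// Re-runs whenever the child reassigns conversation.model.
	$: selectedId = conversation.model.id;
</script>

<Selector bind:conversation />
<p>Selected model: {selectedId}</p>

The same mechanism is what lets the real selector's changeModel become visible to the playground without any callback prop or event.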
src/lib/components/InferencePlayground/InferencePlaygroundModelSelector.svelte
CHANGED
@@ -1,10 +1,13 @@
 <script lang="ts">
 	import type { Conversation, ModelEntryWithTokenizer } from '$lib/types';
 	import IconCaret from '../Icons/IconCaret.svelte';
+	import ModelPickerModal from './InferencePlaygroundModelPickerModal.svelte';

 	export let models: ModelEntryWithTokenizer[] = [];
 	export let conversation: Conversation;

+	let showModelPickerModal = false;
+
 	async function getAvatarUrl(orgName: string) {
 		const url = `https://huggingface.co/api/organizations/${orgName}/avatar`;
 		const res = await fetch(url);
@@ -17,9 +20,25 @@
 		return avatarUrl;
 	}

+	function changeModel(modelId: string) {
+		const model = models.find((m) => m.id === modelId);
+		if (!model) {
+			return;
+		}
+		conversation.model = model;
+	}
+
 	$: [nameSpace, modelName] = conversation.model.id.split('/');
 </script>

+{#if showModelPickerModal}
+	<ModelPickerModal
+		{models}
+		on:modelSelected={(e) => changeModel(e.detail)}
+		on:close={(e) => (showModelPickerModal = false)}
+	/>
+{/if}
+
 <div class="flex flex-col gap-2">
 	<label
 		for="countries"
@@ -29,7 +48,7 @@

 	<button
 		class="flex items-center justify-between gap-6 overflow-hidden whitespace-nowrap rounded-lg border bg-gray-100/80 px-3 py-1.5 leading-tight shadow dark:bg-gray-700"
-		on:click
+		on:click={() => (showModelPickerModal = true)}
 	>
 		<div class="flex flex-col items-start">
 			<div class="flex items-center gap-1 text-sm text-gray-500 dark:text-gray-300">
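InferencePlaygroundModelPickerModal.svelte itself is not touched by this commit, but the selector now both opens it (the button's on:click is handled locally rather than forwarded) and listens for its modelSelected and close events, with the chosen model id expected in event.detail. A sketch of what the modal's dispatch side presumably looks like, assuming it uses Svelte's createEventDispatcher; this is an illustration, not the actual file:

<!-- Hypothetical sketch of InferencePlaygroundModelPickerModal.svelte's event side -->
<script lang="ts">
	import { createEventDispatcher } from 'svelte';
	import type { ModelEntryWithTokenizer } from '$lib/types';

	export let models: ModelEntryWithTokenizer[] = [];

	// The selector listens for 'modelSelected' (detail: the model id string)
	// and 'close' (no detail), so the modal presumably dispatches exactly those.
	const dispatch = createEventDispatcher();
</script>

<div role="dialog" aria-modal="true">
	{#each models as model}
		<button on:click={() => dispatch('modelSelected', model.id)}>
			{model.id}
		</button>
	{/each}
	<button on:click={() => dispatch('close')}>Close</button>
</div>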