Commit f2e5687 by mishig (HF staff)
1 Parent(s): 5213b80

Feature: chat with two models simultaneously (#11)

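In short: the playground no longer drives a single `currentConversation`; `submit()` now fires one inference run per open conversation and waits for all of them, which is what lets two models answer side by side. A rough sketch of that control flow, condensed from the `+page.svelte` changes below (names and config fields are taken from the diff, the body is simplified):

    // Condensed sketch of the new submit flow (not the full implementation).
    async function submit() {
      loading = true;
      try {
        // one run per conversation (one per model), awaited together
        await Promise.all(conversations.map((conversation) => runInference(conversation)));
        addMessage(); // append an empty next message to every conversation
      } catch (error) {
        if ((error as Error).name !== 'AbortError') alert('error: ' + (error as Error).message);
      } finally {
        loading = false;
        abortControllers = []; // each streaming run pushed its own AbortController here
      }
    }
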
src/lib/components/CodeSnippets.svelte CHANGED
@@ -7,8 +7,6 @@
   export let maxTokens: number;
   export let messages: ChatCompletionInputMessage[];
 
-  $: console.log(messages);
-
   const npmSnippet = `import { HfInference } from '@huggingface/inference'
 
 const hf = new HfInference('your access token')`;

src/lib/components/Conversation.svelte CHANGED
@@ -1,18 +1,15 @@
 <script lang="ts">
   import { createEventDispatcher } from 'svelte';
-  import PlaygroundCode from '$lib/components/CodeSnippets.svelte';
-  import PlaygroundMessage from '$lib/components/Message.svelte';
+  import CodeSnippets from '$lib/components/CodeSnippets.svelte';
+  import Message from '$lib/components/Message.svelte';
 
   export let loading;
-  export let streamingMessage;
-  export let conversations;
   export let conversation;
   export let index;
-  export let currentConversation;
   export let viewCode;
-  export let messages;
+  export let sideBySide = false;
 
-  const dispatch = createEventDispatcher<{ addMessage: void; deleteMessage: number }>();
+  const dispatch = createEventDispatcher<{ addMessage: void; deleteMessage: number, deleteConversation: number }>();
 
   let messageContainer: HTMLDivElement | null = null;
 
@@ -23,7 +20,7 @@
   }
 
   $: {
-    if (currentConversation.messages.at(-1)) {
+    if (conversation.messages.at(-1)) {
       scrollToBottom();
     }
   }
@@ -32,10 +29,10 @@
 <div
   class="flex max-h-[calc(100dvh-5.8rem)] flex-col overflow-y-auto overflow-x-hidden @container"
   class:pointer-events-none={loading}
-  class:animate-pulse={loading && !streamingMessage}
+  class:animate-pulse={loading && !conversation.config.streaming}
   bind:this={messageContainer}
 >
-  {#if conversations.length > 1}
+  {#if sideBySide}
     <div
       class="sticky top-0 flex h-11 flex-none items-center gap-2 whitespace-nowrap rounded-lg border border-gray-200/80 bg-white pl-3 pr-2 text-sm leading-none shadow-sm *:flex-none dark:border-gray-800 dark:bg-gray-800/70 dark:hover:bg-gray-800"
       class:mr-3={index === 0}
@@ -45,7 +42,7 @@
       <div>{conversation.model}</div>
       <button
         class="ml-auto flex size-6 items-center justify-center rounded bg-gray-50 text-xs hover:bg-gray-100 dark:bg-gray-800 dark:hover:bg-gray-700"
-        on:click={() => (conversations = conversations.filter((_, i) => i !== index))}
+        on:click={() => dispatch('deleteConversation', index)}
       >
 
       </button>
@@ -65,12 +62,15 @@
   </div>
   {/if}
   {#if !viewCode}
-    {#each messages as message, i}
-      <PlaygroundMessage
+    {#each conversation.messages as message, messageIdx}
+      <Message
         class="border-b"
         {message}
-        on:delete={() => dispatch('deleteMessage', i)}
-        autofocus={conversations.length === 1 && !loading && i === messages.length - 1}
+        conversationIdx={index}
+        {messageIdx}
+        on:messageValueChanged
+        on:delete={() => dispatch('deleteMessage', messageIdx)}
+        autofocus={!sideBySide && !loading && messageIdx === conversation.messages.length - 1}
       />
     {/each}
 
@@ -94,6 +94,6 @@
   </div>
   </button>
   {:else}
-    <PlaygroundCode {...currentConversation} {...currentConversation.config} />
+    <CodeSnippets {...conversation} {...conversation.config} />
   {/if}
 </div>

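Worth noting: the component no longer mutates the shared `conversations` array itself; it only raises events and the page applies the state change. A condensed sketch of the new props/events contract (prop and event names taken from the diff above, not the full component):

    import { createEventDispatcher } from 'svelte';

    export let conversation;       // { model, config, messages }
    export let index;              // position among the open conversations
    export let sideBySide = false; // true when more than one model is shown

    const dispatch = createEventDispatcher<{
      addMessage: void;
      deleteMessage: number;      // index of the message to remove
      deleteConversation: number; // index of the conversation to remove
    }>();

    // e.g. the close button in the sticky model header:
    // on:click={() => dispatch('deleteConversation', index)}
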
src/lib/components/GenerationConfig.svelte CHANGED
@@ -1,8 +1,10 @@
 <script lang="ts">
-  export let temperature = 0.5;
-  export let maxTokens = 2048;
-  export let streaming = true;
-  export let jsonMode = true;
+  // export let temperature = 0.5;
+  // export let maxTokens = 2048;
+  // export let streaming = true;
+  // export let jsonMode = true;
+
+  export let config;
 </script>
 
 <div>
@@ -14,7 +16,7 @@
     <input
       type="number"
       class="w-16 rounded border bg-transparent px-1 py-0.5 text-right text-sm dark:border-gray-700"
-      bind:value={temperature}
+      bind:value={config.temperature}
       min="0"
       max="1"
       step="0.1"
@@ -23,7 +25,7 @@
     <input
       id="temperature-range"
       type="range"
-      bind:value={temperature}
+      bind:value={config.temperature}
       min="0"
       max="1"
       step="0.1"
@@ -39,7 +41,7 @@
     <input
       type="number"
       class="w-20 rounded border bg-transparent px-1 py-0.5 text-right text-sm dark:border-gray-700"
-      bind:value={maxTokens}
+      bind:value={config.maxTokens}
       min="0"
       max="4096"
       step="512"
@@ -48,7 +50,7 @@
     <input
       id="max-tokens-range"
      type="range"
-      bind:value={maxTokens}
+      bind:value={config.maxTokens}
       min="0"
       max="4096"
       step="512"
@@ -57,7 +59,7 @@
   </div>
   <div class="mt-2">
     <label class="flex cursor-pointer items-center justify-between">
-      <input type="checkbox" bind:checked={streaming} class="peer sr-only" />
+      <input type="checkbox" bind:checked={config.streaming} class="peer sr-only" />
       <span class="text-sm font-medium text-gray-900 dark:text-gray-300">Streaming</span>
       <div
         class="peer relative h-5 w-9 rounded-full bg-gray-200 after:absolute after:start-[2px] after:top-[2px] after:h-4 after:w-4 after:rounded-full after:border after:border-gray-300 after:bg-white after:transition-all after:content-[''] peer-checked:bg-black peer-checked:after:translate-x-full peer-checked:after:border-white peer-focus:outline-none dark:border-gray-600 dark:bg-gray-700 dark:peer-checked:bg-blue-600"
@@ -66,7 +68,7 @@
   </div>
   <div class="mt-2">
     <label class="flex cursor-pointer items-center justify-between">
-      <input type="checkbox" value="" class="peer sr-only" disabled bind:checked={jsonMode} />
+      <input type="checkbox" value="" class="peer sr-only" disabled bind:checked={config.jsonMode} />
       <span class="text-sm font-medium text-gray-900 dark:text-gray-300">JSON Mode</span>
       <div
         class="peer relative h-5 w-9 rounded-full bg-gray-200 after:absolute after:start-[2px] after:top-[2px] after:h-4 after:w-4 after:rounded-full after:border after:border-gray-300 after:bg-white after:transition-all after:content-[''] peer-checked:bg-black peer-checked:after:translate-x-full peer-checked:after:border-white peer-focus:outline-none dark:border-gray-600 dark:bg-gray-700 dark:peer-checked:bg-blue-600"

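The four standalone props collapse into a single bound `config` object, so the parent can hand each conversation its own settings. Roughly, the shape being bound (inferred from the defaults used elsewhere in this commit; the interface name is illustrative):

    interface GenerationConfig {
      temperature: number; // 0-1, step 0.1
      maxTokens: number;   // 0-4096, step 512
      streaming: boolean;
      jsonMode: boolean;   // toggle remains disabled in the UI
    }

    // parent usage after this change:
    // <PlaygroundOptions bind:config={conversations[0].config} />
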
src/lib/components/Message.svelte CHANGED
@@ -3,9 +3,11 @@
   import { type ChatCompletionInputMessage } from '@huggingface/tasks';
 
   export let message: ChatCompletionInputMessage;
+  export let conversationIdx: number;
+  export let messageIdx: number;
   export let autofocus: boolean = false;
 
-  const dispatch = createEventDispatcher();
+  const dispatch = createEventDispatcher<{ delete: void; messageValueChanged: {conversationIdx: number, messageIdx: number, value: string} }>();
 </script>
 
 <div
@@ -16,7 +18,8 @@
   </div>
   <textarea
     {autofocus}
-    bind:value={message.content}
+    value={message.content}
+    on:input={(e) => dispatch("messageValueChanged", {conversationIdx, messageIdx, value: e.target.value})}
     placeholder="Enter {message.role} message"
     class="resize-none rounded bg-transparent px-2 py-2.5 ring-gray-100 [field-sizing:content] hover:resize-y hover:bg-white focus:resize-y focus:bg-white focus:ring group-hover/message:ring @2xl:px-3 dark:ring-gray-600 dark:hover:bg-gray-900 dark:focus:bg-gray-900"
     rows="1"

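The textarea drops `bind:value` in favour of a one-way `value` plus an `input` event, so the page decides how an edit propagates (it mirrors the last user message into every conversation). The event payload looks like this (the type name here is illustrative; the fields match the dispatcher typing in the diff):

    type MessageValueChangedDetail = {
      conversationIdx: number; // which conversation the edit came from
      messageIdx: number;      // which message inside it
      value: string;           // new textarea content
    };

    // handled in +page.svelte:
    // on:messageValueChanged={(e) => updateMessage(e.detail.value, e.detail.conversationIdx, e.detail.messageIdx)}
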
src/routes/+page.svelte CHANGED
@@ -1,5 +1,4 @@
 <script lang="ts">
-  import PlaygroundCode from '$lib/components/CodeSnippets.svelte';
   import {
     createHfInference,
     prepareRequestMessages,
@@ -10,7 +9,7 @@
   import PlaygroundTokenModal from '$lib/components/HFTokenModal.svelte';
   import PlaygroundModelSelector from '$lib/components/ModelSelector.svelte';
   import Conversation from '$lib/components/Conversation.svelte';
-  import { onDestroy, onMount } from 'svelte';
+  import { onMount } from 'svelte';
   import { type ModelEntry } from '@huggingface/hub';
   import { type ChatCompletionInputMessage } from '@huggingface/tasks';
 
@@ -31,18 +30,14 @@
     viewCode = false;
   }
 
-  let currentConversation = conversations[0];
   let systemMessage: ChatCompletionInputMessage = { role: 'system', content: '' };
-  $: messages = currentConversation.messages;
-
   let hfToken: string | null = import.meta.env.VITE_HF_TOKEN;
   let viewCode = false;
   let showTokenModal = false;
   let loading = false;
-  let streamingMessage: ChatCompletionInputMessage | null = null;
   let tokens = 0;
   let latency = 0;
-  let abortController: AbortController | null = null;
+  let abortControllers: AbortController[] = []
   let waitForNonStreaming = true;
 
   onMount(() => {
@@ -54,22 +49,38 @@
       compatibleModels = (await res.json()) as ModelEntry[];
       compatibleModels.sort((a, b) => a.id.toLowerCase().localeCompare(b.id.toLowerCase()));
     })();
-  });
 
-  onDestroy(() => {
-    if (abortController) {
-      abortController.abort();
+    return () => {
+      for(const abortController of abortControllers){
+        abortController.abort();
+      }
     }
   });
 
   function addMessage() {
-    currentConversation.messages = [
-      ...currentConversation.messages,
-      {
-        role: currentConversation.messages.at(-1)?.role === 'user' ? 'assistant' : 'user',
-        content: ''
-      }
-    ];
+    conversations = conversations.map(conversation => {
+      conversation.messages = [
+        ...conversation.messages,
+        {
+          role: conversation.messages.at(-1)?.role === 'user' ? 'assistant' : 'user',
+          content: ''
+        }
+      ];
+      return conversation;
+    });
+  }
+
+  function updateMessage(value: string, conversationIdx: number, messageIdx: number) {
+    const lastMsgIdx = conversations[0].messages.length - 1;
+    const msg = conversations[conversationIdx].messages[messageIdx];
+    msg.content = value;
+    const { role } = msg;
+    if(messageIdx === lastMsgIdx && role === "user"){
+      conversations = conversations.map(conversation => {
+        conversation.messages[messageIdx].content = value;
+        return conversation;
+      });
+    }
     conversations = conversations;
   }
 
@@ -81,103 +92,117 @@
   }
 
   function deleteMessage(idx: number) {
-    const deletedMsg = deleteAndGetItem<ChatCompletionInputMessage>(
-      currentConversation.messages,
-      idx
-    );
-    // delete messages in user/assistant pairs. otherwise, the chat template will be broken
-    if (deletedMsg) {
-      const { role } = deletedMsg;
-      const pairIdx = role === 'user' ? idx : idx - 1;
-      deleteAndGetItem<ChatCompletionInputMessage>(currentConversation.messages, pairIdx);
-    }
+    conversations = conversations.map(conversation => {
+      const deletedMsg = deleteAndGetItem<ChatCompletionInputMessage>(
+        conversation.messages,
+        idx
+      );
+      // delete messages in user/assistant pairs. otherwise, the chat template will be broken
+      if (deletedMsg) {
+        const { role } = deletedMsg;
+        const pairIdx = role === 'user' ? idx : idx - 1;
+        deleteAndGetItem<ChatCompletionInputMessage>(conversation.messages, pairIdx);
+      }
+      return conversation;
+    });
+  }
+
+  function deleteConversation(idx: number) {
+    deleteAndGetItem(conversations, idx);
     conversations = conversations;
   }
 
   function reset() {
-    currentConversation.messages = [...startMessages];
     systemMessage.content = '';
-    conversations = conversations;
+    conversations = conversations.map(conversation => {
+      conversation.messages = [...startMessages];
+      return conversation;
+    });
   }
 
   function abort() {
-    if (streamingMessage && abortController) {
-      abortController.abort();
-      abortController = null;
+    if (abortControllers.length) {
+      for(const abortController of abortControllers){
+        abortController.abort();
+      }
+      abortControllers = [];
     }
     loading = false;
-    streamingMessage = null;
     waitForNonStreaming = false;
   }
 
-  async function submit() {
-    // last message has to be from user
-    if (currentConversation.messages?.at(-1)?.role !== 'user') {
-      addMessage();
-      return;
+  async function runInference(conversation: Conversation){
+    const startTime = performance.now();
+    const hf = createHfInference(hfToken);
+    const requestMessages = prepareRequestMessages(systemMessage, conversation.messages);
+
+    if (conversation.config.streaming) {
+      const streamingMessage = { role: 'assistant', content: '' };
+      conversation.messages = [...conversation.messages, streamingMessage];
+      const abortController = new AbortController();
+      abortControllers.push(abortController)
+
+      await handleStreamingResponse(
+        hf,
+        conversation.model,
+        requestMessages,
+        conversation.config.temperature,
+        conversation.config.maxTokens,
+        conversation.config.jsonMode,
+        (content) => {
+          if (streamingMessage) {
+            streamingMessage.content = content;
+            conversation.messages = [...conversation.messages];
+            conversations = conversations;
+          }
+        },
+        abortController
+      );
+    } else {
+      waitForNonStreaming = true;
+      const newMessage = await handleNonStreamingResponse(
+        hf,
+        conversation.model,
+        requestMessages,
+        conversation.config.temperature,
+        conversation.config.maxTokens,
+        conversation.config.jsonMode
+      );
+      // check if the user did not abort the request
+      if (waitForNonStreaming) {
+        conversation.messages = [...conversation.messages, newMessage];
+        conversations = conversations;
+      }
     }
+
+    const endTime = performance.now();
+    latency = Math.round(endTime - startTime);
+  }
+
+  async function submit() {
+    // // last message has to be from user
+    // if (currentConversation.messages?.at(-1)?.role !== 'user') {
+    //   addMessage();
+    //   return;
+    // }
     if (!hfToken) {
       showTokenModal = true;
       return;
     }
     (document.activeElement as HTMLElement).blur();
     loading = true;
-    const startTime = performance.now();
-
-    try {
-      const hf = createHfInference(hfToken);
-      const requestMessages = prepareRequestMessages(systemMessage, messages);
-
-      if (currentConversation.config.streaming) {
-        streamingMessage = { role: 'assistant', content: '' };
-        currentConversation.messages = [...currentConversation.messages, streamingMessage];
-        abortController = new AbortController();
-
-        await handleStreamingResponse(
-          hf,
-          currentConversation.model,
-          requestMessages,
-          currentConversation.config.temperature,
-          currentConversation.config.maxTokens,
-          currentConversation.config.jsonMode,
-          (content) => {
-            if (streamingMessage) {
-              streamingMessage.content = content;
-              currentConversation.messages = [...currentConversation.messages];
-              conversations = conversations;
-            }
-          },
-          abortController
-        );
-      } else {
-        streamingMessage = null;
-        waitForNonStreaming = true;
-        const newMessage = await handleNonStreamingResponse(
-          hf,
-          currentConversation.model,
-          requestMessages,
-          currentConversation.config.temperature,
-          currentConversation.config.maxTokens,
-          currentConversation.config.jsonMode
-        );
-        // check if the user did not abort the request
-        if (waitForNonStreaming) {
-          currentConversation.messages = [...currentConversation.messages, newMessage];
-          conversations = conversations;
-        }
-      }
 
+    try{
+      const promises = conversations.map(conversation => runInference(conversation));
+      await Promise.all(promises);
       addMessage();
-    } catch (error) {
+    }catch (error){
       if (error.name !== 'AbortError') {
         alert('error: ' + (error as Error).message);
       }
     } finally {
-      const endTime = performance.now();
-      latency = Math.round(endTime - startTime);
       loading = false;
-      streamingMessage = null;
-      abortController = null;
+      abortControllers = [];
     }
   }
 
@@ -235,15 +260,17 @@
     {#each conversations as conversation, index}
       <Conversation
         {loading}
-        {streamingMessage}
-        {conversations}
         {conversation}
         {index}
-        {currentConversation}
         {viewCode}
-        {messages}
+        sideBySide={conversations.length > 1}
         on:addMessage={addMessage}
+        on:messageValueChanged={(e) => {
+          const {conversationIdx, messageIdx, value} = e.detail;
+          updateMessage(value, conversationIdx, messageIdx);
+        }}
         on:deleteMessage={(e) => deleteMessage(e.detail)}
+        on:deleteConversation={(e) => deleteConversation(e.detail)}
       />
     {/each}
   </div>
@@ -348,7 +375,7 @@
   <div
     class="flex flex-1 flex-col gap-6 overflow-y-hidden rounded-xl border border-gray-200/80 bg-gradient-to-b from-white via-white p-3 shadow-sm dark:border-white/5 dark:from-gray-800/40 dark:via-gray-800/40"
   >
-    <PlaygroundModelSelector {compatibleModels} bind:currentModel={currentConversation.model} />
+    <PlaygroundModelSelector {compatibleModels} bind:currentModel={conversations[0].model} />
     <div
       class="group relative -mt-4 flex h-[26px] w-full items-center justify-center gap-2 rounded-lg bg-black px-5 text-sm text-white hover:bg-gray-900 focus:outline-none focus:ring-4 focus:ring-gray-300 dark:border-gray-700 dark:bg-blue-600 dark:hover:bg-blue-700 dark:focus:ring-gray-700"
     >
@@ -377,7 +404,7 @@
           id: String(Math.random()),
           model: e.target.value,
           config: { temperature: 0.5, maxTokens: 2048, streaming: true, jsonMode: false },
-          messages: startMessages
+          messages: conversations[0].messages
         }
       ];
     }}
@@ -388,12 +415,7 @@
     </select>
   </div>
 
-  <PlaygroundOptions
-    bind:temperature={currentConversation.config.temperature}
-    bind:maxTokens={currentConversation.config.maxTokens}
-    bind:jsonMode={currentConversation.config.jsonMode}
-    bind:streaming={currentConversation.config.streaming}
-  />
+  <PlaygroundOptions bind:config={conversations[0].config} />
   <div class="mt-auto">
     <div class="mb-3 flex items-center justify-between gap-2">
       <label