victor (HF staff) and nsarrazin (HF staff) committed
Commit 25c844d
1 parent: bfec739

Models page (#849)

* add models page

* add logo

* models discussion

* add default hint

* Update +page.svelte

* mishig review

* fix typecheck

---------

Co-authored-by: Nathan Sarrazin <sarrazin.nathan@gmail.com>
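
The commit threads an optional `logoUrl` field from the model config through the server load functions and into a new `/models` route. For reference only, a single entry of the `MODELS` JSON array (set via the `MODELS` environment variable) with the new field could look like the sketch below, written as a TypeScript object literal for readability; the values are copied from the mistralai entry in the diff, and a deployment may point `logoUrl` at any image it likes.

```ts
// Hedged sketch, not part of the commit: one entry of the MODELS array
// (the real value is JSON embedded in the MODELS environment variable).
// logoUrl is the field this commit introduces; it is optional, and the
// models page falls back to a plain placeholder square when it is absent.
const exampleModelEntry = {
	name: "mistralai/Mixtral-8x7B-Instruct-v0.1",
	description:
		"The latest MoE model from Mistral AI! 8x7B and outperforms Llama 2 70B in most benchmarks.",
	logoUrl:
		"https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/mistral-logo.png",
	websiteUrl: "https://mistral.ai/news/mixtral-of-experts/",
	modelUrl: "https://huggingface.co/mistralai/Mixtral-8x7B-Instruct-v0.1",
	preprompt: "",
};
```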

.env.template CHANGED
@@ -4,6 +4,7 @@ MODELS=`[
   {
     "name" : "mistralai/Mixtral-8x7B-Instruct-v0.1",
     "description" : "The latest MoE model from Mistral AI! 8x7B and outperforms Llama 2 70B in most benchmarks.",
+    "logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/mistral-logo.png",
     "websiteUrl" : "https://mistral.ai/news/mixtral-of-experts/",
     "modelUrl": "https://huggingface.co/mistralai/Mixtral-8x7B-Instruct-v0.1",
     "preprompt" : "",
@@ -33,6 +34,7 @@ MODELS=`[
   {
     "name": "meta-llama/Llama-2-70b-chat-hf",
     "description": "The latest and biggest model from Meta, fine-tuned for chat.",
+    "logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/meta-logo.png",
     "websiteUrl": "https://ai.meta.com/llama/",
     "modelUrl": "https://huggingface.co/meta-llama/Llama-2-70b-chat-hf",
     "preprompt": " ",
@@ -62,6 +64,7 @@ MODELS=`[
   {
     "name" : "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
     "description" : "Nous Hermes 2 Mixtral 8x7B DPO is the new flagship Nous Research model trained over the Mixtral 8x7B MoE LLM.",
+    "logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/nous-logo.png",
     "websiteUrl" : "https://nousresearch.com/",
     "modelUrl": "https://huggingface.co/NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
     "chatPromptTemplate" : "{{#if @root.preprompt}}<|im_start|>system\n{{@root.preprompt}}<|im_end|>\n{{/if}}{{#each messages}}{{#ifUser}}<|im_start|>user\n{{content}}<|im_end|>\n<|im_start|>assistant\n{{/ifUser}}{{#ifAssistant}}{{content}}<|im_end|>\n{{/ifAssistant}}{{/each}}",
@@ -91,6 +94,7 @@ MODELS=`[
     "name": "codellama/CodeLlama-70b-Instruct-hf",
     "displayName": "codellama/CodeLlama-70b-Instruct-hf",
     "description": "Code Llama, a state of the art code model from Meta. Now in 70B!",
+    "logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/meta-logo.png",
     "websiteUrl": "https://ai.meta.com/blog/code-llama-large-language-model-coding/",
     "modelUrl": "https://huggingface.co/codellama/CodeLlama-70b-Instruct-hf",
     "preprompt": "",
@@ -121,6 +125,7 @@ MODELS=`[
     "name": "mistralai/Mistral-7B-Instruct-v0.1",
     "displayName": "mistralai/Mistral-7B-Instruct-v0.1",
     "description": "Mistral 7B is a new Apache 2.0 model, released by Mistral AI that outperforms Llama2 13B in benchmarks.",
+    "logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/mistral-logo.png",
     "websiteUrl": "https://mistral.ai/news/announcing-mistral-7b/",
     "modelUrl": "https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.1",
     "preprompt": "",
@@ -152,6 +157,7 @@ MODELS=`[
     "name": "mistralai/Mistral-7B-Instruct-v0.2",
     "displayName": "mistralai/Mistral-7B-Instruct-v0.2",
     "description": "Mistral 7B is a new Apache 2.0 model, released by Mistral AI that outperforms Llama2 13B in benchmarks.",
+    "logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/mistral-logo.png",
     "websiteUrl": "https://mistral.ai/news/announcing-mistral-7b/",
     "modelUrl": "https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2",
     "preprompt": "",
@@ -182,6 +188,7 @@ MODELS=`[
     "name": "openchat/openchat-3.5-0106",
     "displayName": "openchat/openchat-3.5-0106",
     "description": "OpenChat 3.5 is the #1 model on MT-Bench, with only 7B parameters.",
+    "logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/openchat-logo.png",
     "websiteUrl": "https://huggingface.co/openchat/openchat-3.5-0106",
     "modelUrl": "https://huggingface.co/openchat/openchat-3.5-0106",
     "preprompt": "",
src/lib/components/NavMenu.svelte CHANGED
@@ -8,6 +8,7 @@
 	import NavConversationItem from "./NavConversationItem.svelte";
 	import type { LayoutData } from "../../routes/$types";
 	import type { ConvSidebar } from "$lib/types/ConvSidebar";
+	import type { Model } from "$lib/types/Model";
 	import { page } from "$app/stores";

 	export let conversations: ConvSidebar[] = [];
@@ -41,6 +42,8 @@
 		month: "This month",
 		older: "Older",
 	} as const;
+
+	const nModels: number = $page.data.models.filter((el: Model) => !el.unlisted).length;
 </script>

 <div class="sticky top-0 flex flex-none items-center justify-between px-3 py-3.5 max-sm:pt-0">
@@ -108,6 +111,18 @@
 	>
 		Theme
 	</button>
+	{#if nModels > 1}
+		<a
+			href="{base}/models"
+			class="flex h-9 flex-none items-center gap-1.5 rounded-lg pl-2.5 pr-2 text-gray-500 hover:bg-gray-100 dark:text-gray-400 dark:hover:bg-gray-700"
+		>
+			Models
+			<span
+				class="ml-auto rounded-full border border-gray-300 px-2 py-0.5 text-xs text-gray-500 dark:border-gray-500 dark:text-gray-400"
+				>{nModels}</span
+			>
+		</a>
+	{/if}
 	{#if $page.data.enableAssistants}
 		<a
 			href="{base}/assistants"
src/lib/server/models.ts CHANGED
@@ -25,6 +25,7 @@ const modelConfig = z.object({
 	name: z.string().default(""),
 	displayName: z.string().min(1).optional(),
 	description: z.string().min(1).optional(),
+	logoUrl: z.string().url().optional(),
 	websiteUrl: z.string().url().optional(),
 	modelUrl: z.string().url().optional(),
 	datasetName: z.string().min(1).optional(),
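
For context on what this schema change accepts: `logoUrl` is validated as an optional URL string, so a missing logo is fine but a malformed one fails config parsing. A minimal, self-contained sketch (using zod directly, not the app's full `modelConfig`) behaves like this:

```ts
import { z } from "zod";

// Reduced stand-in for the relevant part of modelConfig above.
const modelLogoSchema = z.object({
	name: z.string().default(""),
	logoUrl: z.string().url().optional(),
});

// Present and well-formed: accepted.
modelLogoSchema.parse({
	name: "openchat/openchat-3.5-0106",
	logoUrl: "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/openchat-logo.png",
});

// Omitted entirely: still accepted, since the field is optional.
modelLogoSchema.parse({ name: "local-model" });

// Not a valid URL: rejected (in chat-ui this parse runs when the MODELS
// env var is read in src/lib/server/models.ts).
console.log(modelLogoSchema.safeParse({ name: "x", logoUrl: "logo.png" }).success); // false
```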
src/lib/types/Model.ts CHANGED
@@ -10,6 +10,7 @@ export type Model = Pick<
 	| "promptExamples"
 	| "parameters"
 	| "description"
+	| "logoUrl"
 	| "modelUrl"
 	| "datasetUrl"
 	| "preprompt"
src/routes/+layout.server.ts CHANGED
@@ -149,6 +149,7 @@ export const load: LayoutServerLoad = async ({ locals, depends }) => {
 	datasetUrl: model.datasetUrl,
 	displayName: model.displayName,
 	description: model.description,
+	logoUrl: model.logoUrl,
 	promptExamples: model.promptExamples,
 	parameters: model.parameters,
 	preprompt: model.preprompt,
src/routes/api/models/+server.ts CHANGED
@@ -10,6 +10,7 @@ export async function GET() {
 	datasetUrl: model.datasetUrl,
 	displayName: model.displayName,
 	description: model.description,
+	logoUrl: model.logoUrl,
 	promptExamples: model.promptExamples,
 	preprompt: model.preprompt,
 	multimodal: model.multimodal,
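
Since the public `/api/models` endpoint now exposes `logoUrl` alongside the other descriptive fields, external consumers can read it too. A hedged usage sketch follows, assuming the endpoint returns the mapped model list as a JSON array; the response type below is an illustrative assumption narrowed to fields visible in this diff, not the app's real types.

```ts
// Sketch of a client for GET /api/models; the interface is an assumption
// for illustration, limited to fields that appear in the hunk above.
interface PublicModelInfo {
	displayName?: string;
	description?: string;
	logoUrl?: string;
	multimodal?: boolean;
}

async function printModelLogos(baseUrl: string): Promise<void> {
	const res = await fetch(`${baseUrl}/api/models`);
	if (!res.ok) throw new Error(`GET /api/models failed: ${res.status}`);
	const models = (await res.json()) as PublicModelInfo[];
	for (const model of models) {
		console.log(`${model.displayName ?? "unnamed"} -> ${model.logoUrl ?? "no logo configured"}`);
	}
}

// Example call against a chat-ui deployment of your choice:
// printModelLogos("https://example.com/chat").catch(console.error);
```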
src/routes/models/+page.svelte ADDED
@@ -0,0 +1,72 @@
+<script lang="ts">
+	import type { PageData } from "./$types";
+
+	import { PUBLIC_APP_NAME } from "$env/static/public";
+	import { isHuggingChat } from "$lib/utils/isHuggingChat";
+
+	import { base } from "$app/paths";
+	import { page } from "$app/stores";
+
+	import CarbonHelpFilled from "~icons/carbon/help-filled";
+
+	export let data: PageData;
+</script>
+
+<svelte:head>
+	{#if isHuggingChat}
+		<title>HuggingChat - Models</title>
+		<meta property="og:title" content="HuggingChat - Models" />
+		<meta property="og:type" content="link" />
+		<meta property="og:description" content="Browse HuggingChat available models" />
+		<meta property="og:url" content={$page.url.href} />
+	{/if}
+</svelte:head>
+
+<div class="scrollbar-custom mr-1 h-full overflow-y-auto py-12 md:py-24">
+	<div class="pt-42 mx-auto flex flex-col px-5 xl:w-[60rem] 2xl:w-[64rem]">
+		<div class="flex items-center">
+			<h1 class="text-2xl font-bold">Models</h1>
+			{#if isHuggingChat}
+				<a
+					href="https://huggingface.co/spaces/huggingchat/chat-ui/discussions/372"
+					class="ml-auto dark:text-gray-400 dark:hover:text-gray-300"
+					target="_blank"
+				>
+					<CarbonHelpFilled />
+				</a>
+			{/if}
+		</div>
+		<h3 class="text-gray-500">All models available on {PUBLIC_APP_NAME}</h3>
+		<dl class="mt-8 grid grid-cols-1 gap-3 sm:gap-5 xl:grid-cols-2">
+			{#each data.models.filter((el) => !el.unlisted) as model, index (model.id)}
+				<a
+					href="{base}/settings/{model.id}"
+					class="relative flex flex-col gap-2 overflow-hidden rounded-xl border bg-gray-50/50 px-6 py-5 shadow hover:bg-gray-50 hover:shadow-inner dark:border-gray-800/70 dark:bg-gray-950/20 dark:hover:bg-gray-950/40"
+				>
+					<div class="flex items-center justify-between">
+						{#if model.logoUrl}
+							<img
+								class=" overflown aspect-square size-6 rounded border dark:border-gray-700"
+								src={model.logoUrl}
+								alt=""
+							/>
+						{:else}
+							<div class="size-6 rounded border border-transparent bg-gray-300 dark:bg-gray-800" />
+						{/if}
+						{#if index === 0}
+							<div
+								class="rounded-full border border-gray-300 px-2 py-0.5 text-xs text-gray-500 dark:border-gray-500 dark:text-gray-400"
+							>
+								Default
+							</div>
+						{/if}
+					</div>
+					<dt class="flex items-center gap-2 font-semibold">
+						{model.displayName}
+					</dt>
+					<dd class="text-sm text-gray-500 dark:text-gray-400">{model.description || "-"}</dd>
+				</a>
+			{/each}
+		</dl>
+	</div>
+</div>