Move default template so it doesn't override tokenizer (#987)
1 changed file: src/lib/server/models.ts (+4 -7)
@@ -44,11 +44,7 @@ const modelConfig = z.object({
 	datasetUrl: z.string().url().optional(),
 	preprompt: z.string().default(""),
 	prepromptUrl: z.string().url().optional(),
-	chatPromptTemplate: z
-		.string()
-		.default(
-			"{{#if @root.preprompt}}<|im_start|>system\n{{@root.preprompt}}<|im_end|>\n{{/if}}{{#each messages}}{{#ifUser}}<|im_start|>user\n{{content}}<|im_end|>\n<|im_start|>assistant\n{{/ifUser}}{{#ifAssistant}}{{content}}<|im_end|>\n{{/ifAssistant}}{{/each}}"
-		), // ChatML
+	chatPromptTemplate: z.string().optional(),
 	promptExamples: z
 		.array(
 			z.object({
@@ -86,8 +82,9 @@ async function getChatPromptRender(
 	let tokenizer: PreTrainedTokenizer;
 
 	if (!m.tokenizer) {
-		throw new Error(
-			"…"
+		return compileTemplate<ChatTemplateInput>(
+			"{{#if @root.preprompt}}<|im_start|>system\n{{@root.preprompt}}<|im_end|>\n{{/if}}{{#each messages}}{{#ifUser}}<|im_start|>user\n{{content}}<|im_end|>\n<|im_start|>assistant\n{{/ifUser}}{{#ifAssistant}}{{content}}<|im_end|>\n{{/ifAssistant}}{{/each}}",
+			m
 		);
 	}
 
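To make the behavior concrete, here is a minimal sketch of how the ChatML fallback renders a conversation. It is not chat-ui's actual `compileTemplate`; it assumes plain Handlebars plus simplified `ifUser`/`ifAssistant` block helpers and `Message`/`ChatTemplateInput` shapes standing in for the real ones.

```ts
import Handlebars from "handlebars";

type Message = { from: "user" | "assistant"; content: string };
type ChatTemplateInput = { messages: Message[]; preprompt?: string };

// Block helpers so {{#ifUser}} / {{#ifAssistant}} only emit their body for the
// matching message role (stand-ins for the helpers chat-ui registers itself).
function roleHelper(role: Message["from"]) {
	return function (this: Message, options: { fn: (ctx: unknown) => string }) {
		return this.from === role ? options.fn(this) : "";
	};
}
Handlebars.registerHelper("ifUser", roleHelper("user"));
Handlebars.registerHelper("ifAssistant", roleHelper("assistant"));

// The same ChatML template string that the PR moves into getChatPromptRender.
const chatMlFallback =
	"{{#if @root.preprompt}}<|im_start|>system\n{{@root.preprompt}}<|im_end|>\n{{/if}}{{#each messages}}{{#ifUser}}<|im_start|>user\n{{content}}<|im_end|>\n<|im_start|>assistant\n{{/ifUser}}{{#ifAssistant}}{{content}}<|im_end|>\n{{/ifAssistant}}{{/each}}";

const render = Handlebars.compile<ChatTemplateInput>(chatMlFallback, { noEscape: true });

console.log(
	render({
		preprompt: "You are a helpful assistant.",
		messages: [{ from: "user", content: "Hello!" }],
	})
);
// <|im_start|>system
// You are a helpful assistant.<|im_end|>
// <|im_start|>user
// Hello!<|im_end|>
// <|im_start|>assistant
```

With the default removed from the zod schema, a model config that sets `tokenizer` without `chatPromptTemplate` is no longer forced onto this ChatML template; per the PR's intent, the template above is only compiled as a last resort when the model defines no `tokenizer`.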