import { smallModel } from "$lib/server/models";
import type { Conversation } from "$lib/types/Conversation";

export async function generateFromDefaultEndpoint({
	messages,
	preprompt,
	generateSettings,
}: {
	messages: Omit<Conversation["messages"][0], "id">[];
	preprompt?: string;
	generateSettings?: Record<string, unknown>;
}): Promise<string> {
	const endpoint = await smallModel.getEndpoint();

	const tokenStream = await endpoint({ messages, preprompt, generateSettings });

	for await (const output of tokenStream) {
		// if generated_text is not set, the generation is not finished yet
		if (output.generated_text) {
			let generated_text = output.generated_text;
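			// strip any trailing stop sequence (model-configured stops plus "<|endoftext|>") from the final text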
			for (const stop of [...(smallModel.parameters?.stop ?? []), "<|endoftext|>"]) {
				if (generated_text.endsWith(stop)) {
					generated_text = generated_text.slice(0, -stop.length).trimEnd();
				}
			}
			return generated_text;
		}
	}
	throw new Error("Generation failed");
}
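
// Illustrative usage sketch (not part of the original module): the message fields and
// preprompt below are assumptions about the Conversation message shape, shown only to
// demonstrate how a caller might invoke this helper.
//
// const summary = await generateFromDefaultEndpoint({
// 	messages: [{ from: "user", content: "Summarize the conversation in five words." }],
// 	preprompt: "You are a summarization assistant.",
// });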