import { HF_ACCESS_TOKEN } from "$env/static/private";
import { HfInference } from "@huggingface/inference";
import { generateFromDefaultEndpoint } from "../generateFromDefaultEndpoint";
import type { BackendModel } from "../models";

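/**
 * Summarize scraped web page content so it can best answer the given search query.
 *
 * Example usage (a sketch; `scrapedText` and `defaultModel` are assumed to come from the caller):
 *   const summary = await summarizeWeb(scrapedText, "what is the James Webb telescope?", defaultModel);
 */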
export async function summarizeWeb(content: string, query: string, model: BackendModel) {
	// if HF_ACCESS_TOKEN is set, summarize with facebook/bart-large-cnn via the HF Inference API
	try {
		if (HF_ACCESS_TOKEN) {
			const summary = (
				await new HfInference(HF_ACCESS_TOKEN).summarization({
					model: "facebook/bart-large-cnn",
					inputs: content,
					parameters: {
						max_length: 512,
					},
				})
			).summary_text;
			return summary;
		}
	} catch (e) {
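		// if the Inference API call fails, log the error and fall through to the prompt-based fallback below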
		console.error(e);
	}

	// else we use the LLM to generate a summary
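	// the content is truncated to the model's `truncate` limit (counted in whitespace-separated words)
	// and wrapped in the model's chat tokens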
	const summaryPrompt =
		model.userMessageToken +
		content
			.split(" ")
			.slice(0, model.parameters?.truncate ?? 0)
			.join(" ") +
		model.messageEndToken +
		model.userMessageToken +
		`The text above should be summarized to best answer the query: ${query}.` +
		model.messageEndToken +
		model.assistantMessageToken +
		"Summary: ";

	const summary = (await generateFromDefaultEndpoint(summaryPrompt)).trim();

	return summary;
}