import { PUBLIC_SEP_TOKEN } from "$env/static/public";
import { buildPrompt } from "$lib/buildPrompt.js";
import { abortedGenerations } from "$lib/server/abortedGenerations.js";
import { collections } from "$lib/server/database.js";
import { modelEndpoint } from "$lib/server/modelEndpoint.js";
import type { Message } from "$lib/types/Message.js";
import { concatUint8Arrays } from "$lib/utils/concatUint8Arrays.js";
import { streamToAsyncIterable } from "$lib/utils/streamToAsyncIterable.js";
import { trimPrefix } from "$lib/utils/trimPrefix.js";
import { trimSuffix } from "$lib/utils/trimSuffix.js";
import type { TextGenerationStreamOutput } from "@huggingface/inference";
import { error } from "@sveltejs/kit";
import { ObjectId } from "mongodb";

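// Appends the user's message to the conversation, forwards the resulting prompt to the
// model endpoint, streams the generation back to the client, and persists the assistant
// reply in the background.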
export async function POST({ request, fetch, locals, params }) {
	// todo: add validation on params.id
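	// One option (sketch, assuming the mongodb driver's ObjectId.isValid):
	//   if (!ObjectId.isValid(params.id)) throw error(400, "Invalid conversation id");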
	const convId = new ObjectId(params.id);
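	// Timestamp of this request; compared against abort timestamps to detect a user
	// cancellation issued after the prompt was sent.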
	const date = new Date();

	const conv = await collections.conversations.findOne({
		_id: convId,
		sessionId: locals.sessionId,
	});

	if (!conv) {
		throw error(404, "Conversation not found");
	}

	// Todo: validate prompt with zod? or arktype
	const json = await request.json();
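	// A minimal validation sketch, assuming zod were added as a dependency:
	//   const { inputs } = z.object({ inputs: z.string().min(1) }).parse(json);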

	const messages = [...conv.messages, { from: "user", content: json.inputs }] satisfies Message[];
	const prompt = buildPrompt(messages);

	const randomEndpoint = modelEndpoint();

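	// Lets us cancel the upstream generation request if the user aborts mid-stream.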
	const abortController = new AbortController();

	const resp = await fetch(randomEndpoint.endpoint, {
		headers: {
			"Content-Type": request.headers.get("Content-Type") ?? "application/json",
			Authorization: randomEndpoint.authorization,
		},
		method: "POST",
		body: JSON.stringify({
			...json,
			inputs: prompt,
		}),
		signal: abortController.signal,
	});

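	// Duplicate the response stream: stream1 is returned to the client as-is, stream2 is
	// parsed server-side to extract and save the generated text.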
	const [stream1, stream2] = resp.body!.tee();

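	// Consumes stream2 until generation finishes (or is aborted), strips the prompt and
	// special tokens from the output, and appends the assistant message to the conversation.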
	async function saveMessage() {
		let generated_text = await parseGeneratedText(stream2, convId, date, abortController);

		// We could also check if PUBLIC_ASSISTANT_MESSAGE_TOKEN is present and use it to slice the text
		if (generated_text.startsWith(prompt)) {
			generated_text = generated_text.slice(prompt.length);
		}

		generated_text = trimSuffix(trimPrefix(generated_text, "<|startoftext|>"), PUBLIC_SEP_TOKEN);

		messages.push({ from: "assistant", content: generated_text });

		await collections.conversations.updateOne(
			{
				_id: convId,
			},
			{
				$set: {
					messages,
					updatedAt: new Date(),
				},
			}
		);
	}

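	// Fire-and-forget: persistence runs in the background so the client starts receiving
	// tokens immediately; errors are only logged.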
	saveMessage().catch(console.error);

	// Todo: maybe we should wait for the message to be saved before ending the response - in case of errors
	return new Response(stream1, {
		headers: Object.fromEntries(resp.headers.entries()),
		status: resp.status,
		statusText: resp.statusText,
	});
}

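// Deletes a conversation, provided it belongs to the caller's session.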
export async function DELETE({ locals, params }) {
	const convId = new ObjectId(params.id);

	const conv = await collections.conversations.findOne({
		_id: convId,
		sessionId: locals.sessionId,
	});

	if (!conv) {
		throw error(404, "Conversation not found");
	}

	await collections.conversations.deleteOne({ _id: conv._id });

	return new Response();
}

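// Reads the text-generation SSE stream. If the user aborted the generation after the
// prompt was sent, cancels the upstream request and returns the tokens received so far;
// otherwise returns the `generated_text` field of the final "data:" event.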
async function parseGeneratedText(
	stream: ReadableStream,
	conversationId: ObjectId,
	promptedAt: Date,
	abortController: AbortController
): Promise<string> {
	const inputs: Uint8Array[] = [];
	for await (const input of streamToAsyncIterable(stream)) {
		inputs.push(input);

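		// Check whether an abort was requested for this conversation after the prompt was sent.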
		const date = abortedGenerations.get(conversationId.toString());

		if (date && date > promptedAt) {
			abortController.abort("Cancelled by user");
			const completeInput = concatUint8Arrays(inputs);

			const lines = new TextDecoder()
				.decode(completeInput)
				.split("\n")
				.filter((line) => line.startsWith("data:"));

			const tokens = lines.map((line) => {
				try {
					const json: TextGenerationStreamOutput = JSON.parse(line.slice("data:".length));
					return json.token.text;
				} catch {
					return "";
				}
			});
			return tokens.join("");
		}
	}

	// Merge inputs into a single Uint8Array
	const completeInput = concatUint8Arrays(inputs);

	// Get last line starting with "data:" and parse it as JSON to get the generated text
	const message = new TextDecoder().decode(completeInput);

	let lastIndex = message.lastIndexOf("\ndata:");
	if (lastIndex === -1) {
		lastIndex = message.indexOf("data:");
	}

	if (lastIndex === -1) {
		console.error("Could not parse in last message");
	}

	let lastMessage = message.slice(lastIndex).trim().slice("data:".length);
	if (lastMessage.includes("\n")) {
		lastMessage = lastMessage.slice(0, lastMessage.indexOf("\n"));
	}

	const res = JSON.parse(lastMessage).generated_text;

	if (typeof res !== "string") {
		throw new Error("Could not parse generated text");
	}

	return res;
}