import { MESSAGES_BEFORE_LOGIN, RATE_LIMIT } from "$env/static/private";
import { authCondition, requiresUser } from "$lib/server/auth";
import { collections } from "$lib/server/database";
import { models } from "$lib/server/models";
import { ERROR_MESSAGES } from "$lib/stores/errors";
import type { Message } from "$lib/types/Message";
import { error } from "@sveltejs/kit";
import { ObjectId } from "mongodb";
import { z } from "zod";
import type { MessageUpdate } from "$lib/types/MessageUpdate";
import { runWebSearch } from "$lib/server/websearch/runWebSearch";
import type { WebSearch } from "$lib/types/WebSearch";
import { abortedGenerations } from "$lib/server/abortedGenerations";
import { summarize } from "$lib/server/summarize";
import { uploadFile } from "$lib/server/files/uploadFile";
import sizeof from "image-size";

export async function POST({ request, locals, params, getClientAddress }) {
	const id = z.string().parse(params.id);
	const convId = new ObjectId(id);
	const promptedAt = new Date();

	const userId = locals.user?._id ?? locals.sessionId;

	// check user
	if (!userId) {
		throw error(401, "Unauthorized");
	}

	// check if the user has access to the conversation
	const conv = await collections.conversations.findOne({
		_id: convId,
		...authCondition(locals),
	});

	if (!conv) {
		throw error(404, "Conversation not found");
	}

	// register the event for ratelimiting
	await collections.messageEvents.insertOne({
		userId,
		createdAt: new Date(),
		ip: getClientAddress(),
	});

	// guest mode check
	if (
		!locals.user?._id &&
		requiresUser &&
		(MESSAGES_BEFORE_LOGIN ? parseInt(MESSAGES_BEFORE_LOGIN) : 0) > 0
	) {
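		// count how many assistant replies the guest has already received across all their conversations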
		const totalMessages =
			(
				await collections.conversations
					.aggregate([
						{ $match: authCondition(locals) },
						{ $project: { messages: 1 } },
						{ $unwind: "$messages" },
						{ $match: { "messages.from": "assistant" } },
						{ $count: "messages" },
					])
					.toArray()
			)[0]?.messages ?? 0;

		if (totalMessages > parseInt(MESSAGES_BEFORE_LOGIN)) {
			throw error(429, "Exceeded number of messages before login");
		}
	}

	// check if the user is rate limited
	const nEvents = Math.max(
		await collections.messageEvents.countDocuments({ userId }),
		await collections.messageEvents.countDocuments({ ip: getClientAddress() })
	);

	if (RATE_LIMIT !== "" && nEvents > parseInt(RATE_LIMIT)) {
		throw error(429, ERROR_MESSAGES.rateLimited);
	}

	// fetch the model
	const model = models.find((m) => m.id === conv.model);

	if (!model) {
		throw error(410, "Model not available anymore");
	}

	// finally parse the content of the request
	const json = await request.json();

	const {
		inputs: newPrompt,
		id: messageId,
		is_retry: isRetry,
		is_continue: isContinue,
		web_search: webSearch,
		files: b64files,
	} = z
		.object({
			inputs: z.optional(z.string().trim().min(1)),
			id: z.optional(z.string().uuid()),
			is_retry: z.optional(z.boolean()),
			is_continue: z.optional(z.boolean()),
			web_search: z.optional(z.boolean()),
			files: z.optional(z.array(z.string())),
		})
		.parse(json);

	// files is an array of base64 strings encoding Blob objects
	// we need to convert this array to an array of File objects

	const files = b64files?.map((file) => {
		const blob = Buffer.from(file, "base64");
		return new File([blob], "image.png");
	});

	// check sizes: reject files larger than 2MB or with dimensions above 224x224
	if (files) {
		const filechecks = await Promise.all(
			files.map(async (file) => {
				const dimensions = sizeof(Buffer.from(await file.arrayBuffer()));
				return (
					file.size > 2 * 1024 * 1024 ||
					(dimensions.width ?? 0) > 224 ||
					(dimensions.height ?? 0) > 224
				);
			})
		);

		if (filechecks.some((check) => check)) {
			throw error(413, "File too large, should be <2MB and 224x224 max.");
		}
	}

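	// upload any files and keep the returned hashes so the new user message can reference them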
	let hashes: undefined | string[];

	if (files) {
		hashes = await Promise.all(files.map(async (file) => await uploadFile(file, conv)));
	}

	// a continue request is only valid for the last message of the conversation
	if (isContinue && conv.messages[conv.messages.length - 1].id !== messageId) {
		throw error(400, "Can only continue the last message");
	}

	// get the list of messages
	// while checking for retries
	let messages = (() => {
		// for retries we replace the retried user message and remove the messages after it
		if (isRetry && messageId) {
			let retryMessageIdx = conv.messages.findIndex((message) => message.id === messageId);

			if (retryMessageIdx === -1) {
				retryMessageIdx = conv.messages.length;
			}

			return [
				...conv.messages.slice(0, retryMessageIdx),
				{
					content: conv.messages[retryMessageIdx]?.content,
					from: "user",
					id: messageId as Message["id"],
					updatedAt: new Date(),
					files: conv.messages[retryMessageIdx]?.files,
				},
			];
		} else if (isContinue && messageId) {
			// for continue we keep the messages as-is and let the last assistant message be extended
			return conv.messages;
		} else {
			// in a normal conversation we append the new user message
			return [
				...conv.messages,
				{
					content: newPrompt ?? "",
					from: "user",
					id: (messageId as Message["id"]) || crypto.randomUUID(),
					createdAt: new Date(),
					updatedAt: new Date(),
					files: hashes,
				},
			];
		}
	})() satisfies Message[];

	await collections.conversations.updateOne(
		{
			_id: convId,
		},
		{
			$set: {
				messages,
				title: conv.title,
				updatedAt: new Date(),
			},
		}
	);

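	// flipped to true once the final answer is sent, so cancel() can tell an aborted stream from a completed one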
	let doneStreaming = false;

	// we now build the stream
	const stream = new ReadableStream({
		async start(controller) {
			const updates: MessageUpdate[] = isContinue
				? conv.messages[conv.messages.length - 1].updates ?? []
				: [];

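			// record non-stream updates on the message and forward every update to the client as newline-delimited JSON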
			function update(newUpdate: MessageUpdate) {
				if (newUpdate.type !== "stream") {
					updates.push(newUpdate);
				}

				if (newUpdate.type === "stream" && newUpdate.token === "") {
					return;
				}
				controller.enqueue(JSON.stringify(newUpdate) + "\n");

				if (newUpdate.type === "finalAnswer") {
					// enqueue 4096 spaces of padding so the browser doesn't hold the response in a blocking buffer
					controller.enqueue(" ".repeat(4096));
				}
			}

			update({ type: "status", status: "started" });

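			// generate a title for brand-new conversations in the background; awaited before the stream ends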
			const summarizeIfNeeded = (async () => {
				if (conv.title === "New Chat" && messages.length === 1) {
					try {
						conv.title = (await summarize(messages[0].content)) ?? conv.title;
						update({ type: "status", status: "title", message: conv.title });
					} catch (e) {
						console.error(e);
					}
				}
			})();

			await collections.conversations.updateOne(
				{
					_id: convId,
				},
				{
					$set: {
						messages,
						title: conv.title,
						updatedAt: new Date(),
					},
				}
			);

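			// run a web search before generating if one was requested; on continue, reuse the results already stored on the message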
			let webSearchResults: WebSearch | undefined;

			if (webSearch && !isContinue) {
				webSearchResults = await runWebSearch(conv, messages[messages.length - 1].content, update);
				messages[messages.length - 1].webSearch = webSearchResults;
			} else if (isContinue) {
				webSearchResults = messages[messages.length - 1].webSearch;
			}

			conv.messages = messages;

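			// when continuing, keep the content generated so far so the new tokens are appended to it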
			const previousContent = isContinue
				? conv.messages.find((message) => message.id === messageId)?.content ?? ""
				: "";

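			// stream tokens from the model endpoint, building the assistant message as they arrive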
			try {
				const endpoint = await model.getEndpoint();
				for await (const output of await endpoint({ conversation: conv, continue: isContinue })) {
					// if generated_text is not set, the generation is not done yet
					if (!output.generated_text) {
						// stream the next token, skipping special tokens
						if (!output.token.special) {
							update({
								type: "stream",
								token: output.token.text,
							});

							// if the last message is not from assistant, it means this is the first token
							const lastMessage = messages[messages.length - 1];

							if (lastMessage?.from !== "assistant") {
								// so we create a new message
								messages = [
									...messages,
									// id doesn't match the backend id but it's not important for assistant messages
									// First token has a space at the beginning, trim it
									{
										from: "assistant",
										content: output.token.text.trimStart(),
										webSearch: webSearchResults,
										updates,
										id: crypto.randomUUID(),
										createdAt: new Date(),
										updatedAt: new Date(),
									},
								];
							} else {
								// abort check
								const date = abortedGenerations.get(convId.toString());
								if (date && date > promptedAt) {
									break;
								}

								// otherwise we just concatenate tokens
								lastMessage.content += output.token.text;
							}
						}
					} else {
						// append output.generated_text to the previous content of the last message
						messages = [
							...messages.slice(0, -1),
							{
								...messages[messages.length - 1],
								content: previousContent + output.generated_text,
								interrupted: !output.token.special, // if it's a special token the generation finished on its own, else it was interrupted
								updates,
								updatedAt: new Date(),
							},
						];
					}
				}
			} catch (e) {
				update({ type: "status", status: "error", message: (e as Error).message });
			}

			await collections.conversations.updateOne(
				{
					_id: convId,
				},
				{
					$set: {
						messages,
						title: conv.title,
						updatedAt: new Date(),
					},
				}
			);

			// used to detect whether cancel() was called because of an interrupt or just because the connection closed
			doneStreaming = true;

			update({
				type: "finalAnswer",
				text: messages[messages.length - 1].content,
			});

			await summarizeIfNeeded;
			return;
		},
		async cancel() {
			if (!doneStreaming) {
				await collections.conversations.updateOne(
					{
						_id: convId,
					},
					{
						$set: {
							messages,
							title: conv.title,
							updatedAt: new Date(),
						},
					}
				);
			}
		},
	});

	// TODO: maybe we should wait for the message to be saved before ending the response, in case of errors
	return new Response(stream);
}

export async function DELETE({ locals, params }) {
	const convId = new ObjectId(params.id);

	const conv = await collections.conversations.findOne({
		_id: convId,
		...authCondition(locals),
	});

	if (!conv) {
		throw error(404, "Conversation not found");
	}

	await collections.conversations.deleteOne({ _id: conv._id });

	return new Response();
}

export async function PATCH({ request, locals, params }) {
	const { title } = z
		.object({ title: z.string().trim().min(1).max(100) })
		.parse(await request.json());

	const convId = new ObjectId(params.id);

	const conv = await collections.conversations.findOne({
		_id: convId,
		...authCondition(locals),
	});

	if (!conv) {
		throw error(404, "Conversation not found");
	}

	await collections.conversations.updateOne(
		{
			_id: convId,
		},
		{
			$set: {
				title,
			},
		}
	);

	return new Response();
}