matt HOFFNER committed · Commit 2cd9790 · 1 Parent(s): 916e00a

fix

src/pages/api/llm.js CHANGED (+3 -4)
@@ -1,9 +1,8 @@
 import { GoogleCustomSearch } from "openai-function-calling-tools";
-import { DEFAULT_SYSTEM_PROMPT, DEFAULT_TEMPERATURE } from '@/utils/app/const';
 import { LLMError, LLMStream } from './stream';
 
 // @ts-expect-error
-import wasm from '
+import wasm from '../../../node_modules/@dqbd/tiktoken/lite/tiktoken_bg.wasm?module';
 
 import tiktokenModel from '@dqbd/tiktoken/encoders/cl100k_base.json';
 import { Tiktoken, init } from '@dqbd/tiktoken/lite/init';
@@ -41,12 +40,12 @@ const handler = async (req) => {
 
   let promptToSend = question;
   if (!promptToSend) {
-    promptToSend = DEFAULT_SYSTEM_PROMPT;
+    promptToSend = "";
   }
 
   let temperatureToUse = temperature;
   if (temperatureToUse == null) {
-    temperatureToUse = DEFAULT_TEMPERATURE;
+    temperatureToUse = 0.8;
   }
 
   const prompt_tokens = encoding.encode(promptToSend);
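
For context, a minimal sketch of how these pieces typically fit together in an edge-runtime API route that uses the wasm build of @dqbd/tiktoken (the common chatbot-ui pattern). Only the imports and the two literal defaults come from this diff; the request parsing, the question/temperature fields, the edge config export, and the placeholder response are assumptions, since the rest of llm.js is not shown here.

// Sketch only: assumed surrounding handler shape; verify against the actual file.
// @ts-expect-error
import wasm from '../../../node_modules/@dqbd/tiktoken/lite/tiktoken_bg.wasm?module';
import tiktokenModel from '@dqbd/tiktoken/encoders/cl100k_base.json';
import { Tiktoken, init } from '@dqbd/tiktoken/lite/init';

export const config = { runtime: 'edge' }; // assumption: Next.js Edge API route

const handler = async (req) => {
  // Assumption: the prompt and sampling settings arrive in the JSON request body.
  const { question, temperature } = await req.json();

  // Initialize the tiktoken wasm module, then build a cl100k_base encoder.
  await init((imports) => WebAssembly.instantiate(wasm, imports));
  const encoding = new Tiktoken(
    tiktokenModel.bpe_ranks,
    tiktokenModel.special_tokens,
    tiktokenModel.pat_str,
  );

  // Defaults introduced by this commit: empty prompt and temperature 0.8,
  // replacing the removed DEFAULT_SYSTEM_PROMPT / DEFAULT_TEMPERATURE constants.
  let promptToSend = question;
  if (!promptToSend) {
    promptToSend = "";
  }

  let temperatureToUse = temperature;
  if (temperatureToUse == null) {
    temperatureToUse = 0.8;
  }

  // Token count for the prompt, typically used to trim history to the context window.
  const prompt_tokens = encoding.encode(promptToSend);
  encoding.free();

  // The real handler presumably assembles the messages and streams the completion
  // (e.g. via LLMStream from './stream'); a placeholder response stands in here.
  return new Response(
    JSON.stringify({ promptTokens: prompt_tokens.length, temperatureToUse }),
  );
};

export default handler;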