chat-ui/src/lib/buildPrompt.ts
import type { EndpointParameters } from "./server/endpoints/endpoints";
import type { BackendModel } from "./server/models";

type buildPromptOptions = Pick<EndpointParameters, "messages" | "preprompt" | "continueMessage"> & {
	model: BackendModel;
};

export async function buildPrompt({
	messages,
	model,
	preprompt,
	continueMessage,
}: buildPromptOptions): Promise<string> {
	// Render the chat template, then roughly truncate by word count.
	let prompt = model
		.chatPromptRender({ messages, preprompt })
		// Not super precise, but it's truncated in the model's backend anyway
		.split(" ")
		.slice(-(model.parameters?.truncate ?? 0))
		.join(" ");

	// When continuing a partial message, strip a trailing stop sequence so the
	// model resumes generating instead of stopping immediately.
	if (continueMessage && model.parameters?.stop) {
		prompt = model.parameters.stop.reduce((acc: string, curr: string) => {
			if (acc.endsWith(curr)) {
				return acc.slice(0, acc.length - curr.length);
			}
			return acc;
		}, prompt.trimEnd());
	}

	return prompt;
}
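
For reference, a minimal usage sketch (not part of the original file): it stubs only the fields buildPrompt reads from BackendModel (chatPromptRender, parameters.truncate, parameters.stop). The stub shape, the message object, and the parameter values are assumptions for illustration, not the real chat-ui model config.

// Hypothetical usage sketch; the fake model below is an assumption, not a real BackendModel.
import { buildPrompt } from "./buildPrompt";
import type { BackendModel } from "./server/models";

const fakeModel = {
	// Assumed minimal template: preprompt followed by "from: content" lines.
	chatPromptRender: (opts: { messages: { from: string; content: string }[]; preprompt?: string }) =>
		[opts.preprompt ?? "", ...opts.messages.map((m) => `${m.from}: ${m.content}`)].join("\n"),
	parameters: { truncate: 1024, stop: ["</s>"] },
} as unknown as BackendModel;

// buildPromptOptions is not exported, so the options object is cast to the
// function's parameter type; the message is a stub with only the fields used here.
const prompt = await buildPrompt({
	messages: [{ from: "user", content: "Hello!" }],
	model: fakeModel,
	preprompt: "You are a helpful assistant.",
	continueMessage: false,
} as unknown as Parameters<typeof buildPrompt>[0]);

console.log(prompt);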