try to fix
Browse files- app/api/ask/route.ts +3 -3
- lib/best-provider.ts +2 -2
app/api/ask/route.ts
CHANGED
|
@@ -4,7 +4,7 @@ import { NextResponse } from "next/server";
|
|
| 4 |
import { headers } from "next/headers";
|
| 5 |
import { InferenceClient } from "@huggingface/inference";
|
| 6 |
|
| 7 |
-
import { MODELS
|
| 8 |
import {
|
| 9 |
DIVIDER,
|
| 10 |
FOLLOW_UP_SYSTEM_PROMPT,
|
|
@@ -125,7 +125,7 @@ export async function POST(request: NextRequest) {
|
|
| 125 |
const chatCompletion = client.chatCompletionStream(
|
| 126 |
{
|
| 127 |
model: selectedModel.value,
|
| 128 |
-
provider: selectedProvider,
|
| 129 |
messages: [
|
| 130 |
{
|
| 131 |
role: "system",
|
|
@@ -136,7 +136,7 @@ export async function POST(request: NextRequest) {
|
|
| 136 |
content: `${rewrittenPrompt}${redesignMarkdown ? `\n\nHere is my current design as a markdown:\n\n${redesignMarkdown}\n\nNow, please create a new design based on this markdown. Use the images in the markdown.` : ""}`
|
| 137 |
},
|
| 138 |
],
|
| 139 |
-
max_tokens:
|
| 140 |
},
|
| 141 |
billTo ? { billTo } : {}
|
| 142 |
);
|
|
|
|
| 4 |
import { headers } from "next/headers";
|
| 5 |
import { InferenceClient } from "@huggingface/inference";
|
| 6 |
|
| 7 |
+
import { MODELS } from "@/lib/providers";
|
| 8 |
import {
|
| 9 |
DIVIDER,
|
| 10 |
FOLLOW_UP_SYSTEM_PROMPT,
|
|
|
|
| 125 |
const chatCompletion = client.chatCompletionStream(
|
| 126 |
{
|
| 127 |
model: selectedModel.value,
|
| 128 |
+
provider: selectedProvider.provider,
|
| 129 |
messages: [
|
| 130 |
{
|
| 131 |
role: "system",
|
|
|
|
| 136 |
content: `${rewrittenPrompt}${redesignMarkdown ? `\n\nHere is my current design as a markdown:\n\n${redesignMarkdown}\n\nNow, please create a new design based on this markdown. Use the images in the markdown.` : ""}`
|
| 137 |
},
|
| 138 |
],
|
| 139 |
+
max_tokens: 65_536,
|
| 140 |
},
|
| 141 |
billTo ? { billTo } : {}
|
| 142 |
);
|
lib/best-provider.ts
CHANGED
|
@@ -8,13 +8,13 @@ export const getBestProvider = async (model: string, provider?: string) => {
|
|
| 8 |
if (a.status !== "live" && b.status === "live") return 1
|
| 9 |
return a?.pricing?.output - b?.pricing?.output + a?.pricing?.input - b?.pricing?.input
|
| 10 |
})
|
| 11 |
-
bestProvider = sortedProviders[0]
|
| 12 |
} else {
|
| 13 |
const providerData = data.providers.find((p: any) => p.provider === provider)
|
| 14 |
if (providerData?.status === "live") {
|
| 15 |
bestProvider = providerData.provider
|
| 16 |
} else {
|
| 17 |
-
bestProvider = data.providers?.find((p: any) => p.status === "live")
|
| 18 |
}
|
| 19 |
}
|
| 20 |
|
|
|
|
| 8 |
if (a.status !== "live" && b.status === "live") return 1
|
| 9 |
return a?.pricing?.output - b?.pricing?.output + a?.pricing?.input - b?.pricing?.input
|
| 10 |
})
|
| 11 |
+
bestProvider = sortedProviders[0]
|
| 12 |
} else {
|
| 13 |
const providerData = data.providers.find((p: any) => p.provider === provider)
|
| 14 |
if (providerData?.status === "live") {
|
| 15 |
bestProvider = providerData.provider
|
| 16 |
} else {
|
| 17 |
+
bestProvider = data.providers?.find((p: any) => p.status === "live")
|
| 18 |
}
|
| 19 |
}
|
| 20 |
|