add thinking process
- app/api/ask/route.ts +2 -9
- components/editor/ask-ai/index.tsx +13 -5
- components/editor/ask-ai/settings.tsx +81 -35
- components/icons/discord.tsx +1 -1
- hooks/useAi.ts +74 -0
- lib/prompts.ts +1 -1
- package-lock.json +7 -7
app/api/ask/route.ts
CHANGED

@@ -80,8 +80,6 @@ export async function POST(request: NextRequest) {
       billTo = "huggingface";
     }
 
-    const selectedProvider = await getBestProvider(selectedModel.value, provider)
-
     let rewrittenPrompt = redesignMarkdown ? `Here is my current design as a markdown:\n\n${redesignMarkdown}\n\nNow, please create a new design based on this markdown. Use the images in the markdown.` : prompt;
 
     if (enhancedSettings.isActive) {
@@ -114,8 +112,7 @@ export async function POST(request: NextRequest) {
 
     const chatCompletion = client.chatCompletionStream(
       {
-        model: selectedModel.value,
-        provider: selectedProvider,
+        model: selectedModel.value + (provider !== "auto" ? `:${provider}` : ""),
         messages: [
           {
             role: "system",
@@ -142,7 +139,6 @@ export async function POST(request: NextRequest) {
         }
 
         const chunk = value.choices[0]?.delta?.content;
-        console.log(chunk);
         if (chunk) {
           await writer.write(encoder.encode(chunk));
         }
@@ -267,8 +263,6 @@ export async function PUT(request: NextRequest) {
       billTo = "huggingface";
     }
 
-    const selectedProvider = await getBestProvider(selectedModel.value, provider);
-
     try {
       const encoder = new TextEncoder();
       const stream = new TransformStream();
@@ -304,8 +298,7 @@ export async function PUT(request: NextRequest) {
 
     const chatCompletion = client.chatCompletionStream(
       {
-        model: selectedModel.value,
-        provider: selectedProvider,
+        model: selectedModel.value + (provider !== "auto" ? `:${provider}` : ""),
         messages: [
           {
             role: "system",
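
Note on the change above: the route no longer calls getBestProvider; it encodes the chosen provider directly in the model id. A minimal sketch of that pattern with the @huggingface/inference client, assuming illustrative model and provider values that are not taken from this commit:

    import { InferenceClient } from "@huggingface/inference";

    // Illustrative values; in the route they come from the request body.
    const selectedModelValue = "deepseek-ai/DeepSeek-V3-0324";
    const provider = "fireworks-ai"; // "auto" leaves routing to the Hugging Face router

    const client = new InferenceClient(process.env.HF_TOKEN);

    const stream = client.chatCompletionStream({
      // "model-id:provider" pins a provider; a bare model id keeps automatic routing.
      model: selectedModelValue + (provider !== "auto" ? `:${provider}` : ""),
      messages: [{ role: "user", content: "Build a landing page" }],
    });

    for await (const value of stream) {
      const chunk = value.choices[0]?.delta?.content;
      if (chunk) process.stdout.write(chunk);
    }
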
components/editor/ask-ai/index.tsx
CHANGED

@@ -38,6 +38,7 @@ export const AskAi = ({
   const {
     isAiWorking,
     isThinking,
+    thinkingContent,
     selectedFiles,
     setSelectedFiles,
     selectedElement,
@@ -66,7 +67,6 @@ export const AskAi = ({
   const [prompt, setPrompt] = useState(
     promptStorage && promptStorage.trim() !== "" ? promptStorage : ""
   );
-  const [think, setThink] = useState("");
   const [openThink, setOpenThink] = useState(false);
   const [randomPromptLoading, setRandomPromptLoading] = useState(false);
 
@@ -142,7 +142,15 @@
     if (refThink.current) {
      refThink.current.scrollTop = refThink.current.scrollHeight;
    }
-
+    // Auto-open dropdown when thinking content appears
+    if (thinkingContent && isThinking && !openThink) {
+      setOpenThink(true);
+    }
+    // Auto-collapse when thinking is complete
+    if (thinkingContent && !isThinking && openThink) {
+      setOpenThink(false);
+    }
+  }, [thinkingContent, isThinking]);
 
   const randomPrompt = () => {
     setRandomPromptLoading(true);
@@ -157,7 +165,7 @@
   return (
     <div className="p-3 w-full">
       <div className="relative bg-neutral-800 border border-neutral-700 rounded-2xl ring-[4px] focus-within:ring-neutral-500/30 focus-within:border-neutral-600 ring-transparent z-20 w-full group">
-        {
+        {thinkingContent && (
         <div className="w-full border-b border-neutral-700 relative overflow-hidden">
           <header
             className="flex items-center justify-between px-5 py-2.5 group hover:bg-neutral-600/20 transition-colors duration-200 cursor-pointer"
@@ -189,7 +197,7 @@
             )}
           >
             <p className="text-[13px] text-neutral-400 whitespace-pre-line px-5 pb-4 pt-3">
-              {
+              {thinkingContent}
             </p>
           </main>
         </div>
@@ -221,7 +229,7 @@
                 ? "Uploading images..."
                 : isAiWorking && !isSameHtml
                 ? "DeepSite is working..."
-                : "DeepSite is
+                : "DeepSite is working..."
             }
           />
           {isAiWorking && (
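
Taken together, the diff above renders the streamed reasoning in a collapsible panel that opens while the model is thinking and collapses when it finishes. A stripped-down, hypothetical sketch of the same behaviour (component name and markup are illustrative, not part of this commit):

    import { useEffect, useState } from "react";
    import { useAi } from "@/hooks/useAi";

    // Hypothetical panel: shows thinkingContent from the hook,
    // auto-opens while isThinking is true and collapses once it turns false.
    export function ThinkingPanel() {
      const { thinkingContent, isThinking } = useAi();
      const [open, setOpen] = useState(false);

      useEffect(() => {
        if (thinkingContent && isThinking) setOpen(true);   // auto-open
        if (thinkingContent && !isThinking) setOpen(false); // auto-collapse
      }, [thinkingContent, isThinking]);

      if (!thinkingContent) return null;
      return (
        <details open={open}>
          <summary>DeepSite is thinking…</summary>
          <p className="whitespace-pre-line">{thinkingContent}</p>
        </details>
      );
    }
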
components/editor/ask-ai/settings.tsx
CHANGED

@@ -20,7 +20,14 @@ import {
 import { useMemo, useState, useEffect } from "react";
 import { useUpdateEffect } from "react-use";
 import Image from "next/image";
-import {
+import {
+  BrainIcon,
+  CheckCheck,
+  ChevronDown,
+  Sparkles,
+  Zap,
+  DollarSign,
+} from "lucide-react";
 import { useAi } from "@/hooks/useAi";
 import { getProviders } from "@/lib/get-providers";
 import Loading from "@/components/loading";
@@ -62,7 +69,10 @@ export function Settings({
   // }, [model]);
 
   useUpdateEffect(() => {
-    if (
+    if (
+      !["auto", "fastest", "cheapest"].includes(provider as string) &&
+      !providers.includes(provider as string)
+    ) {
       setProvider("auto");
     }
   }, [model, provider]);
@@ -205,47 +215,83 @@
           </div>
         )} */}
         <div className="flex flex-col gap-3">
-          <div
-            <
-            [27 more removed lines were not captured in this diff view]
+          <div>
+            <p className="text-neutral-300 text-sm mb-1">Provider Mode</p>
+            <p className="text-neutral-400 text-xs mb-3 leading-relaxed">
+              Choose how we select providers:{" "}
+              <span className="text-white px-1.5 py-0.5 rounded bg-pink-500">
+                Auto
+              </span>{" "}
+              (smart),{" "}
+              <span className="text-white px-1.5 py-0.5 rounded bg-yellow-500">
+                Fastest
+              </span>{" "}
+              (speed), or{" "}
+              <span className="text-white px-1.5 py-0.5 rounded bg-green-500">
+                Cheapest
+              </span>{" "}
+              (cost).
+            </p>
+            <div className="grid grid-cols-3 gap-1 bg-neutral-800 p-1 rounded-full">
+              <button
+                className={classNames(
+                  "flex flex-col items-center justify-center cursor-pointer py-1.5 rounded-full transition-all duration-200",
+                  {
+                    "bg-white text-neutral-800": provider === "auto",
+                    "text-neutral-400 hover:text-neutral-200":
+                      provider !== "auto",
+                  }
+                )}
+                onClick={() => setProvider("auto")}
+              >
+                <Sparkles
+                  className={classNames("size-3.5 mb-0.5", {
+                    "text-pink-400": provider !== "auto",
+                  })}
+                />
+                <span className="text-[10px] font-medium">Auto</span>
+              </button>
+              <button
+                className={classNames(
+                  "flex flex-col items-center justify-center cursor-pointer py-1.5 rounded-full transition-all duration-200",
+                  {
+                    "bg-white text-neutral-800": provider === "fastest",
+                    "text-neutral-400 hover:text-neutral-200":
+                      provider !== "fastest",
+                  }
+                )}
+                onClick={() => setProvider("fastest")}
+              >
+                <Zap
+                  className={classNames("size-3.5 mb-0.5", {
+                    "text-yellow-400": provider !== "fastest",
+                  })}
+                />
+                <span className="text-[10px] font-medium">Fastest</span>
+              </button>
+              <button
                 className={classNames(
-                  "
+                  "flex flex-col items-center justify-center cursor-pointer py-1.5 rounded-full transition-all duration-200",
                   {
-                    "
+                    "bg-white text-neutral-800": provider === "cheapest",
+                    "text-neutral-400 hover:text-neutral-200":
+                      provider !== "cheapest",
                   }
                 )}
-
+                onClick={() => setProvider("cheapest")}
+              >
+                <DollarSign
+                  className={classNames("size-3.5 mb-0.5", {
+                    "text-green-400": provider !== "cheapest",
+                  })}
+                />
+                <span className="text-[10px] font-medium">Cheapest</span>
+              </button>
            </div>
          </div>
          <label className="block">
            <p className="text-neutral-300 text-sm mb-2">
-
+              Or choose a specific provider
            </p>
            <div className="grid grid-cols-2 gap-1.5 relative">
              {loadingProviders ? (
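
The new useUpdateEffect guard simply resets the stored provider to "auto" when it is neither one of the three modes nor a provider the selected model supports. A standalone sketch of that rule, with a helper name that is illustrative rather than part of the commit:

    // Illustrative helper mirroring the guard above.
    const PROVIDER_MODES = ["auto", "fastest", "cheapest"] as const;

    function normalizeProvider(provider: string, available: string[]): string {
      // Keep a mode keyword or a provider the current model supports; otherwise fall back.
      const isMode = PROVIDER_MODES.includes(provider as (typeof PROVIDER_MODES)[number]);
      return isMode || available.includes(provider) ? provider : "auto";
    }

    // normalizeProvider("fireworks-ai", ["sambanova"]) -> "auto"
    // normalizeProvider("cheapest", [])                -> "cheapest"
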
components/icons/discord.tsx
CHANGED

@@ -11,7 +11,7 @@ export const DiscordIcon = ({ className }: { className?: string }) => {
       xmlns="http://www.w3.org/2000/svg"
       className={className}
     >
-      <g
+      <g clipPath="url(#clip0_1084_3121)">
        <path
          d="M81.15 0C79.9124 2.1973 78.8011 4.4704 77.7909 6.794C68.1934 5.3544 58.4191 5.3544 48.7964 6.794C47.8114 4.4704 46.6748 2.1973 45.4373 0C36.4207 1.5407 27.6314 4.2431 19.2968 8.0568C2.77901 32.5304 -1.69139 56.3725 0.531208 79.8863C10.2044 87.0339 21.0395 92.4893 32.5817 95.9747C35.1831 92.4893 37.4815 88.7766 39.4515 84.9124C35.7135 83.5233 32.1018 81.7806 28.6417 79.7601C29.5509 79.1034 30.4349 78.4215 31.2936 77.7648C51.5746 87.3118 75.0632 87.3118 95.3694 77.7648C96.2281 78.472 97.1121 79.1539 98.0213 79.7601C94.5612 81.8058 90.9495 83.5233 87.1863 84.9377C89.1563 88.8019 91.4546 92.5146 94.0561 96C105.598 92.5146 116.433 87.0844 126.107 79.9369C128.733 52.6598 121.611 29.0197 107.29 8.0821C98.9811 4.2684 90.1918 1.5659 81.1752 0.0505L81.15 0ZM42.2802 65.4144C36.0419 65.4144 30.8643 59.7569 30.8643 52.7609C30.8643 45.7649 35.8398 40.0821 42.255 40.0821C48.6702 40.0821 53.7719 45.7901 53.6709 52.7609C53.5699 59.7317 48.6449 65.4144 42.2802 65.4144ZM84.3576 65.4144C78.0939 65.4144 72.9669 59.7569 72.9669 52.7609C72.9669 45.7649 77.9424 40.0821 84.3576 40.0821C90.7728 40.0821 95.8493 45.7901 95.7482 52.7609C95.6472 59.7317 90.7222 65.4144 84.3576 65.4144Z"
          fill="currentColor"
hooks/useAi.ts
CHANGED

@@ -44,6 +44,18 @@ export const useAi = (onScrollToBottom?: () => void) => {
     client.setQueryData(["ai.isThinking"], newIsThinking);
   };
 
+  const { data: thinkingContent } = useQuery<string>({
+    queryKey: ["ai.thinkingContent"],
+    queryFn: async () => "",
+    refetchOnWindowFocus: false,
+    refetchOnReconnect: false,
+    refetchOnMount: false,
+    initialData: ""
+  });
+  const setThinkingContent = (newThinkingContent: string) => {
+    client.setQueryData(["ai.thinkingContent"], newThinkingContent);
+  };
+
   const { data: selectedElement } = useQuery<HTMLElement | null>({
     queryKey: ["ai.selectedElement"],
     queryFn: async () => null,
@@ -167,6 +179,7 @@ export const useAi = (onScrollToBottom?: () => void) => {
     if (!redesignMarkdown && !prompt.trim()) return;
 
     setIsAiWorking(true);
+    setThinkingContent(""); // Reset thinking content
     streamingPagesRef.current.clear(); // Reset tracking for new generation
 
     const abortController = new AbortController();
@@ -199,6 +212,14 @@ export const useAi = (onScrollToBottom?: () => void) => {
         const { done, value } = await reader.read();
 
         if (done) {
+          // Final processing - extract and remove thinking content
+          const thinkMatch = contentResponse.match(/<think>([\s\S]*?)<\/think>/);
+          if (thinkMatch) {
+            setThinkingContent(thinkMatch[1].trim());
+            setIsThinking(false);
+            contentResponse = contentResponse.replace(/<think>[\s\S]*?<\/think>/, '').trim();
+          }
+
           const trimmedResponse = contentResponse.trim();
           if (trimmedResponse.startsWith("{") && trimmedResponse.endsWith("}")) {
             try {
@@ -238,6 +259,26 @@ export const useAi = (onScrollToBottom?: () => void) => {
         const chunk = decoder.decode(value, { stream: true });
         contentResponse += chunk;
 
+        // Extract thinking content while streaming
+        if (contentResponse.includes('</think>')) {
+          // Thinking is complete, extract final content and stop thinking
+          const thinkMatch = contentResponse.match(/<think>([\s\S]*?)<\/think>/);
+          if (thinkMatch) {
+            setThinkingContent(thinkMatch[1].trim());
+            setIsThinking(false);
+          }
+        } else if (contentResponse.includes('<think>')) {
+          // Still thinking, update content
+          const thinkMatch = contentResponse.match(/<think>([\s\S]*)$/);
+          if (thinkMatch) {
+            const thinkingText = thinkMatch[1].trim();
+            if (thinkingText) {
+              setIsThinking(true);
+              setThinkingContent(thinkingText);
+            }
+          }
+        }
+
         const trimmedResponse = contentResponse.trim();
         if (trimmedResponse.startsWith("{") && trimmedResponse.endsWith("}")) {
           try {
@@ -270,6 +311,7 @@ export const useAi = (onScrollToBottom?: () => void) => {
   } catch (error: any) {
     setIsAiWorking(false);
     setIsThinking(false);
+    setThinkingContent("");
     setController(null);
 
     if (!abortController.signal.aborted) {
@@ -289,6 +331,7 @@ export const useAi = (onScrollToBottom?: () => void) => {
 
 
     setIsAiWorking(true);
+    setThinkingContent(""); // Reset thinking content
 
     const abortController = new AbortController();
     setController(abortController);
@@ -330,6 +373,14 @@ export const useAi = (onScrollToBottom?: () => void) => {
         const { done, value } = await reader.read();
 
         if (done) {
+          // Extract and remove thinking content
+          const thinkMatch = contentResponse.match(/<think>([\s\S]*?)<\/think>/);
+          if (thinkMatch) {
+            setThinkingContent(thinkMatch[1].trim());
+            setIsThinking(false);
+            contentResponse = contentResponse.replace(/<think>[\s\S]*?<\/think>/, '').trim();
+          }
+
           const metadataMatch = contentResponse.match(/___METADATA_START___([\s\S]*?)___METADATA_END___/);
           if (metadataMatch) {
             try {
@@ -439,6 +490,26 @@ export const useAi = (onScrollToBottom?: () => void) => {
         const chunk = decoder.decode(value, { stream: true });
         contentResponse += chunk;
 
+        // Extract thinking content while streaming
+        if (contentResponse.includes('</think>')) {
+          // Thinking is complete, extract final content and stop thinking
+          const thinkMatch = contentResponse.match(/<think>([\s\S]*?)<\/think>/);
+          if (thinkMatch) {
+            setThinkingContent(thinkMatch[1].trim());
+            setIsThinking(false);
+          }
+        } else if (contentResponse.includes('<think>')) {
+          // Still thinking, update content
+          const thinkMatch = contentResponse.match(/<think>([\s\S]*)$/);
+          if (thinkMatch) {
+            const thinkingText = thinkMatch[1].trim();
+            if (thinkingText) {
+              setIsThinking(true);
+              setThinkingContent(thinkingText);
+            }
+          }
+        }
+
         // Check for error responses during streaming
         const trimmedResponse = contentResponse.trim();
         if (trimmedResponse.startsWith("{") && trimmedResponse.endsWith("}")) {
@@ -471,6 +542,7 @@ export const useAi = (onScrollToBottom?: () => void) => {
   } catch (error: any) {
     setIsAiWorking(false);
     setIsThinking(false);
+    setThinkingContent("");
     setController(null);
 
     if (!abortController.signal.aborted) {
@@ -618,6 +690,8 @@ export const useAi = (onScrollToBottom?: () => void) => {
   return {
     isThinking,
     setIsThinking,
+    thinkingContent,
+    setThinkingContent,
     callAiNewProject,
     callAiFollowUp,
     isAiWorking,
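
For clarity, the stream handling added above can be summarised as one small transformation on the accumulating response buffer. A standalone sketch of that logic (the function name and return shape are illustrative, not part of the commit):

    // Illustrative helper: split an accumulating response buffer into the
    // reasoning inside <think>…</think> and the remaining visible content.
    type ThinkSplit = { thinking: string; content: string; isThinking: boolean };

    function splitThinking(buffer: string): ThinkSplit {
      const closed = buffer.match(/<think>([\s\S]*?)<\/think>/);
      if (closed) {
        // Thinking finished: expose it once and strip it from the content.
        return {
          thinking: closed[1].trim(),
          content: buffer.replace(/<think>[\s\S]*?<\/think>/, "").trim(),
          isThinking: false,
        };
      }
      const open = buffer.match(/<think>([\s\S]*)$/);
      if (open) {
        // Still thinking: everything after <think> is partial reasoning so far.
        return { thinking: open[1].trim(), content: "", isThinking: true };
      }
      return { thinking: "", content: buffer, isThinking: false };
    }

    // splitThinking("<think>plan the layout</think><!DOCTYPE html>…")
    //   -> { thinking: "plan the layout", content: "<!DOCTYPE html>…", isThinking: false }
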
lib/prompts.ts
CHANGED

@@ -16,7 +16,7 @@ Examples: http://static.photos/red/320x240/133 (red-themed with seed 133), http:
 export const PROMPT_FOR_PROJECT_NAME = `REQUIRED: Generate a name for the project, based on the user's request. Try to be creative and unique. Add a emoji at the end of the name. It should be short, like 6 words. Be fancy, creative and funny. DON'T FORGET IT, IT'S IMPORTANT!`
 
 export const INITIAL_SYSTEM_PROMPT_LIGHT = `You are an expert UI/UX and Front-End Developer.
-No need to explain what you did. Just return the expected result.
+No need to explain what you did. Just return the expected result. Use always TailwindCSS, don't forget to import it.
 Return the results following this format:
 1. Start with ${PROJECT_NAME_START}.
 2. Add the name of the project, right after the start tag.
package-lock.json
CHANGED

@@ -319,13 +319,13 @@
     }
   },
   "node_modules/@huggingface/inference": {
-    "version": "4.
-    "resolved": "https://registry.npmjs.org/@huggingface/inference/-/inference-4.
-    "integrity": "sha512-
+    "version": "4.13.1",
+    "resolved": "https://registry.npmjs.org/@huggingface/inference/-/inference-4.13.1.tgz",
+    "integrity": "sha512-tP63myCjsH+2CqkOaDklJ9MDpUzp6esGMS7RHFXpfv66DWub43kaGTDpI49arqLXM+yFH6FW868eIDgespw5Uw==",
     "license": "MIT",
     "dependencies": {
       "@huggingface/jinja": "^0.5.1",
-      "@huggingface/tasks": "^0.19.
+      "@huggingface/tasks": "^0.19.62"
     },
     "engines": {
       "node": ">=18"
@@ -341,9 +341,9 @@
     }
   },
   "node_modules/@huggingface/tasks": {
-    "version": "0.19.
-    "resolved": "https://registry.npmjs.org/@huggingface/tasks/-/tasks-0.19.
-    "integrity": "sha512-
+    "version": "0.19.63",
+    "resolved": "https://registry.npmjs.org/@huggingface/tasks/-/tasks-0.19.63.tgz",
+    "integrity": "sha512-hmd8e5fdjRiIJE7/EYWXS+Pm2SAu89xjZEgfZddN10ubWqlelXLyj2YgHZrVDEVkVA+5+ImMZUpQIez7b2//fw==",
     "license": "MIT"
   },
   "node_modules/@humanfs/core": {