kioai / artifacts /image-gen /src /pages /Video.tsx
kinaiok
Initial deployment setup for Hugging Face Spaces
5ef6e9d
import { useState, useRef, useCallback, useEffect } from "react";
import { useQuery, useMutation, useQueryClient } from "@tanstack/react-query";
import { motion, AnimatePresence } from "framer-motion";
import {
Video, Loader2, Download, Trash2, Sparkles, Lock, Globe, Play,
Upload, X, ImagePlus, CheckCircle2, AlertCircle, Clock,
} from "lucide-react";
import { Button } from "@/components/ui/button";
import { Textarea } from "@/components/ui/textarea";
import { Badge } from "@/components/ui/badge";
import { Progress } from "@/components/ui/progress";
import { useToast } from "@/hooks/use-toast";
import { useLang } from "@/contexts/LanguageContext";
import { useAuth } from "@/contexts/AuthContext";
import {
AlertDialog, AlertDialogAction, AlertDialogCancel, AlertDialogContent,
AlertDialogDescription, AlertDialogFooter, AlertDialogHeader, AlertDialogTitle,
AlertDialogTrigger,
} from "@/components/ui/alert-dialog";
// Base path of the deployment (e.g. a Hugging Face Spaces sub-path); the
// trailing slash is stripped so `${BASE}/api/...` never double-slashes.
const BASE = import.meta.env.BASE_URL.replace(/\/$/, "");
// ── Types ─────────────────────────────────────────────────────────────────────
/** One stored video as returned by the `/api/videos/*` endpoints. */
interface VideoRecord {
  id: number;
  // May be server-relative (leading "/") or absolute — render code prefixes BASE when relative.
  videoUrl: string;
  thumbnailUrl: string | null;
  prompt: string;
  negativePrompt: string | null;
  model: string;
  aspectRatio: string;
  resolution: string;
  // Clip length in seconds (rendered as "Ns" badges in the UI).
  duration: number;
  // Rendered as an image-mode badge when true.
  hasRefImage: boolean;
  isPrivate: boolean;
  userId: number | null;
  createdAt: string;
}
/** A user-supplied reference image: raw base64 payload, its MIME type, and a data-URL preview. */
interface RefImage { base64: string; mime: string; preview: string }
/** Model identifiers the generation endpoint accepts. */
type VideoModel = "grok-3" | "veo-3-fast";
/** Generation options forwarded to `/api/videos/generate`. */
interface VideoOptions {
  model: VideoModel;
  aspectRatio: string;
  resolution: string;
  duration: number;
  // Empty string means "no negative prompt" (omitted from the request body).
  negativePrompt: string;
  enhancePrompt: boolean;
}
/** UI phase of a generation job, roughly ordered by pipeline progress. */
type GenPhase =
  | "idle"
  | "submitting" // POSTing to /generate
  | "turnstile" // fetching turnstile token
  | "connecting" // connecting to geminigen.ai
  | "generating" // AI generating (status 1 events)
  | "done"
  | "error";
/** Full state snapshot produced by useVideoGeneration. */
interface GenState {
  phase: GenPhase;
  message: string;
  elapsedMs: number;
  progress: number | null; // 0-100 from geminigen.ai
  result: VideoRecord | null;
  errorMsg: string | null;
}
// ── API helpers ───────────────────────────────────────────────────────────────
/**
 * Submit a generation request to the backend and return the task id used to
 * follow progress over SSE.
 *
 * @throws Error with the server-provided message, or `HTTP <status>` when the
 *         error body is not JSON.
 */
async function initiateVideoGeneration(
  prompt: string,
  isPrivate: boolean,
  videoOpts: VideoOptions,
  refImageBase64?: string,
  refImageMime?: string,
): Promise<{ taskId: string }> {
  // An empty negative prompt is dropped entirely rather than sent as "".
  const payload = {
    prompt,
    isPrivate,
    model: videoOpts.model,
    aspectRatio: videoOpts.aspectRatio,
    resolution: videoOpts.resolution,
    duration: videoOpts.duration,
    negativePrompt: videoOpts.negativePrompt || undefined,
    enhancePrompt: videoOpts.enhancePrompt,
    referenceImageBase64: refImageBase64,
    referenceImageMime: refImageMime,
  };
  const resp = await fetch(`${BASE}/api/videos/generate`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    credentials: "include",
    body: JSON.stringify(payload),
  });
  if (resp.ok) return resp.json();
  const fallback = `HTTP ${resp.status}`;
  const err = await resp.json().catch(() => ({ message: fallback }));
  throw new Error(err.message || fallback);
}
/** Fetch the most recent 50 videos visible to the current session. */
async function fetchVideoHistory(): Promise<{ videos: VideoRecord[] }> {
  const resp = await fetch(`${BASE}/api/videos/history?limit=50`, { credentials: "include" });
  if (resp.ok) return resp.json();
  throw new Error(`HTTP ${resp.status}`);
}
/** Delete one video by id; throws on any non-2xx response. */
async function deleteVideo(id: number): Promise<void> {
  const resp = await fetch(`${BASE}/api/videos/${id}`, { method: "DELETE", credentials: "include" });
  if (resp.ok) return;
  throw new Error(`HTTP ${resp.status}`);
}
// ── useVideoGeneration hook ───────────────────────────────────────────────────
/**
 * React hook owning the full lifecycle of one video-generation job:
 *   1. POST to `/api/videos/generate` to obtain a task id.
 *   2. Subscribe to `/api/videos/progress/:taskId` over SSE and translate
 *      stream events into a `GenState` the UI can render.
 *   3. Tick an elapsed-time counter once per second while in flight.
 *
 * Fix: the `"done"` stream event previously called only `es.close()`, which
 * left `esRef.current` pointing at a dead stream and — because the elapsed
 * interval is cleared only by `stopSSE()` — let the 1 s timer run forever.
 * It now calls `stopSSE()` like every other terminal path.
 *
 * @param onSuccess Invoked once with the finished `VideoRecord` on completion.
 * @returns `{ state, generate, reset }` — current snapshot, a function to
 *          start a job, and a function to abort/clear back to idle.
 */
function useVideoGeneration(onSuccess: (v: VideoRecord) => void) {
  const { t } = useLang();
  const [state, setState] = useState<GenState>({
    phase: "idle", message: "", elapsedMs: 0, progress: null, result: null, errorMsg: null,
  });
  const esRef = useRef<EventSource | null>(null);
  const timerRef = useRef<ReturnType<typeof setInterval> | null>(null);
  const startTimeRef = useRef<number>(0);
  // Track whether an error was already received via onmessage, so onerror
  // (which also fires when the server closes the stream) doesn't override it.
  const terminalPhaseRef = useRef<boolean>(false);

  const stopTimer = useCallback(() => {
    if (timerRef.current) { clearInterval(timerRef.current); timerRef.current = null; }
  }, []);

  // Close the SSE stream (if any) and stop the elapsed-time ticker.
  const stopSSE = useCallback(() => {
    if (esRef.current) { esRef.current.close(); esRef.current = null; }
    stopTimer();
  }, [stopTimer]);

  const startTimer = useCallback(() => {
    startTimeRef.current = Date.now();
    timerRef.current = setInterval(() => {
      setState((s) => ({ ...s, elapsedMs: Date.now() - startTimeRef.current }));
    }, 1000);
  }, []);

  const generate = useCallback(async (
    prompt: string,
    isPrivate: boolean,
    videoOpts: VideoOptions,
    refImageBase64?: string,
    refImageMime?: string,
  ) => {
    // A new job supersedes any previous stream/timer.
    stopSSE();
    terminalPhaseRef.current = false;
    setState({ phase: "submitting", message: t.videoPhaseSubmitting, elapsedMs: 0, progress: null, result: null, errorMsg: null });
    startTimer();
    let taskId: string;
    try {
      const result = await initiateVideoGeneration(prompt, isPrivate, videoOpts, refImageBase64, refImageMime);
      taskId = result.taskId;
    } catch (err) {
      stopTimer();
      setState((s) => ({
        ...s,
        phase: "error",
        message: "",
        errorMsg: err instanceof Error ? err.message : String(err),
      }));
      return;
    }
    // Open SSE connection to /progress/:taskId
    const es = new EventSource(`${BASE}/api/videos/progress/${taskId}`, { withCredentials: true });
    esRef.current = es;
    es.onmessage = (ev) => {
      try {
        const data = JSON.parse(ev.data) as {
          type: string;
          message?: string;
          status?: number;
          progress?: number;
          video?: VideoRecord;
          errorCode?: string;
        };
        if (data.type === "start") {
          // The server's free-text startup messages distinguish sub-phases;
          // sniff them to pick the matching UI phase.
          setState((s) => ({
            ...s,
            phase: data.message?.includes("Turnstile") ? "turnstile"
              : (data.message?.includes("連接") || data.message?.includes("connect")) ? "connecting"
              : "submitting",
            message: data.message || "",
          }));
        } else if (data.type === "progress") {
          setState((s) => ({
            ...s,
            phase: "generating",
            message: data.message || t.videoPhaseGenerating,
            // Keep the last known percentage when an event omits it.
            progress: typeof data.progress === "number" ? data.progress : s.progress,
          }));
        } else if (data.type === "complete" && data.video) {
          terminalPhaseRef.current = true;
          stopSSE();
          setState((s) => ({
            ...s,
            phase: "done",
            message: t.videoDone,
            result: data.video!,
          }));
          onSuccess(data.video);
        } else if (data.type === "error") {
          terminalPhaseRef.current = true;
          stopSSE();
          setState((s) => ({
            ...s,
            phase: "error",
            message: "",
            errorMsg: data.message || t.videoGenFailed,
          }));
        } else if (data.type === "done") {
          // Stream closed by server (task was already in terminal state).
          // Use stopSSE() — not just es.close() — so the elapsed-time
          // interval is cleared and esRef doesn't keep a dead EventSource.
          stopSSE();
        }
      } catch { /* ignore parse errors */ }
    };
    es.onerror = () => {
      // Only react if this is still the active stream — a newer generate()
      // call may have replaced it already.
      if (esRef.current === es) {
        // If we already received a terminal event (error/complete) via onmessage,
        // don't overwrite the state with the generic "stream disconnected" message.
        if (terminalPhaseRef.current) {
          stopSSE();
          return;
        }
        stopSSE();
        setState((s) => {
          if (s.phase !== "done" && s.phase !== "error") {
            return {
              ...s,
              phase: "error",
              message: "",
              errorMsg: t.videoConnectionFailed,
            };
          }
          return s;
        });
      }
    };
  }, [stopSSE, startTimer, stopTimer, onSuccess, t]);

  // Abort any in-flight job and return to the idle state.
  const reset = useCallback(() => {
    stopSSE();
    setState({ phase: "idle", message: "", elapsedMs: 0, progress: null, result: null, errorMsg: null });
  }, [stopSSE]);

  // Cleanup on unmount
  useEffect(() => () => { stopSSE(); }, [stopSSE]);

  return { state, generate, reset };
}
// ── ElapsedTimer component ────────────────────────────────────────────────────
/**
 * Render a millisecond duration as a compact human string:
 * `"42s"` under a minute, `"3m 7s"` otherwise. Sub-second input yields "0s".
 */
function formatMs(ms: number): string {
  const totalSeconds = Math.floor(ms / 1000);
  const minutes = Math.floor(totalSeconds / 60);
  const seconds = totalSeconds % 60;
  if (minutes === 0) return `${seconds}s`;
  return `${minutes}m ${seconds}s`;
}
// ── VideoCard component ───────────────────────────────────────────────────────
/**
 * Single history entry: inline player with reference-image / privacy badges,
 * option badges, a download shortcut, and a confirm-before-delete dialog.
 */
function VideoCard({ vid, onDelete }: { vid: VideoRecord; onDelete: (id: number) => void }) {
  const { t } = useLang();
  // Server-relative media paths (leading "/") get the deployment base prefix.
  const videoSrc = vid.videoUrl.startsWith("/") ? `${BASE}${vid.videoUrl}` : vid.videoUrl;
  const posterSrc = vid.thumbnailUrl
    ? (vid.thumbnailUrl.startsWith("/") ? `${BASE}${vid.thumbnailUrl}` : vid.thumbnailUrl)
    : undefined;
  // Trigger a client-side download through a transient anchor element.
  const handleDownload = () => {
    const anchor = document.createElement("a");
    anchor.href = videoSrc;
    anchor.download = `starforge-video-${vid.id}.mp4`;
    anchor.click();
  };
  return (
    <motion.div
      layout
      initial={{ opacity: 0, scale: 0.95 }}
      animate={{ opacity: 1, scale: 1 }}
      exit={{ opacity: 0, scale: 0.9 }}
      transition={{ duration: 0.25 }}
      className="bg-card/60 border border-border/50 rounded-xl overflow-hidden"
    >
      <div className="relative aspect-video bg-black/40">
        <video
          src={videoSrc}
          poster={posterSrc}
          className="w-full h-full object-cover"
          controls
          preload="metadata"
          playsInline
        />
        <div className="absolute top-2 left-2 flex gap-1">
          {vid.hasRefImage && (
            <Badge variant="outline" className="text-[10px] bg-black/60 border-violet-400/40 text-violet-300 backdrop-blur-sm">
              <ImagePlus className="w-2.5 h-2.5 mr-1" />{t.videoModeImage}
            </Badge>
          )}
        </div>
        <div className="absolute top-2 right-2">
          <Badge variant="outline" className="text-[10px] bg-black/60 border-white/20 text-white/80 backdrop-blur-sm">
            {vid.isPrivate
              ? <><Lock className="w-2.5 h-2.5 mr-1" />{t.videoPrivateLabel}</>
              : <><Globe className="w-2.5 h-2.5 mr-1" />{t.videoPublicLabel}</>
            }
          </Badge>
        </div>
      </div>
      <div className="p-3">
        <p className="text-xs text-muted-foreground line-clamp-2 mb-3">{vid.prompt}</p>
        <div className="flex items-center gap-1.5">
          <Badge variant="secondary" className="text-[10px]">{vid.model}</Badge>
          <Badge variant="outline" className="text-[10px]">{vid.duration}s</Badge>
          {vid.aspectRatio && <Badge variant="outline" className="text-[10px]">{vid.aspectRatio}</Badge>}
          {vid.resolution && <Badge variant="outline" className="text-[10px] text-sky-400 border-sky-400/30">{vid.resolution}</Badge>}
          <div className="flex-1" />
          <Button
            size="icon" variant="ghost" className="h-7 w-7"
            onClick={handleDownload}
            title={t.videoDownload}
          >
            <Download className="w-3.5 h-3.5" />
          </Button>
          <AlertDialog>
            <AlertDialogTrigger asChild>
              <Button size="icon" variant="ghost" className="h-7 w-7 text-destructive hover:text-destructive" title={t.videoDelete}>
                <Trash2 className="w-3.5 h-3.5" />
              </Button>
            </AlertDialogTrigger>
            <AlertDialogContent>
              <AlertDialogHeader>
                <AlertDialogTitle>{t.videoDelete}</AlertDialogTitle>
                <AlertDialogDescription>{vid.prompt.slice(0, 100)}</AlertDialogDescription>
              </AlertDialogHeader>
              <AlertDialogFooter>
                <AlertDialogCancel>{t.poolCancel}</AlertDialogCancel>
                <AlertDialogAction onClick={() => onDelete(vid.id)} className="bg-destructive hover:bg-destructive/90">
                  {t.videoDelete}
                </AlertDialogAction>
              </AlertDialogFooter>
            </AlertDialogContent>
          </AlertDialog>
        </div>
      </div>
    </motion.div>
  );
}
// ── GeneratingPanel component ─────────────────────────────────────────────────
/** Live progress panel rendered while a generation job is in flight. */
function GeneratingPanel({ state }: { state: GenState }) {
  const { t } = useLang();
  // Fixed bar positions for the pre-generation phases; "generating" then
  // animates through the remaining range from the server-reported percent.
  const phaseProgress: Record<GenPhase, number> = {
    idle: 0, submitting: 5, turnstile: 12, connecting: 20, generating: 25, done: 100, error: 0,
  };
  // Map geminigen.ai's 0-100 into the 25-95 range of our UI bar so early
  // phases (submitting/connecting) stay visible before generation starts.
  const progress = state.phase === "generating" && state.progress !== null
    ? 25 + Math.round(state.progress * 0.70) // 25%–95%
    : phaseProgress[state.phase];
  // Title shown next to the spinner; any phase without an entry (idle/done/
  // error) falls back to the generic "processing" label.
  const phaseTitles: Partial<Record<GenPhase, string>> = {
    submitting: t.videoPhaseSubmitting,
    turnstile: t.videoPhaseGettingCaptcha,
    connecting: t.videoPhaseConnecting,
    generating: t.videoPhaseGenerating,
  };
  const stepLabels: Record<string, string> = {
    submitting: t.videoPhaseSubmitLabel,
    turnstile: t.videoPhaseTurnstileLabel,
    connecting: t.videoPhaseConnectLabel,
    generating: t.videoPhaseGenerateLabel,
  };
  return (
    <motion.div
      key="generating"
      initial={{ opacity: 0, y: 10 }}
      animate={{ opacity: 1, y: 0 }}
      exit={{ opacity: 0 }}
      className="bg-card/40 border border-primary/20 rounded-xl p-6 mb-6"
    >
      <div className="flex items-center gap-3 mb-4">
        <div className="w-10 h-10 rounded-full bg-primary/10 border border-primary/20 flex items-center justify-center shrink-0">
          <Loader2 className="w-5 h-5 text-primary animate-spin" />
        </div>
        <div className="flex-1 min-w-0">
          <p className="font-medium text-foreground text-sm">
            {phaseTitles[state.phase] ?? t.videoPhaseProcessing}
          </p>
          {state.message && (
            <p className="text-xs text-muted-foreground mt-0.5 truncate">{state.message}</p>
          )}
        </div>
        {state.elapsedMs > 0 && (
          <div className="flex items-center gap-1 text-xs text-muted-foreground shrink-0">
            <Clock className="w-3 h-3" />
            {formatMs(state.elapsedMs)}
          </div>
        )}
      </div>
      <Progress value={progress} className="h-1.5 mb-3" />
      {/* Step chips: passed steps show a check, the active one a spinner. */}
      <div className="flex flex-wrap gap-2">
        {(["submitting", "turnstile", "connecting", "generating"] as GenPhase[]).map((ph) => {
          const passed = phaseProgress[state.phase] > phaseProgress[ph];
          const active = state.phase === ph;
          return (
            <div
              key={ph}
              className={`flex items-center gap-1 text-[10px] px-2 py-0.5 rounded-full border transition-colors ${
                active ? "bg-primary/20 border-primary/40 text-primary" :
                passed ? "bg-green-500/10 border-green-500/20 text-green-400" :
                "bg-muted/30 border-border/30 text-muted-foreground"
              }`}
            >
              {passed ? <CheckCircle2 className="w-2.5 h-2.5" /> :
               active ? <Loader2 className="w-2.5 h-2.5 animate-spin" /> :
               <div className="w-2.5 h-2.5 rounded-full border border-current opacity-40" />}
              {stepLabels[ph]}
            </div>
          );
        })}
      </div>
      {state.phase === "generating" && (
        <p className="text-[11px] text-muted-foreground mt-3">
          {t.videoGeneratingHint}
        </p>
      )}
    </motion.div>
  );
}
// ── Main VideoPage ────────────────────────────────────────────────────────────
/**
 * Video generation page: model/options form, live SSE progress panel,
 * completed-result preview, and a history grid (most recent 50 videos).
 */
export function VideoPage() {
  const { t } = useLang();
  const { toast } = useToast();
  const queryClient = useQueryClient();
  const { isSignedIn } = useAuth();
  const [prompt, setPrompt] = useState("");
  const [isPrivate, setIsPrivate] = useState(false);
  const [referenceImage, setReferenceImage] = useState<RefImage | null>(null);
  const fileInputRef = useRef<HTMLInputElement>(null);
  // ── Model selection ──
  const [selectedModel, setSelectedModel] = useState<VideoModel>("grok-3");
  // ── Video generation options ──
  const [aspectRatio, setAspectRatio] = useState("16:9");
  const [resolution, setResolution] = useState("480p");
  const [duration, setDuration] = useState(6);
  const [negativePrompt, setNegativePrompt] = useState("");
  const [enhancePrompt, setEnhancePrompt] = useState(true);
  const [showAdvanced, setShowAdvanced] = useState(false);
  // When switching to Veo, enforce its constraints: Veo renders fixed 8 s
  // clips and only 16:9 / 9:16 ratios (the pickers below disable the rest);
  // switching back to Grok restores the 6 s default.
  const handleModelChange = (model: VideoModel) => {
    setSelectedModel(model);
    if (model === "veo-3-fast") {
      setDuration(8);
      if (aspectRatio !== "16:9" && aspectRatio !== "9:16") setAspectRatio("16:9");
    } else {
      setDuration(6);
    }
  };
  // Query key for the video history list.
  const HISTORY_KEY = ["video-history"];
  const { data: historyData, isLoading: historyLoading } = useQuery({
    queryKey: HISTORY_KEY,
    queryFn: fetchVideoHistory,
    staleTime: 30_000,
  });
  // Runs when the SSE stream reports completion: toast, refetch history,
  // and clear the form. (The finished record itself isn't used here — the
  // history refetch picks it up.)
  const handleSuccess = useCallback((video: VideoRecord) => {
    toast({ title: t.videoGenSuccess });
    queryClient.invalidateQueries({ queryKey: HISTORY_KEY });
    setPrompt("");
    setReferenceImage(null);
  }, [toast, queryClient, t]);
  const { state: genState, generate, reset } = useVideoGeneration(handleSuccess);
  // Any phase other than idle/done/error means a job is in flight.
  const isGenerating = genState.phase !== "idle" && genState.phase !== "done" && genState.phase !== "error";
  // Read the selected file as a data URL; keep both the raw base64 (for the
  // API payload) and the full data URL (for the <img> preview).
  const handleFileUpload = useCallback((file: File) => {
    if (!file.type.startsWith("image/")) {
      toast({ title: t.errorFormatTitle, description: t.errorFormatDesc, variant: "destructive" });
      return;
    }
    const reader = new FileReader();
    reader.onload = (e) => {
      const dataUrl = e.target?.result as string;
      const base64 = dataUrl.split(",")[1];
      setReferenceImage({ base64, mime: file.type, preview: dataUrl });
    };
    reader.readAsDataURL(file);
  }, [t, toast]);
  // Drag-and-drop onto the upload zone; only the first dropped file is used.
  const handleDrop = useCallback((e: React.DragEvent<HTMLDivElement>) => {
    e.preventDefault();
    const file = e.dataTransfer.files[0];
    if (file) handleFileUpload(file);
  }, [handleFileUpload]);
  const { mutate: removeVideo } = useMutation({
    mutationFn: deleteVideo,
    onSuccess: () => {
      toast({ title: t.videoDeleteSuccess });
      queryClient.invalidateQueries({ queryKey: HISTORY_KEY });
    },
    onError: () => { toast({ title: t.videoDeleteFailed, variant: "destructive" }); },
  });
  const videos = historyData?.videos ?? [];
  return (
    <div className="container mx-auto px-4 py-8 max-w-4xl">
      {/* Header */}
      <div className="mb-8">
        <div className="flex items-center gap-3 mb-2">
          <div className="w-9 h-9 rounded-lg bg-primary/20 border border-primary/30 flex items-center justify-center">
            <Video className="w-5 h-5 text-primary" />
          </div>
          <div>
            <h1 className="text-2xl font-bold tracking-tight">{t.videoTitle}</h1>
            <p className="text-sm text-muted-foreground">{t.videoSubtitle}</p>
          </div>
        </div>
      </div>
      {/* Generator Card */}
      <div className="bg-card/60 border border-border/50 rounded-xl p-5 mb-6 backdrop-blur-sm space-y-4">
        {/* ── Model Picker ── */}
        <div>
          <p className="text-xs text-muted-foreground mb-2">{t.videoModel}</p>
          <div className="flex flex-wrap gap-2">
            {([
              {
                value: "grok-3" as VideoModel,
                label: "Grok-3",
                sub: t.videoModelGrokSub,
                desc: t.videoModelGrokDesc,
                color: "text-violet-400",
                border: "border-violet-500/40",
                bg: "bg-violet-500/10",
              },
              {
                value: "veo-3-fast" as VideoModel,
                label: "Veo 3.1 Fast",
                sub: t.videoModelVeoSub,
                desc: t.videoModelVeoDesc,
                color: "text-sky-400",
                border: "border-sky-500/40",
                bg: "bg-sky-500/10",
              },
            ]).map((m) => {
              const active = selectedModel === m.value;
              return (
                <button
                  key={m.value}
                  onClick={() => !isGenerating && handleModelChange(m.value)}
                  disabled={isGenerating}
                  className={`flex flex-col items-start gap-0.5 px-3 py-2.5 rounded-lg border text-xs font-medium transition-all ${
                    active
                      ? `${m.border} ${m.bg} ${m.color}`
                      : "border-border/50 hover:border-border text-muted-foreground hover:text-foreground"
                  }`}
                >
                  <div className="flex items-center gap-1.5">
                    <Sparkles className={`w-3 h-3 ${active ? m.color : ""}`} />
                    <span className="font-semibold">{m.label}</span>
                    <span className={`text-[10px] opacity-70 font-normal ${active ? "" : ""}`}>{m.sub}</span>
                  </div>
                  <span className={`text-[10px] opacity-60 ml-5 ${active ? m.color : "text-muted-foreground"}`}>{m.desc}</span>
                </button>
              );
            })}
          </div>
          {selectedModel === "veo-3-fast" && (
            <p className="mt-2 text-[11px] text-sky-400/80 flex items-center gap-1">
              {/* NOTE(review): this span renders empty — an icon/emoji glyph
                  appears lost in extraction; confirm the intended character */}
              <span></span> {t.videoVeoTimingNote}
            </p>
          )}
        </div>
        {/* Mode badge shown once a reference image is attached */}
        {referenceImage && (
          <Badge className="text-xs gap-1 bg-violet-500/20 text-violet-300 border-violet-400/30">
            <ImagePlus className="w-3 h-3" />{t.videoModeImage}
          </Badge>
        )}
        <Textarea
          value={prompt}
          onChange={(e) => setPrompt(e.target.value)}
          placeholder={t.videoPromptPlaceholder}
          className="min-h-[90px] resize-none bg-background/50"
          disabled={isGenerating}
        />
        {/* ── Aspect Ratio ── */}
        <div>
          <p className="text-xs text-muted-foreground mb-1.5">{t.ratioLabel}</p>
          <div className="flex flex-wrap gap-2">
            {([
              { value: "16:9", label: "16:9", tw: "w-[48px] h-[27px]" },
              { value: "9:16", label: "9:16", tw: "w-[19px] h-[34px]" },
              { value: "1:1", label: "1:1", tw: "w-[30px] h-[30px]", veoLimited: true },
              { value: "4:3", label: "4:3", tw: "w-[38px] h-[28px]", veoLimited: true },
              { value: "3:4", label: "3:4", tw: "w-[28px] h-[37px]", veoLimited: true },
            ] as const).map(({ value, label, tw, veoLimited }) => {
              // Ratios flagged veoLimited are disabled (with a tooltip) for Veo.
              const isLimited = selectedModel === "veo-3-fast" && veoLimited;
              return (
                <div key={value} className="relative group">
                  <button
                    onClick={() => !isLimited && setAspectRatio(value)}
                    disabled={isGenerating || isLimited}
                    className={`flex flex-col items-center gap-1 px-2.5 py-2 rounded-lg border text-[11px] font-medium transition-all ${
                      isLimited
                        ? "border-border/20 text-muted-foreground/30 cursor-not-allowed opacity-40 line-through"
                        : aspectRatio === value
                          ? "border-primary/60 bg-primary/10 text-primary"
                          : "border-border/50 hover:border-border text-muted-foreground hover:text-foreground"
                    }`}
                  >
                    {/* Miniature rectangle visualising the ratio */}
                    <div className={`${tw} rounded-sm border-2 ${
                      isLimited ? "border-muted-foreground/20" :
                      aspectRatio === value ? "border-primary/70 bg-primary/20" : "border-muted-foreground/40"
                    }`} />
                    {label}
                  </button>
                  {isLimited && (
                    <div className="absolute bottom-full left-1/2 -translate-x-1/2 mb-1.5 hidden group-hover:block z-10 pointer-events-none">
                      <div className="bg-popover border border-border text-[10px] text-muted-foreground px-2 py-1 rounded-md whitespace-nowrap shadow-lg">
                        {t.videoVeoNotSupported}
                      </div>
                    </div>
                  )}
                </div>
              );
            })}
          </div>
        </div>
        {/* ── Resolution & Duration ── */}
        <div className="flex flex-wrap gap-5">
          {/* Resolution: Veo ignores resolution (server-side), Grok max 720p */}
          {selectedModel === "grok-3" && (
            <div>
              <p className="text-xs text-muted-foreground mb-1.5">
                {t.resolutionLabel}
                <span className="ml-1.5 text-amber-400/80 text-[10px]">{t.videoGrokMaxRes}</span>
              </p>
              <div className="flex gap-1.5">
                {([
                  { value: "480p", label: "480p", sub: "SD", limited: false },
                  { value: "720p", label: "720p", sub: "HD", limited: false },
                  { value: "1080p", label: "1080p", sub: "FHD", limited: true },
                ] as const).map(({ value, label, sub, limited }) => (
                  <div key={value} className="relative group">
                    <button
                      onClick={() => !limited && setResolution(value)}
                      disabled={isGenerating || limited}
                      className={`px-3 py-1.5 rounded-lg border text-xs font-medium transition-all ${
                        limited
                          ? "border-border/20 text-muted-foreground/30 cursor-not-allowed opacity-40 line-through"
                          : resolution === value
                            ? "border-primary/60 bg-primary/10 text-primary"
                            : "border-border/50 hover:border-border text-muted-foreground hover:text-foreground"
                      }`}
                    >
                      {label} <span className="opacity-60 ml-0.5">{sub}</span>
                    </button>
                    {limited && (
                      <div className="absolute bottom-full left-1/2 -translate-x-1/2 mb-1.5 hidden group-hover:block z-10 pointer-events-none">
                        <div className="bg-popover border border-border text-[10px] text-muted-foreground px-2 py-1 rounded-md whitespace-nowrap shadow-lg">
                          {t.videoGrokNotSupported}
                        </div>
                      </div>
                    )}
                  </div>
                ))}
              </div>
            </div>
          )}
          {/* Duration */}
          <div>
            <p className="text-xs text-muted-foreground mb-1.5">
              {t.videoDuration}
              {selectedModel === "grok-3" && (
                <span className="ml-1.5 text-amber-400/80 text-[10px]">{t.videoGrokMaxTime}</span>
              )}
              {selectedModel === "veo-3-fast" && (
                <span className="ml-1.5 text-sky-400/80 text-[10px]">{t.videoVeoFixedTime}</span>
              )}
            </p>
            <div className="flex gap-1.5">
              {([5, 6, 8, 10] as const).map((d) => {
                // Grok caps at 6 s; Veo is locked to exactly 8 s.
                const isGrok3Limited = selectedModel === "grok-3" && d > 6;
                const isVeoLocked = selectedModel === "veo-3-fast" && d !== 8;
                const isLimited = isGrok3Limited || isVeoLocked;
                const tooltipMsg = isGrok3Limited ? t.videoGrokMaxTime : isVeoLocked ? t.videoVeoFixedTime : "";
                return (
                  <div key={d} className="relative group">
                    <button
                      onClick={() => !isLimited && setDuration(d)}
                      disabled={isGenerating || isLimited}
                      className={`px-3 py-1.5 rounded-lg border text-xs font-medium transition-all ${
                        isLimited
                          ? "border-border/20 text-muted-foreground/30 cursor-not-allowed opacity-40 line-through"
                          : duration === d
                            ? "border-primary/60 bg-primary/10 text-primary"
                            : "border-border/50 hover:border-border text-muted-foreground hover:text-foreground"
                      }`}
                    >
                      {d}s
                    </button>
                    {isLimited && tooltipMsg && (
                      <div className="absolute bottom-full left-1/2 -translate-x-1/2 mb-1.5 hidden group-hover:block z-10 pointer-events-none">
                        <div className="bg-popover border border-border text-[10px] text-muted-foreground px-2 py-1 rounded-md whitespace-nowrap shadow-lg">
                          {tooltipMsg}
                        </div>
                      </div>
                    )}
                  </div>
                );
              })}
            </div>
          </div>
        </div>
        {/* ── Advanced ── */}
        <div>
          <button
            onClick={() => setShowAdvanced((v) => !v)}
            className="flex items-center gap-1 text-xs text-muted-foreground hover:text-foreground transition-colors"
          >
            {/* NOTE(review): empty span with a rotate class — the disclosure
                arrow glyph appears lost in extraction; confirm intended char */}
            <span className={`transition-transform ${showAdvanced ? "rotate-90" : ""}`}></span>
            {t.videoAdvanced}
          </button>
          {showAdvanced && (
            <div className="mt-3 space-y-3 pl-3 border-l border-border/40">
              <div>
                <p className="text-xs text-muted-foreground mb-1">{t.videoNegativePrompt}</p>
                <Textarea
                  value={negativePrompt}
                  onChange={(e) => setNegativePrompt(e.target.value)}
                  placeholder="blurry, distorted, low quality..."
                  className="min-h-[60px] resize-none bg-background/50 text-xs"
                  disabled={isGenerating}
                />
              </div>
              <label className="flex items-center gap-2 cursor-pointer">
                <input
                  type="checkbox"
                  checked={enhancePrompt}
                  onChange={(e) => setEnhancePrompt(e.target.checked)}
                  disabled={isGenerating}
                  className="w-3.5 h-3.5 accent-primary"
                />
                <span className="text-xs text-muted-foreground">{t.videoEnhancePrompt}</span>
              </label>
            </div>
          )}
        </div>
        {/* Reference Image */}
        <div>
          <div className="flex items-center gap-2 mb-2">
            <span className="text-sm font-medium">{t.videoRefImageLabel}</span>
            <Badge variant="outline" className="text-[10px] px-1.5 py-0 h-4 border-violet-400/40 text-violet-400">
              {t.videoRefImageBadge}
            </Badge>
          </div>
          {referenceImage ? (
            <div className="relative rounded-lg overflow-hidden border border-border/50 bg-background/30">
              <img src={referenceImage.preview} alt="reference" className="w-full max-h-48 object-contain" />
              <button
                onClick={() => setReferenceImage(null)}
                className="absolute top-2 right-2 p-1 rounded-full bg-black/60 hover:bg-black/80 text-white transition-colors"
              >
                <X className="w-3.5 h-3.5" />
              </button>
            </div>
          ) : (
            <div
              onDrop={handleDrop}
              onDragOver={(e) => e.preventDefault()}
              onClick={() => !isGenerating && fileInputRef.current?.click()}
              className="border-2 border-dashed border-border/60 rounded-lg p-6 text-center cursor-pointer hover:border-primary/50 hover:bg-primary/5 transition-colors"
            >
              <Upload className="w-6 h-6 mx-auto mb-2 text-muted-foreground" />
              <p className="text-xs text-muted-foreground">{t.videoRefImageDrop}</p>
              <p className="text-[11px] text-muted-foreground/60 mt-0.5">{t.videoRefImageFormats}</p>
            </div>
          )}
          {/* Hidden file input driven by the drop-zone click above */}
          <input ref={fileInputRef} type="file" accept="image/*" className="hidden"
            onChange={(e) => { const f = e.target.files?.[0]; if (f) handleFileUpload(f); }}
          />
        </div>
        <div className="flex items-center justify-between gap-3 pt-1">
          {/* Privacy toggle only offered to signed-in users */}
          {isSignedIn && (
            <button
              onClick={() => setIsPrivate((v) => !v)}
              className="flex items-center gap-1.5 px-3 py-1.5 rounded-md text-xs border border-border/60 text-muted-foreground hover:text-foreground transition-colors"
            >
              {isPrivate
                ? <><Lock className="w-3.5 h-3.5 text-yellow-400" />{t.videoPrivateLabel}</>
                : <><Globe className="w-3.5 h-3.5" />{t.videoPublicLabel}</>
              }
            </button>
          )}
          <div className="flex-1" />
          <Button
            onClick={() => generate(prompt.trim(), isPrivate, { model: selectedModel, aspectRatio, resolution, duration, negativePrompt, enhancePrompt }, referenceImage?.base64, referenceImage?.mime)}
            disabled={isGenerating || !prompt.trim()}
            className="gap-2 min-w-[140px]"
          >
            {isGenerating
              ? <><Loader2 className="w-4 h-4 animate-spin" />{t.videoBtnGenerating}</>
              : <><Play className="w-4 h-4" />{t.videoBtnGenerate}</>
            }
          </Button>
        </div>
      </div>
      {/* Progress / Result / Error panels */}
      <AnimatePresence mode="wait">
        {isGenerating && (
          <GeneratingPanel key="generating" state={genState} />
        )}
        {genState.phase === "error" && (
          <motion.div
            key="error"
            initial={{ opacity: 0, y: 10 }}
            animate={{ opacity: 1, y: 0 }}
            exit={{ opacity: 0 }}
            className="bg-destructive/10 border border-destructive/30 rounded-xl p-5 mb-6"
          >
            <div className="flex items-start gap-3">
              <AlertCircle className="w-5 h-5 text-destructive shrink-0 mt-0.5" />
              <div className="flex-1">
                <p className="font-medium text-destructive text-sm">{t.videoGenFailed}</p>
                {genState.errorMsg && (
                  <p className="text-xs text-muted-foreground mt-1">{genState.errorMsg}</p>
                )}
              </div>
              {/* reset() clears the error panel back to the idle form */}
              <Button size="sm" variant="outline" onClick={reset} className="text-xs h-7 shrink-0">
                {t.poolCancel}
              </Button>
            </div>
          </motion.div>
        )}
        {genState.phase === "done" && genState.result && (
          <motion.div
            key="result"
            initial={{ opacity: 0, scale: 0.97 }}
            animate={{ opacity: 1, scale: 1 }}
            exit={{ opacity: 0 }}
            className="mb-6"
          >
            <div className="flex items-center gap-2 mb-2">
              <CheckCircle2 className="w-4 h-4 text-green-400" />
              <span className="text-sm font-medium text-green-400">{t.videoComplete}</span>
              {genState.elapsedMs > 0 && (
                <span className="text-xs text-muted-foreground ml-auto">{t.videoElapsed} {formatMs(genState.elapsedMs)}</span>
              )}
            </div>
            <div className="bg-card/60 border border-primary/30 rounded-xl overflow-hidden">
              <div className="relative aspect-video bg-black">
                {/* Server-relative URLs get the deployment base prefix */}
                <video
                  src={genState.result.videoUrl.startsWith("/") ? `${BASE}${genState.result.videoUrl}` : genState.result.videoUrl}
                  poster={genState.result.thumbnailUrl
                    ? (genState.result.thumbnailUrl.startsWith("/") ? `${BASE}${genState.result.thumbnailUrl}` : genState.result.thumbnailUrl)
                    : undefined}
                  className="w-full h-full object-contain"
                  controls
                  autoPlay
                  playsInline
                />
                {genState.result.hasRefImage && (
                  <div className="absolute top-2 left-2">
                    <Badge className="text-[10px] bg-violet-500/80 text-white border-0">
                      <ImagePlus className="w-2.5 h-2.5 mr-1" />{t.videoModeImage}
                    </Badge>
                  </div>
                )}
              </div>
              <div className="p-3 flex items-center justify-between gap-2">
                <p className="text-xs text-muted-foreground line-clamp-1 flex-1">{genState.result.prompt}</p>
                {/* Client-side download via a transient anchor element */}
                <Button
                  size="sm" variant="outline" className="gap-1.5 text-xs h-7 shrink-0"
                  onClick={() => { const a = document.createElement("a"); const url = genState.result!.videoUrl; a.href = url.startsWith("/") ? `${BASE}${url}` : url; a.download = `starforge-video-${genState.result!.id}.mp4`; a.click(); }}
                >
                  <Download className="w-3.5 h-3.5" />{t.videoDownload}
                </Button>
              </div>
            </div>
          </motion.div>
        )}
      </AnimatePresence>
      {/* History */}
      <div>
        <h2 className="text-base font-semibold mb-4 flex items-center gap-2">
          <Video className="w-4 h-4 text-muted-foreground" />
          {t.videoHistoryTitle}
        </h2>
        {historyLoading ? (
          <div className="flex items-center justify-center py-12">
            <Loader2 className="w-6 h-6 text-muted-foreground animate-spin" />
          </div>
        ) : videos.length === 0 ? (
          <div className="text-center py-12 text-muted-foreground">
            <Video className="w-10 h-10 mx-auto mb-3 opacity-30" />
            <p className="text-sm">{t.videoHistoryEmpty}</p>
          </div>
        ) : (
          <motion.div layout className="grid grid-cols-1 sm:grid-cols-2 gap-4">
            <AnimatePresence mode="popLayout">
              {videos.map((v) => (
                <VideoCard key={v.id} vid={v} onDelete={removeVideo} />
              ))}
            </AnimatePresence>
          </motion.div>
        )}
      </div>
    </div>
  );
}