"use server"
import { LLMEngine } from "@/types"
import { predict as predictWithHuggingFace } from "./predictWithHuggingFace"
import { predict as predictWithOpenAI } from "./predictWithOpenAI"
const llmEngine = `${process.env.LLM_ENGINE || ""}` as LLMEngine
export const predict = llmEngine === "OPENAI" ? predictWithOpenAI : predictWithHuggingFace
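
// A minimal usage sketch (hypothetical caller; assumes both backends expose the
// same signature, taking a prompt string and resolving to generated text):
//
//   const text = await predict("Write a one-line caption for this panel.")
//
// Set LLM_ENGINE=OPENAI in the environment to use the OpenAI backend; any other
// value, or an unset variable, selects the Hugging Face backend.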