# FastAPI chat service (Hugging Face Space app).
# NOTE(review): the original paste carried page-scrape residue here
# (Space status, file size, git blob hashes, a line-number gutter);
# replaced with this header so the file is valid Python.
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from duckai import DuckAI
from g4f.client import Client
app = FastAPI()

# Allow cross-origin calls from any frontend.
# NOTE(review): per the CORS spec, browsers reject a wildcard
# `Access-Control-Allow-Origin: *` combined with credentials — confirm
# whether credentialed requests are actually needed here.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"],
    allow_headers=["*"],
)
class ChatQuery(BaseModel):
    """Request-body schema for a chat query.

    NOTE(review): currently unused — the /chat/ endpoint reads `query`
    from the query string, not a request body. Confirm before removing.
    """

    query: str
@app.get("/chat/")
async def chat(query: str):
    """Answer *query* with gpt-4o-mini, falling back to DuckAI claude-3-haiku.

    Returns ``{"results": ...}`` from whichever backend succeeds first.
    Raises HTTP 400 when *query* is empty and HTTP 500 (with both error
    messages in ``detail``) when both backends fail.
    """
    if not query:
        raise HTTPException(status_code=400, detail="Query parameter is required")
    # Fresh per-request clients; neither object is reused across requests.
    client = Client()
    duck = DuckAI()  # Initialize DuckAI here
    try:
        results = client.chat.completions.create(
            model="gpt-4o-mini",
            messages=[{"role": "user", "content": query}],
            web_search=False
        )
        return {"results": results}
    except Exception as e1:
        print(f"Primary model (gpt-4o-mini) failed: {e1}")
        try:
            results = duck.chat(query, model='claude-3-haiku')
            return {"results": results}
        except Exception as e2:
            print(f"Fallback model (claude-3-haiku) also failed: {e2}")
            # Chain explicitly so the traceback records the fallback
            # failure as the direct cause of the HTTP error.
            raise HTTPException(
                status_code=500,
                detail={
                    "error": "Both models failed",
                    "primary_error": str(e1),
                    "fallback_error": str(e2)
                }
            ) from e2
@app.get("/health")
@app.get("/")
async def health_check():
    """Liveness probe; served at both / and /health."""
    return {"status": "healthy"}
if __name__ == "__main__":
    import uvicorn

    # NOTE: `reload=True` was dropped — uvicorn ignores it (with a warning)
    # when given an app object rather than an import string like "main:app".
    uvicorn.run(app, host="0.0.0.0", port=7860, log_level="info")