from fastapi import FastAPI, Request
from fastapi.responses import HTMLResponse
from fastapi.staticfiles import StaticFiles
import os

import openai

app = FastAPI()

# Serve the front-end assets from the local "static" directory.
app.mount("/static", StaticFiles(directory="static"), name="static")


@app.get("/", response_class=HTMLResponse)
async def root(request: Request):
    # Return the chat page itself.
    with open(os.path.join("static", "main.html")) as f:
        return HTMLResponse(f.read())


@app.post("/chat")
async def chat(request: Request):
    data = await request.json()
    message = data["message"]

    # Read the API key from the environment rather than hard-coding it.
    openai.api_key = os.getenv("OPENAI_API_KEY")

    # gpt-3.5-turbo is a chat model, so it is called through the
    # ChatCompletion endpoint with a list of messages, not the legacy
    # Completion API with a plain prompt.
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": message}],
        max_tokens=3000,
        n=1,
        temperature=1,
        frequency_penalty=0,
        presence_penalty=0,
    )
    # Return only the assistant's reply text to the caller.
    return response.choices[0].message.content
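

# Optional convenience entry point so the app can be started with
# `python main.py`.  This is a sketch: it assumes the `uvicorn` ASGI server
# is installed and that 127.0.0.1:8000 is an acceptable address; neither is
# dictated by the application code above.
#
# Once the server is running, the /chat endpoint can be exercised with the
# JSON shape the handler expects, e.g.:
#
#   curl -X POST http://127.0.0.1:8000/chat \
#        -H "Content-Type: application/json" \
#        -d '{"message": "Hello!"}'
if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="127.0.0.1", port=8000)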