Update inference.py
inference.py CHANGED (+9 -5)
@@ -14,10 +14,10 @@ app = FastAPI()
 # Set up CORS
 app.add_middleware(
     CORSMiddleware,
-    allow_origins=["*"],
+    allow_origins=["*"],
     allow_credentials=True,
-    allow_methods=["*"],
-    allow_headers=["*"],
+    allow_methods=["*"],
+    allow_headers=["*"],
 )
 
 request_queue = queue.Queue()
@@ -49,7 +49,9 @@ def generate(
     num_inference_steps: int = Body(default=30),
     seed: int = Body(default=42),
     guidance_scale: float = Body(default=1.0),
-    negative_prompt: str = Body(default=None)
+    negative_prompt: str = Body(default=None),
+    scheduler_type: str = Body(default="LCM"),
+    use_karras_sigmas: bool = Body(default=False)
 ):
     if not prompt:
         raise HTTPException(status_code=400, detail="No prompt provided")
@@ -60,7 +62,9 @@ def generate(
         'num_inference_steps': num_inference_steps,
         'seed': seed,
         'guidance_scale': guidance_scale,
-        'negative_prompt': negative_prompt
+        'negative_prompt': negative_prompt,
+        'scheduler_type': scheduler_type,
+        'use_karras_sigmas': use_karras_sigmas
     }))
 
     return {'request_id': request_id}
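The diff only changes the request schema; it does not show how the worker that drains request_queue consumes the new scheduler_type and use_karras_sigmas fields. Below is a minimal sketch of what that mapping could look like, assuming a diffusers pipeline on the worker side; the pipe argument, the SCHEDULERS table, and apply_scheduler are illustrative names and are not part of this commit.

# Hypothetical worker-side handling of the new fields; not part of this commit.
from diffusers import (
    DPMSolverMultistepScheduler,
    EulerDiscreteScheduler,
    LCMScheduler,
)

# Map the request's scheduler_type string to a diffusers scheduler class.
# "LCM" mirrors the endpoint's default value.
SCHEDULERS = {
    "LCM": LCMScheduler,
    "Euler": EulerDiscreteScheduler,
    "DPM++": DPMSolverMultistepScheduler,
}


def apply_scheduler(pipe, scheduler_type: str, use_karras_sigmas: bool):
    """Swap the pipeline's scheduler according to the request fields."""
    cls = SCHEDULERS.get(scheduler_type, LCMScheduler)
    kwargs = {}
    # use_karras_sigmas is only meaningful for schedulers that accept the flag,
    # e.g. DPMSolverMultistepScheduler.
    if use_karras_sigmas and cls is DPMSolverMultistepScheduler:
        kwargs["use_karras_sigmas"] = True
    pipe.scheduler = cls.from_config(pipe.scheduler.config, **kwargs)
    return pipe

On the client side the new fields ride along in the same JSON body as before, e.g. a requests.post call against the generate endpoint with "scheduler_type": "DPM++" and "use_karras_sigmas": true alongside the existing prompt, seed, and guidance_scale fields.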