Texttra committed on
Commit
bcf5a44
·
verified ·
1 Parent(s): 17550d2

Update handler.py

Browse files
Files changed (1) hide show
  1. handler.py +1 -20
handler.py CHANGED
@@ -14,11 +14,9 @@ class EndpointHandler:
14
  use_auth_token=True # Required for gated base model
15
  )
16
 
17
- # ✅ Load LoRA from Hugging Face Hub (or local if path is set)
18
  print("Loading LoRA weights from: Texttra/Cityscape_Studio")
19
  self.pipe.load_lora_weights("Texttra/Cityscape_Studio", weight_name="c1t3_v1.safetensors")
20
 
21
- # Move to GPU if available
22
  if torch.cuda.is_available():
23
  self.pipe.to("cuda")
24
  else:
@@ -40,21 +38,4 @@ class EndpointHandler:
40
  print("Extracted prompt:", prompt)
41
 
42
  if not prompt:
43
- return {"error": "No prompt provided."}
44
-
45
- # ✅ FLUX requires both prompt_embeds and pooled_prompt_embeds
46
- conditioning, pooled = self.compel(prompt, return_pooled=True)
47
- print("Conditioning complete.")
48
-
49
- image = self.pipe(
50
- prompt_embeds=conditioning,
51
- pooled_prompt_embeds=pooled
52
- ).images[0]
53
- print("Image generated.")
54
-
55
- buffer = BytesIO()
56
- image.save(buffer, format="PNG")
57
- base64_image = base64.b64encode(buffer.getvalue()).decode("utf-8")
58
- print("Returning image.")
59
-
60
- return {"image": base64_image}
 
14
  use_auth_token=True # Required for gated base model
15
  )
16
 
 
17
  print("Loading LoRA weights from: Texttra/Cityscape_Studio")
18
  self.pipe.load_lora_weights("Texttra/Cityscape_Studio", weight_name="c1t3_v1.safetensors")
19
 
 
20
  if torch.cuda.is_available():
21
  self.pipe.to("cuda")
22
  else:
 
38
  print("Extracted prompt:", prompt)
39
 
40
  if not prompt:
41
+ return {"error": "No prompt provided