import io
import os

import torch
from diffusers import FluxPipeline  # type: ignore
from fastapi import FastAPI, HTTPException
from fastapi.responses import StreamingResponse
from huggingface_hub import login

app = FastAPI()

# Authenticate against the Hugging Face Hub. FLUX.1-dev is a gated model,
# so a token is required to download the weights.
# Fix: the original read HF_TOKEN but never used it (login was imported
# and never called), so authentication silently never happened.
token = os.getenv("HF_TOKEN")
if token:
    login(token=token)

# bfloat16 halves weight memory vs float32; CPU offload moves sub-modules
# to GPU only while they run, so the model fits on smaller GPUs.
pipe = FluxPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16
)
pipe.enable_model_cpu_offload()


def generate_image(prompt: str, seed: int = 0):
    """Run the FLUX pipeline on *prompt* and return the first PIL image.

    ``seed`` seeds the CPU generator for reproducible output. The default
    of 0 preserves the original behavior; pass a different value to get a
    different image for the same prompt (originally the seed was
    hard-coded, so every request produced the identical image).
    """
    image = pipe(
        prompt,
        height=1024,
        width=1024,
        guidance_scale=3.5,
        num_inference_steps=50,
        max_sequence_length=512,
        generator=torch.Generator("cpu").manual_seed(seed),
    ).images[0]
    return image


@app.get("/generate")
def generate(prompt: str, seed: int = 0):
    """Generate a 1024x1024 PNG for *prompt* and stream it to the client.

    ``seed`` is an optional query parameter (default 0, backward
    compatible). Any failure is surfaced as an HTTP 500 whose detail is
    the underlying error message.
    """
    try:
        image = generate_image(prompt, seed)
        # Serialize the PIL image into an in-memory buffer so it can be
        # streamed back without touching disk.
        buf = io.BytesIO()
        image.save(buf, format="PNG")
        buf.seek(0)
        return StreamingResponse(buf, media_type="image/png")
    except Exception as e:
        # Top-level API boundary: convert any error into a 500 response,
        # chaining the cause for server-side tracebacks.
        raise HTTPException(status_code=500, detail=str(e)) from e


if __name__ == "__main__":
    import uvicorn  # type: ignore

    uvicorn.run(app, host="0.0.0.0", port=7860)