"""FastAPI image-generation server.

Requests are queued for a single background worker thread; results are
returned as base64-encoded JPEGs, and the API is exposed through an ngrok
tunnel.
"""
from fastapi import FastAPI, HTTPException, Body
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse
import uvicorn
from pyngrok import ngrok
import os
from image_generator import generate_image
import base64
from io import BytesIO
import threading
import queue
from typing import Optional


app = FastAPI()

# Set up CORS
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Incoming requests are queued for a single background worker; finished
# results are stored here keyed by request id until the client fetches them.
request_queue = queue.Queue()
result_queue = {}

def process_request():
    # Background worker: pull queued generation requests one at a time and
    # store the finished image as a base64-encoded JPEG under its request id.
    while True:
        request_id, data = request_queue.get()
        try:
            # Note: 'scheduler_type' is queued by the endpoint but is not
            # currently forwarded to generate_image.
            image = generate_image(
                data['prompt'],
                data['num_inference_steps'],
                data['seed'],
                data['guidance_scale'],
                negative_prompt=data.get('negative_prompt')
            )
            buffered = BytesIO()
            image.save(buffered, format="JPEG")
            img_str = base64.b64encode(buffered.getvalue()).decode()
            result_queue[request_id] = img_str
        except Exception as e:
            # On failure, store the error message so the client gets a
            # response instead of polling forever.
            result_queue[request_id] = str(e)
        finally:
            request_queue.task_done()

@app.post('/generate')
def generate(
    prompt: str = Body(...),
    num_inference_steps: int = Body(default=30),
    seed: int = Body(default=42),
    guidance_scale: float = Body(default=1.0),
    negative_prompt: Optional[str] = Body(default=None),
    scheduler_type: str = Body(default="LCM")
):
    if not prompt:
        raise HTTPException(status_code=400, detail="No prompt provided")

    request_id = os.urandom(8).hex()
    request_queue.put((request_id, {
        'prompt': prompt, 
        'num_inference_steps': num_inference_steps, 
        'seed': seed, 
        'guidance_scale': guidance_scale, 
        'negative_prompt': negative_prompt,
        'scheduler_type': scheduler_type
    }))

    return {'request_id': request_id}

@app.get('/result/{request_id}')
def get_result(request_id: str):
    if request_id in result_queue:
        result = result_queue.pop(request_id)
        return {'image': result}
    # Still pending: FastAPI does not honor Flask-style (body, status) tuples,
    # so return an explicit 202 response instead.
    return JSONResponse(status_code=202, content={'status': 'processing'})

@app.get('/get')
def get_status():
    return {'status': 'Server is running'}

def start_ngrok():
    # Expose the local server through an ngrok tunnel (pyngrok uses the
    # authtoken from its configuration if one has been set).
    ngrok_tunnel = ngrok.connect(8000)
    print(' * Ngrok Tunnel URL:', ngrok_tunnel.public_url)

if __name__ == '__main__':
    # Start the background worker and the ngrok tunnel, then serve the API.
    # Note: these threads only start when the module is run directly.
    threading.Thread(target=process_request, daemon=True).start()
    threading.Thread(target=start_ngrok, daemon=True).start()
    uvicorn.run(app, host="0.0.0.0", port=8000)
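
# --- Example client flow (illustrative sketch, not executed by this module) ---
# Assumes the server is reachable at http://localhost:8000 and that the
# `requests` package is installed; adjust the URL if you go through ngrok.
#
#   import time
#   import requests
#
#   resp = requests.post(
#       "http://localhost:8000/generate",
#       json={"prompt": "a watercolor painting of a fox", "num_inference_steps": 30},
#   )
#   request_id = resp.json()["request_id"]
#
#   # Poll until the background worker has stored a result (200 with 'image');
#   # a 202 response means the request is still being processed.
#   while True:
#       result = requests.get(f"http://localhost:8000/result/{request_id}")
#       if result.status_code == 200:
#           base64_jpeg = result.json()["image"]
#           break
#       time.sleep(1)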