Spaces:
Runtime error
Runtime error
aa
Browse files
app.py
CHANGED
@@ -1,23 +1,34 @@
|
|
1 |
import gradio as gr
|
2 |
import requests
|
3 |
import os
|
|
|
|
|
|
|
4 |
|
5 |
-
#fn_index=1
|
6 |
-
# inputs:f
|
7 |
-
# |-textbox
|
8 |
-
# |-slider
|
9 |
-
# |-slider
|
10 |
-
# |-slider
|
11 |
-
# |-slider
|
12 |
-
# outputs:
|
13 |
-
# |-gallery
|
14 |
-
#API_URL = "https://api-inference.huggingface.co/models/bigscience/bloom"
|
15 |
HF_TOKEN = os.environ.get("diffuse_new") or True
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
16 |
#HF_TOKEN = os.environ["HF_TOKEN"]
|
17 |
#headers = {"Authorization": f"Bearer {HF_TOKEN}"}
|
18 |
sd_inf = gr.Blocks.load(name="spaces/stabilityai/stable-diffusion", use_auth_token=HF_TOKEN )#'<hf access token redacted — credential was leaked; revoke it on the Hub>')
|
19 |
|
20 |
-
def
|
21 |
print("******** Inside get_SD ********")
|
22 |
print(f"translated_txt is : {translated_txt}")
|
23 |
#sd_inf = gr.Blocks.load(name="spaces/stabilityai/stable-diffusion", use_auth_token='<hf access token redacted — credential was leaked; revoke it on the Hub>')
|
|
|
import gradio as gr
import requests
import os

import torch as th
from torch import autocast
from diffusers import StableDiffusionPipeline

# Hub auth token. Falls back to True when the "diffuse_new" Space secret is
# unset — huggingface_hub treats True as "use the locally cached login token".
HF_TOKEN = os.environ.get("diffuse_new") or True

# Pick the compute device once and reuse the answer (the original re-ran
# th.cuda.is_available() with a double-negative right after binding has_cuda).
has_cuda = th.cuda.is_available()
device = th.device('cuda' if has_cuda else 'cpu')
print(f"device is :{device}")

# init stable diffusion model (downloads weights on first run; needs the token)
pipe = StableDiffusionPipeline.from_pretrained(
    "CompVis/stable-diffusion-v1-4",
    use_auth_token=HF_TOKEN).to(device)
def get_sd(translated_txt):
    """Run the local Stable Diffusion pipeline on an (already translated)
    text prompt and return the first generated image.

    Args:
        translated_txt: prompt string fed to the pipeline.

    Returns:
        The first image of the pipeline output.
        NOTE(review): the ["sample"] key matches older diffusers releases;
        newer ones expose `.images` — confirm against the pinned version.
    """
    scale = 7.5   # classifier-free guidance scale
    steps = 45    # number of denoising steps
    # Only use autocast when CUDA is present: torch.autocast with 'cpu' is
    # unsupported on the torch builds this Space targets, and fp16 autocast
    # on CPU breaks SD inference anyway — this was the runtime error path.
    if th.cuda.is_available():
        ctx = autocast('cuda')
    else:
        import contextlib
        ctx = contextlib.nullcontext()
    with ctx:
        image = pipe(translated_txt, guidance_scale=scale,
                     num_inference_steps=steps)["sample"][0]
    return image
#API_URL = "https://api-inference.huggingface.co/models/bigscience/bloom"
#HF_TOKEN = os.environ.get("diffuse_new") or True
#HF_TOKEN = os.environ["HF_TOKEN"]
#headers = {"Authorization": f"Bearer {HF_TOKEN}"}

# Remote fallback: load the public stable-diffusion Space as a callable app.
# SECURITY NOTE(review): a hard-coded `hf_...` access token was previously
# committed in a trailing comment on this line. It has been removed here, but
# the token is already public in the git history — revoke it on the Hub and
# supply credentials only via Space secrets (HF_TOKEN above).
sd_inf = gr.Blocks.load(name="spaces/stabilityai/stable-diffusion", use_auth_token=HF_TOKEN )
def get_sd_old(translated_txt):
    """Legacy debug stub: log the prompt that would be sent to the remote
    stable-diffusion Space. Kept for reference; performs no generation and
    returns None.

    Args:
        translated_txt: prompt string (only logged).
    """
    print("******** Inside get_SD ********")
    print(f"translated_txt is : {translated_txt}")
    # SECURITY NOTE(review): a commented-out gr.Blocks.load call carrying a
    # hard-coded `hf_...` token was removed here — that token is in the git
    # history and must be revoked on the Hub.