cocktailpeanut commited on
Commit
1f688c5
1 Parent(s): f41f95a
Files changed (2) hide show
  1. app.py +13 -4
  2. requirements.txt +3 -3
app.py CHANGED
@@ -3,12 +3,21 @@ import numpy as np
3
  import random
4
  import spaces
5
  import torch
 
6
  from diffusers import DiffusionPipeline
7
 
8
- dtype = torch.bfloat16
9
- device = "cuda" if torch.cuda.is_available() else "cpu"
 
 
 
 
 
 
 
10
 
11
- pipe = DiffusionPipeline.from_pretrained("black-forest-labs/FLUX.1-schnell", torch_dtype=torch.bfloat16, revision="refs/pr/1").to(device)
 
12
 
13
  MAX_SEED = np.iinfo(np.int32).max
14
  MAX_IMAGE_SIZE = 2048
@@ -119,4 +128,4 @@ with gr.Blocks(css=css) as demo:
119
  outputs = [result, seed]
120
  )
121
 
122
- demo.launch()
 
3
  import random
4
  import spaces
5
  import torch
6
+ import devicetorch
7
  from diffusers import DiffusionPipeline
8
 
9
+ device = devicetorch.get(torch)
10
+ if device == "cuda":
11
+ dtype = torch.bfloat16
12
+ elif device == "mps":
13
+ dtype = torch.bfloat16
14
+ else:
15
+ dtype = torch.float32
16
+ #dtype = torch.bfloat16
17
+ #device = "cuda" if torch.cuda.is_available() else "cpu"
18
 
19
+ #pipe = DiffusionPipeline.from_pretrained("black-forest-labs/FLUX.1-schnell", torch_dtype=torch.bfloat16, revision="refs/pr/1").to(device)
20
+ pipe = DiffusionPipeline.from_pretrained("cocktailpeanut/xulf-dev", torch_dtype=dtype, revision="refs/pr/1").to(device)
21
 
22
  MAX_SEED = np.iinfo(np.int32).max
23
  MAX_IMAGE_SIZE = 2048
 
128
  outputs = [result, seed]
129
  )
130
 
131
+ demo.launch()
requirements.txt CHANGED
@@ -1,7 +1,7 @@
1
  accelerate
2
  git+https://github.com/huggingface/diffusers.git@flux-pipeline
3
  invisible_watermark
4
- torch
5
  transformers==4.42.4
6
- xformers
7
- sentencepiece
 
1
  accelerate
2
  git+https://github.com/huggingface/diffusers.git@flux-pipeline
3
  invisible_watermark
4
+ #torch
5
  transformers==4.42.4
6
+ #xformers
7
+ sentencepiece