apolinario committed on
Commit 56049a8
1 Parent(s): 697ef9f

Make it slimmer

Files changed (1):
  1. app.py +4 -4
app.py CHANGED
@@ -23,7 +23,7 @@ import torchvision.transforms.functional as TF
 import clip
 import unicodedata
 import re
-from tqdm.notebook import tqdm
+from tqdm import tqdm
 from torchvision.transforms import Compose, Resize, ToTensor, Normalize
 from IPython.display import display
 from einops import rearrange
@@ -116,7 +116,7 @@ def embed_url(url):
 
 class CLIP(object):
     def __init__(self):
-        clip_model = "ViT-B/16"
+        clip_model = "ViT-B/32"
         self.model, _ = clip.load(clip_model)
         self.model = self.model.requires_grad_(False)
         self.normalize = transforms.Normalize(mean=[0.48145466, 0.4578275, 0.40821073],
@@ -203,8 +203,8 @@ def run(prompt,steps,model):
         ]
     )
 
-    initial_batch = 4 # actually that will be multiplied by initial_image_steps
-    initial_image_steps = 32
+    initial_batch = 2 # actually that will be multiplied by initial_image_steps
+    initial_image_steps = 16
 
     def get_image(timestring):
        os.makedirs(f"samples/{timestring}", exist_ok=True)
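
Why these four lines make the Space slimmer: ViT-B/32 cuts a 224x224 input into 7x7 = 49 patches instead of the 14x14 = 196 that ViT-B/16 uses, so the vision transformer pushes roughly 4x fewer tokens through attention per image, and halving both initial_batch and initial_image_steps quarters the initial candidate count (the comment in the diff notes the two multiply). The tqdm swap just replaces the notebook-only progress bar, which needs ipywidgets, with the plain console one. A minimal sketch of the backbone trade-off, assuming the stock OpenAI clip package that app.py imports; the dummy input and loop are illustrative, not part of the commit:

```python
import torch
import clip

# Hypothetical comparison, not part of the commit: encode one dummy image
# with each backbone and report how many patches the ViT processes.
device = "cuda" if torch.cuda.is_available() else "cpu"
dummy = torch.randn(1, 3, 224, 224, device=device)

for name in ("ViT-B/16", "ViT-B/32"):
    model, _preprocess = clip.load(name, device=device)
    model = model.requires_grad_(False)          # same as app.py's CLIP.__init__
    patch = int(name.split("/")[1])              # 16 or 32
    n_patches = (224 // patch) ** 2              # 196 vs. 49 tokens per image
    with torch.no_grad():
        feats = model.encode_image(dummy.to(model.dtype))
    print(f"{name}: {n_patches} patches/image, embedding {tuple(feats.shape)}")
```

Both variants project to the same 512-dimensional embedding, so nothing downstream of CLIP.__init__ has to change for the swap.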