eliphatfs committed on
Commit
e99e78e
1 Parent(s): cbdb77e

Change offload logic.

Browse files
Files changed (1) hide show
  1. app.py +5 -4
app.py CHANGED
@@ -32,11 +32,15 @@ def load_openshape(name, to_cpu=False):
32
  @st.cache_resource
33
  def load_openclip():
34
  sys.clip_move_lock = threading.Lock()
35
- return transformers.CLIPModel.from_pretrained(
36
  "laion/CLIP-ViT-bigG-14-laion2B-39B-b160k",
37
  low_cpu_mem_usage=True, torch_dtype=half,
38
  offload_state_dict=True
39
  ), transformers.CLIPProcessor.from_pretrained("laion/CLIP-ViT-bigG-14-laion2B-39B-b160k")
 
 
 
 
40
 
41
 
42
  f32 = numpy.float32
@@ -291,9 +295,6 @@ def demo_retrieval():
291
 
292
 
293
  try:
294
- if torch.cuda.is_available():
295
- with sys.clip_move_lock:
296
- clip_model.cuda()
297
  with tab_cls:
298
  demo_classification()
299
  with tab_cap:
 
32
  @st.cache_resource
33
  def load_openclip():
34
  sys.clip_move_lock = threading.Lock()
35
+ clip_model, clip_prep = transformers.CLIPModel.from_pretrained(
36
  "laion/CLIP-ViT-bigG-14-laion2B-39B-b160k",
37
  low_cpu_mem_usage=True, torch_dtype=half,
38
  offload_state_dict=True
39
  ), transformers.CLIPProcessor.from_pretrained("laion/CLIP-ViT-bigG-14-laion2B-39B-b160k")
40
+ if torch.cuda.is_available():
41
+ with sys.clip_move_lock:
42
+ clip_model.cuda()
43
+ return clip_model, clip_prep
44
 
45
 
46
  f32 = numpy.float32
 
295
 
296
 
297
  try:
 
 
 
298
  with tab_cls:
299
  demo_classification()
300
  with tab_cap: