zideliu committed on
Commit
028631b
1 Parent(s): 15fe398

Update clip

Browse files
Files changed (1) hide show
  1. app.py +1 -4
app.py CHANGED
@@ -86,10 +86,7 @@ config = get_config()
86
  device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
87
  print(device)
88
  # Load open_clip and vq model
89
- print("GPU memory:",torch.cuda.memory_allocated(0)/1024/1024/1024,"GB")
90
- prompt_model,_,_ = open_clip.create_model_and_transforms('ViT-bigG-14', 'laion2b_s39b_b160k',precision='fp16',device=device)
91
- print("GPU memory:",torch.cuda.memory_allocated(0)/1024/1024/1024,"GB")
92
-
93
  prompt_model = prompt_model.to(device)
94
  prompt_model.eval()
95
  tokenizer = open_clip.get_tokenizer('ViT-bigG-14')
 
86
  device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
87
  print(device)
88
  # Load open_clip and vq model
89
+ prompt_model,_,_ = open_clip.create_model_and_transforms('ViT-bigG-14', 'laion2b_s39b_b160k',device='cpu')
 
 
 
90
  prompt_model = prompt_model.to(device)
91
  prompt_model.eval()
92
  tokenizer = open_clip.get_tokenizer('ViT-bigG-14')