AlekseyCalvin committed on
Commit
3857673
1 Parent(s): a71c7f5

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +1 -4
app.py CHANGED
@@ -47,16 +47,13 @@ clipmodel = 'norm'
47
  if clipmodel == "long":
48
  model_id = "zer0int/LongCLIP-GmP-ViT-L-14"
49
  config = CLIPConfig.from_pretrained(model_id)
50
- maxtokens = 77
51
  if clipmodel == "norm":
52
  model_id = "zer0int/CLIP-GmP-ViT-L-14"
53
  config = CLIPConfig.from_pretrained(model_id)
54
- maxtokens = 512
55
  clip_model = CLIPModel.from_pretrained(model_id, torch_dtype=torch.bfloat16, config=config, ignore_mismatched_sizes=False).to("cuda")
56
- clip_processor = CLIPProcessor.from_pretrained(model_id, padding="max_length", max_length=maxtokens, ignore_mismatched_sizes=False, return_tensors="pt", truncation=True)
57
  pipe.tokenizer = clip_processor.tokenizer
58
  pipe.text_encoder = clip_model.text_model
59
- pipe.tokenizer_max_length = maxtokens
60
  pipe.text_encoder.dtype = torch.bfloat16
61
  torch.cuda.empty_cache()
62
 
 
47
  if clipmodel == "long":
48
  model_id = "zer0int/LongCLIP-GmP-ViT-L-14"
49
  config = CLIPConfig.from_pretrained(model_id)
 
50
  if clipmodel == "norm":
51
  model_id = "zer0int/CLIP-GmP-ViT-L-14"
52
  config = CLIPConfig.from_pretrained(model_id)
 
53
  clip_model = CLIPModel.from_pretrained(model_id, torch_dtype=torch.bfloat16, config=config, ignore_mismatched_sizes=False).to("cuda")
54
+ clip_processor = CLIPProcessor.from_pretrained(model_id, padding="max_length", ignore_mismatched_sizes=False, return_tensors="pt", truncation=True)
55
  pipe.tokenizer = clip_processor.tokenizer
56
  pipe.text_encoder = clip_model.text_model
 
57
  pipe.text_encoder.dtype = torch.bfloat16
58
  torch.cuda.empty_cache()
59