Spaces: Running on Zero
AlekseyCalvin committed: Update app.py

app.py CHANGED
@@ -55,12 +55,12 @@ device = "cuda" if torch.cuda.is_available() else "cpu"
 
 model_id = ("zer0int/LongCLIP-GmP-ViT-L-14")
 config = CLIPConfig.from_pretrained(model_id)
-config.text_config.max_position_embeddings =
+config.text_config.max_position_embeddings = 77
 clip_model = CLIPModel.from_pretrained(model_id, torch_dtype=torch.bfloat16, config=config, ignore_mismatched_sizes=True)
-clip_processor = CLIPProcessor.from_pretrained(model_id, padding="max_length", max_length=
+clip_processor = CLIPProcessor.from_pretrained(model_id, padding="max_length", max_length=77)
 pipe.tokenizer = clip_processor.tokenizer
 pipe.text_encoder = clip_model.text_model
-pipe.tokenizer_max_length =
+pipe.tokenizer_max_length = 77
 pipe.text_encoder.dtype = torch.bfloat16
 
 #clipmodel = 'norm'
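
For context, a minimal sketch of what this block of app.py does once the change is applied: it loads the zer0int/LongCLIP-GmP-ViT-L-14 checkpoint, pins its text context back to the standard 77 positions (LongCLIP checkpoints ship with a longer context, which is why ignore_mismatched_sizes=True is needed when the position embeddings are resized), and grafts the resulting tokenizer and text encoder onto an already-built diffusers pipeline. Only the lines shown in the diff come from the commit; the imports, the BASE_MODEL placeholder, and the pipeline construction are assumptions added to make the sketch self-contained.

import torch
from diffusers import DiffusionPipeline
from transformers import CLIPConfig, CLIPModel, CLIPProcessor

device = "cuda" if torch.cuda.is_available() else "cpu"

# Assumption: the commit does not show how `pipe` is built; BASE_MODEL is a
# placeholder for whatever diffusers checkpoint the Space actually loads.
BASE_MODEL = "repo-id-of-the-base-pipeline"
pipe = DiffusionPipeline.from_pretrained(BASE_MODEL, torch_dtype=torch.bfloat16)

# Lines from the commit: load LongCLIP, clamp its context to 77 token positions,
# and swap its tokenizer / text encoder into the pipeline.
model_id = "zer0int/LongCLIP-GmP-ViT-L-14"
config = CLIPConfig.from_pretrained(model_id)
config.text_config.max_position_embeddings = 77
clip_model = CLIPModel.from_pretrained(
    model_id, torch_dtype=torch.bfloat16, config=config, ignore_mismatched_sizes=True
)
clip_processor = CLIPProcessor.from_pretrained(model_id, padding="max_length", max_length=77)
pipe.tokenizer = clip_processor.tokenizer
pipe.text_encoder = clip_model.text_model
pipe.tokenizer_max_length = 77
pipe.text_encoder.dtype = torch.bfloat16

pipe = pipe.to(device)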