"""Load Fashion-CLIP and Marqo FashionSigLIP models for fashion image/text embedding."""

import torch
import open_clip
from transformers import CLIPProcessor, CLIPModel

# Select the GPU if available; the models are moved to this device below.
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

# Fashion-CLIP: a CLIP model fine-tuned on fashion data, via Hugging Face transformers.
fclip_model = CLIPModel.from_pretrained("patrickjohncyh/fashion-clip")
fclip_processor = CLIPProcessor.from_pretrained("patrickjohncyh/fashion-clip")

# Marqo FashionSigLIP: loaded through open_clip from the Hugging Face hub.
siglip_model, siglip_preprocess_train, siglip_preprocess_val = open_clip.create_model_and_transforms(
    'hf-hub:Marqo/marqo-fashionSigLIP'
)
siglip_tokenizer = open_clip.get_tokenizer('hf-hub:Marqo/marqo-fashionSigLIP')

# Move both models to the selected device (a no-op on CPU) and put them in
# inference mode.
fclip_model = fclip_model.to(device).eval()
siglip_model = siglip_model.to(device).eval()
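
# --- Usage sketch (illustrative addition, not part of the original module) ---
# A minimal example of embedding an image and text queries with both models and
# comparing them by cosine similarity. Assumes Pillow is installed and that a
# local file 'example.jpg' exists; the image path and query strings below are
# placeholder values.
if __name__ == "__main__":
    from PIL import Image

    image = Image.open("example.jpg")
    queries = ["a red dress", "a leather handbag"]

    with torch.no_grad():
        # Fashion-CLIP embeddings via the transformers CLIP API.
        fclip_inputs = fclip_processor(
            text=queries, images=image, return_tensors="pt", padding=True
        ).to(device)
        fclip_img = fclip_model.get_image_features(pixel_values=fclip_inputs["pixel_values"])
        fclip_txt = fclip_model.get_text_features(
            input_ids=fclip_inputs["input_ids"],
            attention_mask=fclip_inputs["attention_mask"],
        )

        # FashionSigLIP embeddings via the open_clip API.
        siglip_img = siglip_model.encode_image(
            siglip_preprocess_val(image).unsqueeze(0).to(device)
        )
        siglip_txt = siglip_model.encode_text(siglip_tokenizer(queries).to(device))

    def cosine(img, txt):
        # Cosine similarity = dot product of L2-normalized embeddings.
        img = img / img.norm(dim=-1, keepdim=True)
        txt = txt / txt.norm(dim=-1, keepdim=True)
        return img @ txt.T

    print("Fashion-CLIP similarities:", cosine(fclip_img, fclip_txt))
    print("FashionSigLIP similarities:", cosine(siglip_img, siglip_txt))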