Adapter committed on
Commit
b8fb5b9
1 Parent(s): 8a4a8d3
Files changed (1) hide show
  1. demo/model.py +2 -2
demo/model.py CHANGED
@@ -177,8 +177,8 @@ class Model_all:
177
  # style part
178
  self.model_style = StyleAdapter(width=1024, context_dim=768, num_head=8, n_layes=3, num_token=8).to(device)
179
  self.model_style.load_state_dict(torch.load("models/t2iadapter_style_sd14v1.pth", map_location=device))
180
- self.clip_processor = CLIPProcessor.from_pretrained('models/clip/8d052a0f05efbaefbc9e8786ba291cfdf93e5bff')
181
- self.clip_vision_model = CLIPVisionModel.from_pretrained('models/clip/8d052a0f05efbaefbc9e8786ba291cfdf93e5bff').to(device)
182
 
183
  device = 'cpu'
184
  ## mmpose
 
177
  # style part
178
  self.model_style = StyleAdapter(width=1024, context_dim=768, num_head=8, n_layes=3, num_token=8).to(device)
179
  self.model_style.load_state_dict(torch.load("models/t2iadapter_style_sd14v1.pth", map_location=device))
180
+ self.clip_processor = CLIPProcessor.from_pretrained('openai/clip-vit-large-patch14')
181
+ self.clip_vision_model = CLIPVisionModel.from_pretrained('openai/clip-vit-large-patch14').to(device)
182
 
183
  device = 'cpu'
184
  ## mmpose