David Day committed on
Commit 071f463 · unverified · 1 Parent(s): 281d004

Fix vision_tower ZeroGPU compatibility.

Files changed (2)
  1. model_builder.py +2 -2
  2. model_worker.py +1 -1
model_builder.py CHANGED
@@ -140,9 +140,9 @@ def load_pretrained_model(model_path, model_base, model_name, load_8bit=False, l
     if not vision_tower.is_loaded:
         vision_tower.load_model()
     if load_bf16:
-        vision_tower.to(device='cuda', dtype=torch.bfloat16)
+        vision_tower.to(device='cpu', dtype=torch.bfloat16)
     else:
-        vision_tower.to(device='cuda', dtype=torch.float16)
+        vision_tower.to(device='cpu', dtype=torch.float16)
     image_processor = vision_tower.image_processor
 
     if hasattr(model.config, "max_sequence_length"):
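Why this change: on a ZeroGPU Space the process starts without a CUDA device, so moving the vision tower to cuda during model loading fails; keeping it on CPU defers the transfer until a GPU has actually been attached. A minimal sketch of the load-time behaviour, assuming a LLaVA-style vision tower that exposes is_loaded and load_model(); the helper name is illustrative, not code from this repo:

    import torch

    def prepare_vision_tower(vision_tower, load_bf16=False):
        # Load the tower weights if the builder has not done so yet.
        if not vision_tower.is_loaded:
            vision_tower.load_model()
        # Keep the tower on CPU: under ZeroGPU no CUDA device exists at load
        # time; it is moved to GPU later, inside a @spaces.GPU-decorated call.
        dtype = torch.bfloat16 if load_bf16 else torch.float16
        vision_tower.to(device='cpu', dtype=dtype)
        return vision_tower.image_processor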
model_worker.py CHANGED
@@ -52,11 +52,11 @@ class ModelWorker:
             torch_device='cpu',
             device_map="cpu",
         )
-        self.model.to('cuda')
 
     @spaces.GPU
     def generate_stream(self, params):
         tokenizer, model, image_processor = self.tokenizer, self.model, self.image_processor
+        model.to('cuda')
         logger.info(f'Model devices: {model.device}')
 
         prompt = params["prompt"]
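This follows the standard ZeroGPU pattern: a GPU is only attached for the duration of a function decorated with @spaces.GPU, so the model is built on CPU in __init__ and moved to CUDA inside generate_stream, where a device is guaranteed to exist. A minimal, self-contained sketch of that pattern, assuming a Hugging Face transformers model and tokenizer; the class and field names are illustrative, not the worker's actual interface:

    import spaces
    import torch

    class WorkerSketch:
        def __init__(self, model, tokenizer):
            # No GPU exists at construction time on ZeroGPU, so the model
            # stays on CPU here.
            self.model = model
            self.tokenizer = tokenizer

        @spaces.GPU
        def generate_stream(self, params):
            # Inside the decorated call a GPU has been attached, so the
            # transfer is safe; subsequent ops run on that device.
            model = self.model
            model.to('cuda')
            inputs = self.tokenizer(params["prompt"], return_tensors="pt").to(model.device)
            return model.generate(**inputs, max_new_tokens=32)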