Yardenfren committed
Commit c5f20f7 · verified · Parent: 75b4efd

Update inf.py

Files changed (1): inf.py (+3 -1)
inf.py CHANGED
@@ -20,7 +20,7 @@ class InferencePipeline:
         self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
         if self.device.type == 'cpu':
             self.pipe = StableDiffusionXLPipeline.from_pretrained(
-                self.base_model_id, use_auth_token=self.hf_token, cache_dir='./cache')
+                self.base_model_id, use_auth_token=self.hf_token)
         else:
             self.pipe = StableDiffusionXLPipeline.from_pretrained(
                 self.base_model_id,
@@ -111,6 +111,8 @@ class InferencePipeline:
         if not torch.cuda.is_available():
             raise gr.Error('CUDA is not available.')
 
+        print(f"running with {self.device}, {self.pipe.device}")
+
         self.load_pipe(content_lora_model_id, style_lora_model_id, content_alpha, style_alpha)
 
         generator = torch.Generator().manual_seed(seed)
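
For context, the device-selection pattern this commit touches can be exercised on its own with the minimal sketch below. It assumes only the torch and diffusers packages; the model id, the float16 choice in the GPU branch, and the seed value are illustrative placeholders, not the repository's exact configuration.

import torch
from diffusers import StableDiffusionXLPipeline

BASE_MODEL_ID = "stabilityai/stable-diffusion-xl-base-1.0"  # placeholder model id

# Same check as in the diff: use CUDA when available, fall back to CPU otherwise.
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

if device.type == 'cpu':
    # CPU path: after this commit no cache_dir is passed, so the default
    # Hugging Face cache location is used.
    pipe = StableDiffusionXLPipeline.from_pretrained(BASE_MODEL_ID)
else:
    # GPU path: the kwargs beyond the model id are assumptions here
    # (the diff only shows the first argument of the call).
    pipe = StableDiffusionXLPipeline.from_pretrained(
        BASE_MODEL_ID, torch_dtype=torch.float16)
    pipe.to(device)

# Mirrors the debug print added by this commit.
print(f"running with {device}, {pipe.device}")

# Seeded generator, matching the unchanged line after the second hunk.
generator = torch.Generator().manual_seed(0)

Dropping cache_dir='./cache' means downloads land in the standard Hugging Face cache directory rather than a per-app folder, and the added print makes it visible at runtime which device the pipeline actually ended up on.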