Update inference_manager.py
Changed file: inference_manager.py (+1 −1)
@@ -524,7 +524,7 @@ class ModelManager:
     prompt_str = cfg.get("prompt", "{prompt}").replace("{prompt}", p)
     #generator = torch.Generator(model.base_model_pipeline.device).manual_seed(seed)
     print(f"generate: p={p}, np={negative_prompt}, steps={steps}, guidance_scale={guidance_scale}, size={width},{height}, seed={seed}")
-    print(f"device: embedding={average_embedding.device}, ip_model={ip_model.pipe}, pipe={model.base_model_pipeline.device}")
+    print(f"device: embedding={average_embedding.device}, ip_model={ip_model.pipe.device}, pipe={model.base_model_pipeline.device}")
     images = ip_model.generate(
         prompt=prompt_str,
         negative_prompt=negative_prompt,