emeersman committed on
Commit fc7a6bc
1 Parent(s): 36c182c

Add random seeding if specified and train pipeline on base model

Files changed (1):
  1. handler.py +8 -3
handler.py CHANGED
@@ -8,10 +8,12 @@ device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
 if device.type != 'cuda':
     raise ValueError("need to run on GPU")
 
+model_id = "stabilityai/stable-diffusion-2-1-base"
+
 class EndpointHandler():
-    def __init__(self, path=""):
+    def __init__(self):
         # load the optimized model
-        self.pipe = StableDiffusionPipeline.from_pretrained(path, torch_dtype=torch.float16)
+        self.pipe = StableDiffusionPipeline.from_pretrained(model_id, torch_dtype=torch.float16)
         self.pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(self.pipe.scheduler.config)
         self.pipe = self.pipe.to(device)
 
@@ -34,7 +36,10 @@ class EndpointHandler():
         width = params.pop("width", None)
         manual_seed = params.pop("manual_seed", -1)
 
-        generator = torch.Generator(device).manual_seed(manual_seed)
+        generator = torch.Generator(device)
+
+        if (manual_seed != -1):
+            generator.manual_seed(manual_seed)
 
         # run inference pipeline
         out = self.pipe(prompt,
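
For context, a minimal standalone sketch of how the seeded generator introduced by this commit is typically consumed by a diffusers pipeline. Only the model id, scheduler swap, and seeding logic come from the diff above; the prompt, the seed value, and the surrounding scaffolding are illustrative assumptions.

import torch
from diffusers import StableDiffusionPipeline, EulerAncestralDiscreteScheduler

device = torch.device('cuda')

# Load the same base model and scheduler as EndpointHandler.__init__ in this commit
pipe = StableDiffusionPipeline.from_pretrained(
    "stabilityai/stable-diffusion-2-1-base", torch_dtype=torch.float16
)
pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
pipe = pipe.to(device)

# In the handler this value comes from params.pop("manual_seed", -1); 42 is illustrative
manual_seed = 42
generator = torch.Generator(device)
if manual_seed != -1:
    generator.manual_seed(manual_seed)  # fixed seed -> reproducible output

# diffusers pipelines accept a torch.Generator to make sampling deterministic
image = pipe("a photo of an astronaut riding a horse", generator=generator).images[0]

Leaving manual_seed at its default of -1 keeps the generator unseeded, so repeated requests still produce different images.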