nightfury committed
Commit 89412d2
1 Parent(s): 3417b69

Update app.py

Files changed (1)
  app.py  +3 -3
app.py CHANGED
@@ -63,20 +63,20 @@ model_id_or_path = "CompVis/stable-diffusion-v1-4"
 pipe = StableDiffusionInpaintingPipeline.from_pretrained(
     model_id_or_path,
     revision="fp16",
-    torch_dtype=torch.double, #float16
+    torch_dtype=torch.float16,
     use_auth_token=auth_token
 ).to(device)
 #pipe = pipe.to(device)
 #self.register_buffer('n_', ...)
 print ("torch.backends.mps.is_available: ", torch.backends.mps.is_available())
 
-model = CLIPDensePredT(version='ViT-B/16', reduce_dim=64, complex_trans_conv=True)
+model = CLIPDensePredT(version='ViT-B/16', reduce_dim=16, complex_trans_conv=True)
 
 model = model.to(torch.device(device))
 model.eval() #.half()
 
 
-weightsPATH = './clipseg/weights/rd64-uni.pth'
+weightsPATH = './clipseg/weights/rd16-uni.pth'
 
 #state = {'model': model.state_dict()}
 #torch.save(state, weightsPATH)
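
For context on the three changed lines: the "fp16" revision of the CompVis/stable-diffusion-v1-4 checkpoint stores its weights in half precision, so pairing it with torch_dtype=torch.float16 (rather than torch.double) matches the checkpoint's precision and roughly halves memory use. Likewise, reduce_dim in CLIPDensePredT has to match the CLIPSeg checkpoint being loaded: the clipseg repository ships rd64-uni.pth (reduce_dim=64) and rd16-uni.pth (reduce_dim=16), which is why both values change together here. Below is a minimal sketch of loading the segmentation model and turning a text prompt into an inpainting mask; the import path, image size, file names, and prompt are illustrative assumptions, not code taken from app.py.

import torch
from PIL import Image
from torchvision import transforms
from clipseg.models.clipseg import CLIPDensePredT  # import path assumed; depends on how clipseg is vendored

device = "cuda" if torch.cuda.is_available() else "cpu"

# reduce_dim must match the checkpoint: rd16-uni.pth pairs with reduce_dim=16,
# rd64-uni.pth with reduce_dim=64.
model = CLIPDensePredT(version='ViT-B/16', reduce_dim=16, complex_trans_conv=True)
model.load_state_dict(
    torch.load('./clipseg/weights/rd16-uni.pth', map_location=torch.device(device)),
    strict=False,  # as in the clipseg README; the checkpoint omits the frozen CLIP weights
)
model.to(torch.device(device))
model.eval()

# Preprocess roughly the way the clipseg examples do (352x352 is the size used there;
# treat the exact transform as an assumption).
transform = transforms.Compose([
    transforms.ToTensor(),
    transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
    transforms.Resize((352, 352)),
])
img = transform(Image.open('input.png').convert('RGB')).unsqueeze(0).to(device)

prompts = ['the object to replace']  # hypothetical text prompt selecting the region
with torch.no_grad():
    preds = model(img.repeat(len(prompts), 1, 1, 1), prompts)[0]

# Threshold the logits into a binary mask for the inpainting step.
mask = (torch.sigmoid(preds[0][0]) > 0.5).float().cpu()

Resized back to the original image dimensions, a mask produced this way is what an inpainting pipeline such as the StableDiffusionInpaintingPipeline constructed above would typically consume as its mask image.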