hysts (HF staff) committed on
Commit: bffd77a
Parent: 5548b8a
Files changed (1)
  1. model.py +4 -4
model.py CHANGED
@@ -24,7 +24,7 @@ sys.path.insert(0, 'HairCLIP/mapper/')
 from mapper.datasets.latents_dataset_inference import LatentsDatasetInference
 from mapper.hairclip_mapper import HairCLIPMapper
 
-TOKEN = os.environ['TOKEN']
+HF_TOKEN = os.environ['HF_TOKEN']
 
 
 class Model:
@@ -40,13 +40,13 @@ class Model:
         path = huggingface_hub.hf_hub_download(
             'hysts/dlib_face_landmark_model',
             'shape_predictor_68_face_landmarks.dat',
-            use_auth_token=TOKEN)
+            use_auth_token=HF_TOKEN)
         return dlib.shape_predictor(path)
 
     def _load_e4e(self) -> nn.Module:
         ckpt_path = huggingface_hub.hf_hub_download('hysts/e4e',
                                                     'e4e_ffhq_encode.pt',
-                                                    use_auth_token=TOKEN)
+                                                    use_auth_token=HF_TOKEN)
         ckpt = torch.load(ckpt_path, map_location='cpu')
         opts = ckpt['opts']
         opts['device'] = self.device.type
@@ -60,7 +60,7 @@ class Model:
     def _load_hairclip(self) -> nn.Module:
         ckpt_path = huggingface_hub.hf_hub_download('hysts/HairCLIP',
                                                     'hairclip.pt',
-                                                    use_auth_token=TOKEN)
+                                                    use_auth_token=HF_TOKEN)
         ckpt = torch.load(ckpt_path, map_location='cpu')
         opts = ckpt['opts']
         opts['device'] = self.device.type
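
Note: a minimal sketch of the download pattern this commit switches to, assuming the HF_TOKEN environment variable is set and the token has read access to the referenced repos. The repo and file names are taken from the diff; use_auth_token matches the older huggingface_hub API that this revision of model.py targets.

import os

import huggingface_hub
import torch

# Read the access token from the environment, as in the updated model.py.
HF_TOKEN = os.environ['HF_TOKEN']

# Download the HairCLIP checkpoint, authenticating with the token.
ckpt_path = huggingface_hub.hf_hub_download('hysts/HairCLIP',
                                            'hairclip.pt',
                                            use_auth_token=HF_TOKEN)

# Load the checkpoint on CPU and inspect its stored training options,
# the same 'opts' dict that model.py reads before building the mapper.
ckpt = torch.load(ckpt_path, map_location='cpu')
print(sorted(ckpt['opts'].keys()))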