Littlehongman committed on
Commit
04d88b1
·
1 Parent(s): 5c765ff

fix: wrong path

Browse files
Files changed (2) hide show
  1. Dockerfile +0 -2
  2. model.py +1 -5
Dockerfile CHANGED
@@ -15,8 +15,6 @@ ENV HOME /home/user
15
  ENV PATH $HOME/.local/bin:$PATH
16
 
17
  WORKDIR $HOME
18
- RUN git lfs pull -I artifacts/model-ql03493w:v3/model.ckpt
19
-
20
  RUN mkdir app
21
  WORKDIR $HOME/app
22
  COPY . $HOME/app
 
15
  ENV PATH $HOME/.local/bin:$PATH
16
 
17
  WORKDIR $HOME
 
 
18
  RUN mkdir app
19
  WORKDIR $HOME/app
20
  COPY . $HOME/app
model.py CHANGED
@@ -122,7 +122,7 @@ def load_model():
122
 
123
  # # Load fine-tuned model from wandb
124
  artifact_dir = "./artifacts/model-ql03493w:v3"
125
- PATH = f"{artifact_dir[2:]}/model.ckpt"
126
 
127
  # Load pretrained GPT, CLIP model from OpenAI
128
  clip_model, image_transfrom = load_clip_model()
@@ -131,10 +131,6 @@ def load_model():
131
 
132
  # Load weights
133
  print(PATH)
134
-
135
- for file_name in os.listdir(os.getcwd()):
136
- print(file_name)
137
-
138
  model = ImageCaptioner(clip_model, gpt_model, tokenizer, 0)
139
  checkpoint = torch.load(PATH, map_location=torch.device('cpu'))
140
  model.load_state_dict(checkpoint["state_dict"])
 
122
 
123
  # # Load fine-tuned model from wandb
124
  artifact_dir = "./artifacts/model-ql03493w:v3"
125
+ PATH = f"{os.getcwd()}/{artifact_dir[2:]}/model.ckpt"
126
 
127
  # Load pretrained GPT, CLIP model from OpenAI
128
  clip_model, image_transfrom = load_clip_model()
 
131
 
132
  # Load weights
133
  print(PATH)
 
 
 
 
134
  model = ImageCaptioner(clip_model, gpt_model, tokenizer, 0)
135
  checkpoint = torch.load(PATH, map_location=torch.device('cpu'))
136
  model.load_state_dict(checkpoint["state_dict"])