manan committed
Commit
495ffda
1 Parent(s): 17d0614

large model config fix

Files changed (1):
model.py +2 -2
model.py CHANGED
@@ -18,7 +18,7 @@ config = dict(
 
     # model info
     tokenizer_path = 'roberta-large', # 'allenai/biomed_roberta_base',
-    model_checkpoint = 'model_large_pseudo_label.pth', # 'allenai/biomed_roberta_base',
+    model_checkpoint = 'roberta-large', # 'allenai/biomed_roberta_base',
     device = 'cuda' if torch.cuda.is_available() else 'cpu',
 
     # training paramters
@@ -334,7 +334,7 @@ def get_predictions(feature_text, pn_history):
     return pred_string
 
 tokenizer = AutoTokenizer.from_pretrained(config['tokenizer_path'])
-path = 'model.pth'
+path = 'model_large_pseudo_label.pth'
 
 model = NBMEModel().to(config['device'])
 model.load_state_dict(
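For context, a minimal sketch of how the two changed settings presumably fit together: config['model_checkpoint'] must name a Hugging Face Hub model (the pretrained backbone), so it is switched to 'roberta-large', while the fine-tuned large-model weights live in model_large_pseudo_label.pth and are applied on top with load_state_dict. NBMEModel's internals are not shown in this diff, so the class below is a hypothetical stand-in, not the repository's actual architecture.

import torch
import torch.nn as nn
from transformers import AutoModel, AutoTokenizer

config = dict(
    tokenizer_path = 'roberta-large',
    model_checkpoint = 'roberta-large',   # must be a Hub model id, not a local .pth file
    device = 'cuda' if torch.cuda.is_available() else 'cpu',
)

class NBMEModel(nn.Module):
    # Hypothetical stand-in: the real NBMEModel in model.py defines the
    # architecture that model_large_pseudo_label.pth actually matches.
    def __init__(self):
        super().__init__()
        self.backbone = AutoModel.from_pretrained(config['model_checkpoint'])
        self.head = nn.Linear(self.backbone.config.hidden_size, 1)

    def forward(self, input_ids, attention_mask):
        out = self.backbone(input_ids=input_ids, attention_mask=attention_mask)
        return self.head(out.last_hidden_state)

tokenizer = AutoTokenizer.from_pretrained(config['tokenizer_path'])

# Fine-tuned weights are loaded separately from the checkpoint file
# referenced in the second hunk of the diff.
path = 'model_large_pseudo_label.pth'
model = NBMEModel().to(config['device'])
model.load_state_dict(
    torch.load(path, map_location=config['device'])
)
model.eval()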