manan committed
Commit 17d0614
1 Parent(s): 4aa121f

fix large model config

Files changed (1):
  model.py (+2 -9)
model.py CHANGED
@@ -157,14 +157,7 @@ class NBMEModel(nn.Module):
 
         self.path = path
         self.num_labels = num_labels
-        self.config = transformers.AutoConfig.from_pretrained(config['model_checkpoint'])
-
-        self.config.update(
-            {
-                "layer_norm_eps": layer_norm_eps,
-            }
-        )
-        self.transformer = transformers.AutoModel.from_pretrained(config['model_checkpoint'], config=self.config)
+        self.transformer = transformers.AutoModel.from_pretrained(config['model_checkpoint'])
         self.dropout = nn.Dropout(0.1)
 
         self.dropout1 = nn.Dropout(0.1)
@@ -173,7 +166,7 @@ class NBMEModel(nn.Module):
         self.dropout4 = nn.Dropout(0.4)
         self.dropout5 = nn.Dropout(0.5)
 
-        self.output = nn.Linear(self.config.hidden_size, 1)
+        self.output = nn.Linear(1024, 1)
 
         if self.path is not None:
             self.load_state_dict(torch.load(self.path)['model'])
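
For context: dropping the AutoConfig override means AutoModel.from_pretrained now loads the checkpoint with its own stock config (the layer_norm_eps override is gone), and the hardcoded 1024 matches the hidden size of common *-large transformer checkpoints such as roberta-large or deberta-v3-large. A minimal sketch of how the head width could instead be read off the loaded backbone, assuming config['model_checkpoint'] names a standard Hugging Face checkpoint; the build_head helper below is hypothetical, not part of this repo:

    import torch.nn as nn
    import transformers

    # Hypothetical sketch: derive the output head width from the backbone's
    # own config instead of hardcoding 1024. Standard Hugging Face models
    # expose this as `model.config.hidden_size` (1024 for most *-large models).
    def build_head(model_checkpoint):
        transformer = transformers.AutoModel.from_pretrained(model_checkpoint)
        head = nn.Linear(transformer.config.hidden_size, 1)
        return transformer, head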