Fouzi Takelait committed on
Commit a35448d
1 Parent(s): 2f1a594

Update transformer_mt_roberta/modeling_transformer_final.py

transformer_mt_roberta/modeling_transformer_final.py CHANGED
@@ -91,7 +91,7 @@ class TransfomerEncoderDecoderModel(nn.Module):
 
         self.dropout = nn.Dropout(self.dropout_rate)
 
-        self.encoder = AutoModelForMaskedLM.from_pretrained("flax-community/roberta_base_danish", output_hidden_states=True)
+        self.encoder = AutoModelForMaskedLM.from_pretrained("flax-community/roberta-base-danish", output_hidden_states=True)
 
         self.decoder_layers = nn.ModuleList([TransformerDecoderLayer(hidden = self.hidden,
                                                                      num_heads = self.num_heads,
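
For context, the change only corrects the Hub model id from "flax-community/roberta_base_danish" (underscores) to "flax-community/roberta-base-danish" (hyphens). Below is a minimal standalone sketch of how that corrected id loads as the encoder; it assumes the transformers library is installed and the checkpoint is reachable on the Hub, and the tokenizer and Danish example sentence are illustrative additions, not part of the original file.

import torch
from transformers import AutoModelForMaskedLM, AutoTokenizer

# Corrected model identifier from this commit (hyphens, not underscores).
model_id = "flax-community/roberta-base-danish"

tokenizer = AutoTokenizer.from_pretrained(model_id)
encoder = AutoModelForMaskedLM.from_pretrained(model_id, output_hidden_states=True)

inputs = tokenizer("En kort dansk sætning.", return_tensors="pt")
with torch.no_grad():
    outputs = encoder(**inputs)

# With output_hidden_states=True, the forward pass exposes per-layer hidden
# states; the last entry is what a downstream decoder would typically attend to.
encoder_states = outputs.hidden_states[-1]
print(encoder_states.shape)  # (batch_size, sequence_length, hidden_size)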