davda54 committed
Commit 3b2a8c4
Parent: 335b7bc

Update config.json

Files changed (1)
  1. config.json +11 -4
config.json CHANGED
@@ -1,19 +1,26 @@
 {
   "architectures": [
-    "LTGBertForMaskedLM"
+    "LtgBertForMaskedLM"
   ],
+  "auto_map": {
+    "AutoConfig": "configuration_ltgbert.LtgBertConfig",
+    "AutoModel": "modeling_ltgbert.LtgBertModel",
+    "AutoModelForMaskedLM": "modeling_ltgbert.LtgBertForMaskedLM",
+    "AutoModelForSequenceClassification": "modeling_ltgbert.LtgBertForSequenceClassification",
+    "AutoModelForTokenClassification": "modeling_ltgbert.LtgBertForTokenClassification",
+    "AutoModelForQuestionAnswering": "modeling_ltgbert.LtgBertForQuestionAnswering",
+    "AutoModelForMultipleChoice": "modeling_ltgbert.LtgBertForMultipleChoice"
+  },
   "attention_probs_dropout_prob": 0.1,
   "hidden_dropout_prob": 0.1,
   "hidden_size": 768,
-  "initializer_range": 0.02,
   "intermediate_size": 2048,
   "layer_norm_eps": 1e-07,
   "max_position_embeddings": 512,
   "num_attention_heads": 12,
   "num_hidden_layers": 12,
-  "output_all_encoded_layers": true,
   "position_bucket_size": 32,
   "torch_dtype": "float32",
-  "transformers_version": "4.26.0",
+  "transformers_version": "4.23.1",
   "vocab_size": 16384
 }
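
The practical effect of the added "auto_map" block is that the renamed custom classes (LtgBert*, defined in configuration_ltgbert.py and modeling_ltgbert.py inside the model repository) can now be resolved through the standard transformers Auto API. A minimal sketch of loading the checkpoint after this change; the repo id below is a hypothetical placeholder, since the actual repository name is not part of this commit:

    # Sketch: loading a model whose config.json carries an "auto_map",
    # as added in this commit. Requires trust_remote_code=True so that
    # transformers will download and execute the repo's custom modules.
    from transformers import AutoConfig, AutoModelForMaskedLM

    repo_id = "user/ltgbert-model"  # hypothetical repo id, for illustration only

    # "AutoConfig" is mapped to configuration_ltgbert.LtgBertConfig,
    # "AutoModelForMaskedLM" to modeling_ltgbert.LtgBertForMaskedLM.
    config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
    model = AutoModelForMaskedLM.from_pretrained(repo_id, trust_remote_code=True)

Without trust_remote_code=True, transformers refuses to run the remote code referenced by "auto_map" and raises an error prompting the user to opt in.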