kiddothe2b committed on
Commit
36a3409
1 Parent(s): 194ba35

Update config.json

Files changed (1)
  1. config.json +12 -1
config.json CHANGED
@@ -1,7 +1,18 @@
 {
+  "_name_or_path": "kiddothe2b/adhoc-hierarchical-transformer-base-4096",
   "architectures": [
     "HiTransformerForMaskedLM"
   ],
+  "auto_map": {
+    "AutoConfig": "configuration_hat.HATConfig",
+    "AutoTokenizer": "tokenization_hat.HATTokenizer",
+    "AutoModel": "modelling_hat.HATModel",
+    "AutoModelForMaskedLM": "modelling_hat.HATForMaskedLM",
+    "AutoModelForMultipleChoice": "modelling_hat.HATForMultipleChoice",
+    "AutoModelForQuestionAnswering": "modelling_hat.HATForQuestionAnswering",
+    "AutoModelForSequenceClassification": "modelling_hat.HATForSequenceClassification",
+    "AutoModelForTokenClassification": "modelling_hat.HATForTokenClassification"
+  },
   "attention_probs_dropout_prob": 0.1,
   "bos_token_id": 0,
   "classifier_dropout": null,
@@ -79,7 +90,7 @@
   "max_sentence_size": 128,
   "max_sentences": 32,
   "model_max_length": 4096,
-  "model_type": "hi-transformer",
+  "model_type": "hierarchical-transformer",
   "num_attention_heads": 12,
   "num_hidden_layers": 15,
   "output_past": true,