Jeronymous committed
Commit 1d10e82
1 Parent(s): 8318e86

Remove wrong "auto_map" from config.json

Files changed (1)
config.json +1 -9
config.json CHANGED
@@ -5,14 +5,6 @@
     "FalconForCausalLM"
   ],
   "attention_dropout": 0.0,
-  "auto_map": {
-    "AutoConfig": "configuration_falcon.FalconConfig",
-    "AutoModel": "modeling_falcon.FalconModel",
-    "AutoModelForSequenceClassification": "modeling_falcon.FalconForSequenceClassification",
-    "AutoModelForTokenClassification": "modeling_falcon.FalconForTokenClassification",
-    "AutoModelForQuestionAnswering": "modeling_falcon.FalconForQuestionAnswering",
-    "AutoModelForCausalLM": "modeling_falcon.FalconForCausalLM"
-  },
   "bias": false,
   "bos_token_id": 11,
   "eos_token_id": 11,
@@ -30,4 +22,4 @@
   "transformers_version": "4.27.4",
   "use_cache": true,
   "vocab_size": 65024
-}
+}
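
For context, a brief sketch of why the stale "auto_map" mattered: when config.json contains an "auto_map", the transformers Auto classes try to load the listed modules (configuration_falcon.py, modeling_falcon.py) as custom code from the repository, which requires trust_remote_code=True and fails if those files are not present. With the entry removed, loading falls back to the Falcon implementation bundled with transformers. The repository id below is a hypothetical placeholder, not the actual repo this commit belongs to.

# Minimal sketch of loading after this change; "your-org/your-falcon-model"
# is a hypothetical placeholder for the actual repository id.
from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "your-org/your-falcon-model"  # hypothetical repo id

# Without "auto_map" in config.json, AutoConfig resolves the architecture from
# "model_type"/"architectures" and uses the Falcon classes shipped with
# transformers, so no custom modeling code needs to exist in the repo.
config = AutoConfig.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(repo_id)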