muverqqw committed
Commit 030a406 · verified · 1 Parent(s): 19d10dc

Upload config.json with huggingface_hub
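For context, commits with this default message are typically produced by huggingface_hub's HfApi.upload_file. A minimal sketch of such an upload, assuming a hypothetical repo id of muverqqw/alinlight (the actual repository name is not shown on this page):

from huggingface_hub import HfApi

# Hypothetical repo id -- the real repository is not named here.
repo_id = "muverqqw/alinlight"

api = HfApi()
api.upload_file(
    path_or_fileobj="config.json",           # local file to push
    path_in_repo="config.json",              # destination path in the repo
    repo_id=repo_id,
    commit_message="Upload config.json with huggingface_hub",
)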

Files changed (1)
  1. config.json +6 -6
config.json CHANGED
@@ -1,15 +1,15 @@
 {
-  "architectures": [
-    "AlinlightForCausalLM"
-  ],
   "attention_dropout": 0.0,
+  "auto_map": {
+    "AutoConfig": "configuration_alinlight.AlinlightConfig",
+    "AutoModelForCausalLM": "modeling_alinlight.AlinlightForCausalLM"
+  },
   "bos_token_id": 1,
-  "dtype": "bfloat16",
   "eos_token_id": 2,
   "hidden_size": 2048,
   "initializer_range": 0.02,
   "intermediate_size": 5632,
-  "max_position_embeddings": 2048,
+  "max_position_embeddings": 4096,
   "model_type": "alinlight",
   "num_attention_heads": 32,
   "num_hidden_layers": 22,
@@ -20,6 +20,6 @@
   "rope_theta": 10000.0,
   "sliding_window": 4096,
   "transformers_version": "4.57.3",
-  "use_cache": false,
+  "use_cache": true,
   "vocab_size": 128000
 }
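A note on what this change does: the added "auto_map" block tells transformers to resolve the config and model classes from custom code files shipped in the repository (configuration_alinlight.py and modeling_alinlight.py), so loading the model requires opting in with trust_remote_code=True. A minimal loading sketch, again assuming the hypothetical repo id muverqqw/alinlight:

from transformers import AutoConfig, AutoModelForCausalLM

# Hypothetical repo id -- the page does not show the full repository name.
repo_id = "muverqqw/alinlight"

# "auto_map" routes AutoConfig / AutoModelForCausalLM to the custom classes
# in the repo, which is why trust_remote_code=True is required.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)

print(config.max_position_embeddings)  # 4096 after this commit
print(config.use_cache)                # True after this commit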