kumarijy committed on
Commit
4bd6f4d
1 Parent(s): f571b25

Upload config.json


Updating config.json to remove the auto_map entry

Files changed (1)
  1. config.json +1 -6
config.json CHANGED
@@ -4,10 +4,6 @@
     "PhiForCausalLM"
   ],
   "attention_dropout": 0.0,
-  "auto_map": {
-    "AutoConfig": "microsoft/phi-2--configuration_phi.PhiConfig",
-    "AutoModelForCausalLM": "microsoft/phi-2--modeling_phi.PhiForCausalLM"
-  },
   "bos_token_id": 50256,
   "embd_pdrop": 0.0,
   "eos_token_id": 50256,
@@ -15,7 +11,6 @@
   "hidden_size": 2560,
   "initializer_range": 0.02,
   "intermediate_size": 10240,
-  "is_decoder": true,
   "layer_norm_eps": 1e-05,
   "max_position_embeddings": 2048,
   "model_type": "phi",
@@ -29,7 +24,7 @@
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "float16",
-  "transformers_version": "4.39.1",
+  "transformers_version": "4.37.0",
   "use_cache": true,
   "vocab_size": 51200
 }
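
With auto_map removed, the config no longer points AutoConfig/AutoModelForCausalLM at the remote code hosted in microsoft/phi-2; model_type "phi" resolves to the Phi classes that ship with transformers (native since 4.37, matching the recorded transformers_version), so trust_remote_code is not needed. A minimal loading sketch in Python, using microsoft/phi-2 only as a placeholder repo id for wherever this config.json lives:

from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "microsoft/phi-2"  # placeholder: substitute the repo this config.json belongs to

# Without "auto_map", model_type "phi" maps to the built-in PhiConfig/PhiForCausalLM,
# so no trust_remote_code flag is required.
model = AutoModelForCausalLM.from_pretrained(repo_id)
tokenizer = AutoTokenizer.from_pretrained(repo_id)

inputs = tokenizer("def fibonacci(n):", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))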