Xenova (HF staff) committed
Commit 45ab00e · verified · 1 parent: c9498b2

Update config.json

Files changed (1): config.json (+8 −5)
config.json CHANGED

@@ -5,11 +5,6 @@
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
-  "auto_map": {
-    "AutoConfig": "microsoft/Phi-4-mini-instruct--configuration_phi3.Phi3Config",
-    "AutoModelForCausalLM": "microsoft/Phi-4-mini-instruct--modeling_phi3.Phi3ForCausalLM",
-    "AutoTokenizer": "microsoft/Phi-4-mini-instruct--Xenova/gpt-4o"
-  },
   "bos_token_id": 199999,
   "embd_pdrop": 0.0,
   "eos_token_id": 199999,
@@ -139,6 +134,14 @@
   "tie_word_embeddings": true,
   "torch_dtype": "bfloat16",
   "transformers_version": "4.50.0.dev0",
+  "transformers.js_config": {
+    "dtype": "q4f16",
+    "kv_cache_dtype": {
+      "q4f16": "float16",
+      "fp16": "float16"
+    },
+    "use_external_data_format": true
+  },
   "use_cache": true,
   "vocab_size": 200064
 }
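In effect, the commit drops the remote-code "auto_map" entries and adds a "transformers.js_config" block so that Transformers.js (v3, the @huggingface/transformers package) can pick up sensible defaults for this model: q4f16 quantized weights, a float16 KV cache, and the external-data format used when ONNX weights exceed the 2 GB protobuf limit. A minimal usage sketch under those assumptions follows; the model id below is a placeholder for whichever Hub repo ships this config, and the explicit dtype option is shown only to illustrate how the configured default could be overridden.

// Minimal sketch, assuming Transformers.js v3 and an ONNX export of Phi-4-mini-instruct
// on the Hub; "onnx-community/Phi-4-mini-instruct" is a placeholder repo id.
import { pipeline } from "@huggingface/transformers";

// With the "transformers.js_config" defaults added in this commit, no options are strictly
// required: q4f16 weights, a float16 KV cache and external data files are used by default.
const generator = await pipeline("text-generation", "onnx-community/Phi-4-mini-instruct", {
  dtype: "q4f16", // same as the configured default; pass e.g. "fp16" to override it
});

const output = await generator("Explain KV-cache quantization in one sentence.", {
  max_new_tokens: 64,
});
console.log(output);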