RichardErkhov committed
Commit 9c5ef69 · verified · 1 Parent(s): 99ee62c

uploaded model

Files changed (1)
config.json +185 -0
config.json ADDED
@@ -0,0 +1,185 @@
+ {
+   "_name_or_path": "Phi-3-medium-128k-instruct",
+   "architectures": [
+     "Phi3ForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "auto_map": {
+     "AutoConfig": "configuration_phi3.Phi3Config",
+     "AutoModelForCausalLM": "modeling_phi3.Phi3ForCausalLM"
+   },
+   "bos_token_id": 1,
+   "embd_pdrop": 0.0,
+   "eos_token_id": 32000,
+   "hidden_act": "silu",
+   "hidden_size": 5120,
+   "initializer_range": 0.02,
+   "intermediate_size": 17920,
+   "max_position_embeddings": 131072,
+   "model_type": "phi3",
+   "num_attention_heads": 40,
+   "num_hidden_layers": 40,
+   "num_key_value_heads": 10,
+   "original_max_position_embeddings": 4096,
+   "pad_token_id": null,
+   "quantization_config": {
+     "_load_in_4bit": true,
+     "_load_in_8bit": false,
+     "bnb_4bit_compute_dtype": "float32",
+     "bnb_4bit_quant_storage": "uint8",
+     "bnb_4bit_quant_type": "fp4",
+     "bnb_4bit_use_double_quant": false,
+     "llm_int8_enable_fp32_cpu_offload": false,
+     "llm_int8_has_fp16_weight": false,
+     "llm_int8_skip_modules": null,
+     "llm_int8_threshold": 6.0,
+     "load_in_4bit": true,
+     "load_in_8bit": false,
+     "quant_method": "bitsandbytes"
+   },
+   "resid_pdrop": 0.0,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": {
+     "long_factor": [
+       1.0,
+       1.0,
+       1.0,
+       1.0,
+       1.0,
+       1.0,
+       1.0,
+       1.0,
+       1.0,
+       1.0,
+       1.0,
+       1.0,
+       1.0,
+       1.25,
+       1.25,
+       1.5,
+       2.0,
+       2.75,
+       5.75,
+       5.75,
+       6.5,
+       9.25,
+       11.0,
+       13.25,
+       19.25,
+       19.75,
+       19.75,
+       21.25,
+       21.5,
+       26.5,
+       30.0,
+       33.75,
+       35.25,
+       38.5,
+       42.0,
+       42.25,
+       46.0,
+       47.0,
+       50.0,
+       50.5,
+       51.0,
+       52.0,
+       52.75,
+       53.75,
+       54.75,
+       57.0,
+       57.25,
+       58.5,
+       59.25,
+       59.5,
+       62.0,
+       62.5,
+       62.75,
+       63.25,
+       63.25,
+       63.25,
+       63.75,
+       64.0,
+       64.0,
+       64.25,
+       64.5,
+       64.5,
+       65.0,
+       65.0
+     ],
+     "short_factor": [
+       1.0,
+       1.0,
+       1.0,
+       1.0,
+       1.0,
+       1.0,
+       1.01,
+       1.02,
+       1.02,
+       1.04,
+       1.04,
+       1.07,
+       1.07,
+       1.1,
+       1.3000000000000003,
+       1.3000000000000003,
+       1.5000000000000004,
+       1.5700000000000005,
+       1.9000000000000008,
+       2.3100000000000014,
+       2.759999999999992,
+       3.3899999999999784,
+       3.9399999999999666,
+       4.009999999999965,
+       4.289999999999959,
+       4.349999999999958,
+       5.349999999999937,
+       6.659999999999909,
+       7.029999999999901,
+       7.51999999999989,
+       8.00999999999988,
+       8.249999999999876,
+       8.279999999999875,
+       9.629999999999846,
+       9.89999999999984,
+       10.589999999999826,
+       11.049999999999816,
+       11.7899999999998,
+       12.189999999999792,
+       12.889999999999777,
+       13.129999999999772,
+       13.16999999999977,
+       13.20999999999977,
+       13.479999999999764,
+       13.539999999999763,
+       13.779999999999758,
+       13.929999999999755,
+       14.429999999999744,
+       14.759999999999737,
+       15.149999999999729,
+       15.419999999999723,
+       15.53999999999972,
+       15.659999999999718,
+       15.749999999999716,
+       15.759999999999716,
+       15.799999999999715,
+       16.05999999999971,
+       16.079999999999714,
+       16.11999999999972,
+       16.11999999999972,
+       16.18999999999973,
+       16.31999999999975,
+       16.539999999999786,
+       16.799999999999827
+     ],
+     "type": "longrope"
+   },
+   "rope_theta": 10000.0,
+   "sliding_window": 131072,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float16",
+   "transformers_version": "4.44.2",
+   "use_cache": true,
+   "vocab_size": 32064
+ }
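
For reference, the embedded "quantization_config" block indicates the weights were serialized with bitsandbytes 4-bit (fp4) quantization, so the checkpoint loads in 4-bit without any extra arguments. Below is a minimal loading sketch; the repo id is a hypothetical placeholder for whichever repository holds this config.json and its weights, and the explicit BitsAndBytesConfig simply mirrors the embedded block.

```python
# Minimal loading sketch (transformers + bitsandbytes).
# REPO_ID is a placeholder -- substitute the repository that contains this config.json.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig

REPO_ID = "RichardErkhov/Phi-3-medium-128k-instruct-4bit"  # hypothetical repo id

# Mirrors the "quantization_config" block above; since that block is already
# embedded in config.json, passing it again here is optional.
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="fp4",
    bnb_4bit_compute_dtype=torch.float32,
    bnb_4bit_use_double_quant=False,
)

tokenizer = AutoTokenizer.from_pretrained(REPO_ID)
model = AutoModelForCausalLM.from_pretrained(
    REPO_ID,
    quantization_config=bnb_config,
    device_map="auto",       # place the 4-bit weights on available devices
    trust_remote_code=True,  # honors the "auto_map" entries in the config
)

prompt = "<|user|>\nSummarize LongRoPE in one sentence.<|end|>\n<|assistant|>\n"
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
out = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(out[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True))
```

The "rope_scaling" block is what stretches the original 4096-token context to 131072. Each of the 64 entries in long_factor/short_factor rescales one rotary frequency pair (head_dim = 5120 / 40 = 128, i.e. 64 pairs); the short factors apply within the original context length and the long factors beyond it. A sketch of the arithmetic, following the longrope implementation in the pinned transformers version (4.44.2), assuming this config.json sits in the working directory:

```python
# Sketch: how the "longrope" factors rescale the rotary frequencies.
import json
import math

import torch

cfg = json.load(open("config.json"))
head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]      # 5120 // 40 = 128
base = cfg["rope_theta"]                                         # 10000.0
long_factor = torch.tensor(cfg["rope_scaling"]["long_factor"])   # 64 values

# One rescale factor per rotary frequency pair: dividing inv_freq by a
# factor > 1 slows that dimension's rotation so it spans longer distances.
inv_freq = 1.0 / (long_factor * base ** (torch.arange(0, head_dim, 2) / head_dim))

# Beyond the original context, cos/sin are additionally scaled by a global
# factor derived from the 32x context extension.
scale = cfg["max_position_embeddings"] / cfg["original_max_position_embeddings"]
attn_scale = math.sqrt(1 + math.log(scale) / math.log(cfg["original_max_position_embeddings"]))
print(len(long_factor), head_dim // 2, round(attn_scale, 4))     # 64 64 1.1902
```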