mtzig committed on
Commit
76d1548
1 parent: 746e076

Model save

README.md ADDED
@@ -0,0 +1,212 @@
+ ---
+ base_model: peiyi9979/math-shepherd-mistral-7b-prm
+ library_name: peft
+ metrics:
+ - accuracy
+ - precision
+ - recall
+ - f1
+ tags:
+ - generated_from_trainer
+ model-index:
+ - name: v1_mistral_lora_real
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # v1_mistral_lora_real
+
+ This model is a fine-tuned version of [peiyi9979/math-shepherd-mistral-7b-prm](https://huggingface.co/peiyi9979/math-shepherd-mistral-7b-prm) on an unknown dataset.
+ It achieves the following results on the evaluation set:
+ - Loss: 0.0002
+ - Accuracy: 1.0
+ - Precision: 1.0
+ - Recall: 1.0
+ - F1: 1.0
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 2e-05
+ - train_batch_size: 8
+ - eval_batch_size: 8
+ - seed: 42
+ - distributed_type: multi-GPU
+ - num_devices: 4
+ - gradient_accumulation_steps: 2
+ - total_train_batch_size: 64
+ - total_eval_batch_size: 32
+ - optimizer: AdamW (`adamw_torch`) with betas=(0.9, 0.999) and epsilon=1e-08; no additional optimizer arguments
+ - lr_scheduler_type: cosine
+ - lr_scheduler_warmup_ratio: 0.1
+ - num_epochs: 1
+
+ ### Training results
+
+ | Training Loss | Epoch | Step | Validation Loss | Accuracy | Precision | Recall | F1 |
+ |:-------------:|:------:|:----:|:---------------:|:--------:|:---------:|:------:|:------:|
+ | 0.7125 | 0.0071 | 10 | 0.5957 | 0.6805 | 0.5472 | 0.6915 | 0.6110 |
+ | 0.7473 | 0.0143 | 20 | 0.5921 | 0.6931 | 0.5622 | 0.6965 | 0.6222 |
+ | 0.6843 | 0.0214 | 30 | 0.5800 | 0.7094 | 0.5855 | 0.6816 | 0.6299 |
+ | 0.7083 | 0.0285 | 40 | 0.5597 | 0.7401 | 0.6432 | 0.6368 | 0.6400 |
+ | 0.6862 | 0.0357 | 50 | 0.5293 | 0.7780 | 0.7216 | 0.6318 | 0.6737 |
+ | 0.626 | 0.0428 | 60 | 0.4788 | 0.8267 | 0.8107 | 0.6816 | 0.7405 |
+ | 0.4406 | 0.0499 | 70 | 0.4027 | 0.8917 | 0.8653 | 0.8308 | 0.8477 |
+ | 0.46 | 0.0571 | 80 | 0.2929 | 0.9386 | 0.9154 | 0.9154 | 0.9154 |
+ | 0.3254 | 0.0642 | 90 | 0.1629 | 0.9819 | 0.9848 | 0.9652 | 0.9749 |
+ | 0.2359 | 0.0714 | 100 | 0.0554 | 0.9982 | 0.9950 | 1.0 | 0.9975 |
+ | 0.263 | 0.0785 | 110 | 0.0200 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.228 | 0.0856 | 120 | 0.0094 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.2553 | 0.0928 | 130 | 0.0114 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1633 | 0.0999 | 140 | 0.0083 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.145 | 0.1070 | 150 | 0.0087 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1409 | 0.1142 | 160 | 0.0041 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1955 | 0.1213 | 170 | 0.0042 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1628 | 0.1284 | 180 | 0.0036 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1454 | 0.1356 | 190 | 0.0019 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1311 | 0.1427 | 200 | 0.0044 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1937 | 0.1498 | 210 | 0.0035 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1059 | 0.1570 | 220 | 0.0020 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1352 | 0.1641 | 230 | 0.0023 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1491 | 0.1712 | 240 | 0.0019 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1245 | 0.1784 | 250 | 0.0012 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1354 | 0.1855 | 260 | 0.0012 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1177 | 0.1927 | 270 | 0.0012 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1424 | 0.1998 | 280 | 0.0008 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1343 | 0.2069 | 290 | 0.0008 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1567 | 0.2141 | 300 | 0.0010 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1094 | 0.2212 | 310 | 0.0009 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1537 | 0.2283 | 320 | 0.0006 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1344 | 0.2355 | 330 | 0.0006 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1286 | 0.2426 | 340 | 0.0006 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.142 | 0.2497 | 350 | 0.0006 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1177 | 0.2569 | 360 | 0.0009 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1383 | 0.2640 | 370 | 0.0009 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1647 | 0.2711 | 380 | 0.0004 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0803 | 0.2783 | 390 | 0.0005 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1476 | 0.2854 | 400 | 0.0004 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1003 | 0.2925 | 410 | 0.0005 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1122 | 0.2997 | 420 | 0.0004 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1867 | 0.3068 | 430 | 0.0003 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1216 | 0.3139 | 440 | 0.0005 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1288 | 0.3211 | 450 | 0.0006 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1243 | 0.3282 | 460 | 0.0005 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1127 | 0.3354 | 470 | 0.0003 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0775 | 0.3425 | 480 | 0.0003 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1246 | 0.3496 | 490 | 0.0004 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0864 | 0.3568 | 500 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1241 | 0.3639 | 510 | 0.0004 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.109 | 0.3710 | 520 | 0.0003 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1117 | 0.3782 | 530 | 0.0004 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1137 | 0.3853 | 540 | 0.0003 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1193 | 0.3924 | 550 | 0.0006 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1209 | 0.3996 | 560 | 0.0007 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0934 | 0.4067 | 570 | 0.0007 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1276 | 0.4138 | 580 | 0.0005 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0851 | 0.4210 | 590 | 0.0004 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1056 | 0.4281 | 600 | 0.0005 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0951 | 0.4352 | 610 | 0.0004 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1308 | 0.4424 | 620 | 0.0004 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0814 | 0.4495 | 630 | 0.0004 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0696 | 0.4567 | 640 | 0.0004 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0721 | 0.4638 | 650 | 0.0004 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0962 | 0.4709 | 660 | 0.0003 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0829 | 0.4781 | 670 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1158 | 0.4852 | 680 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0949 | 0.4923 | 690 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1287 | 0.4995 | 700 | 0.0003 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0834 | 0.5066 | 710 | 0.0003 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.099 | 0.5137 | 720 | 0.0003 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.12 | 0.5209 | 730 | 0.0004 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0571 | 0.5280 | 740 | 0.0003 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1133 | 0.5351 | 750 | 0.0004 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1178 | 0.5423 | 760 | 0.0003 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0866 | 0.5494 | 770 | 0.0004 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0964 | 0.5565 | 780 | 0.0003 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1165 | 0.5637 | 790 | 0.0004 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1174 | 0.5708 | 800 | 0.0003 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1468 | 0.5780 | 810 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1128 | 0.5851 | 820 | 0.0004 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1446 | 0.5922 | 830 | 0.0003 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0961 | 0.5994 | 840 | 0.0003 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0736 | 0.6065 | 850 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0847 | 0.6136 | 860 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.139 | 0.6208 | 870 | 0.0003 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0775 | 0.6279 | 880 | 0.0003 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0916 | 0.6350 | 890 | 0.0003 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0944 | 0.6422 | 900 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1242 | 0.6493 | 910 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0975 | 0.6564 | 920 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0896 | 0.6636 | 930 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1359 | 0.6707 | 940 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0905 | 0.6778 | 950 | 0.0003 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1045 | 0.6850 | 960 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0806 | 0.6921 | 970 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1121 | 0.6993 | 980 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1184 | 0.7064 | 990 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0945 | 0.7135 | 1000 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1041 | 0.7207 | 1010 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0912 | 0.7278 | 1020 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1167 | 0.7349 | 1030 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0952 | 0.7421 | 1040 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1048 | 0.7492 | 1050 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0877 | 0.7563 | 1060 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1051 | 0.7635 | 1070 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1027 | 0.7706 | 1080 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0802 | 0.7777 | 1090 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1118 | 0.7849 | 1100 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.109 | 0.7920 | 1110 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.097 | 0.7991 | 1120 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1045 | 0.8063 | 1130 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0872 | 0.8134 | 1140 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1075 | 0.8205 | 1150 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1322 | 0.8277 | 1160 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1056 | 0.8348 | 1170 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0884 | 0.8420 | 1180 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1284 | 0.8491 | 1190 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1099 | 0.8562 | 1200 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1023 | 0.8634 | 1210 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.086 | 0.8705 | 1220 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0877 | 0.8776 | 1230 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1032 | 0.8848 | 1240 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1446 | 0.8919 | 1250 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1079 | 0.8990 | 1260 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0716 | 0.9062 | 1270 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1181 | 0.9133 | 1280 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1087 | 0.9204 | 1290 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.086 | 0.9276 | 1300 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.071 | 0.9347 | 1310 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0858 | 0.9418 | 1320 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0859 | 0.9490 | 1330 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1165 | 0.9561 | 1340 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1189 | 0.9633 | 1350 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.142 | 0.9704 | 1360 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1336 | 0.9775 | 1370 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.1183 | 0.9847 | 1380 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.0961 | 0.9918 | 1390 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+ | 0.076 | 0.9989 | 1400 | 0.0002 | 1.0 | 1.0 | 1.0 | 1.0 |
+
+
+ ### Framework versions
+
+ - PEFT 0.12.0
+ - Transformers 4.46.0
+ - PyTorch 2.4.0+cu118
+ - Datasets 3.0.0
+ - Tokenizers 0.20.1
adapter_config.json ADDED
@@ -0,0 +1,29 @@
+ {
+   "alpha_pattern": {},
+   "auto_mapping": null,
+   "base_model_name_or_path": "peiyi9979/math-shepherd-mistral-7b-prm",
+   "bias": "none",
+   "fan_in_fan_out": false,
+   "inference_mode": true,
+   "init_lora_weights": true,
+   "layer_replication": null,
+   "layers_pattern": null,
+   "layers_to_transform": null,
+   "loftq_config": {},
+   "lora_alpha": 32,
+   "lora_dropout": 0.05,
+   "megatron_config": null,
+   "megatron_core": "megatron.core",
+   "modules_to_save": null,
+   "peft_type": "LORA",
+   "r": 16,
+   "rank_pattern": {},
+   "revision": null,
+   "target_modules": [
+     "q_proj",
+     "v_proj"
+   ],
+   "task_type": "CAUSAL_LM",
+   "use_dora": false,
+   "use_rslora": false
+ }
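The JSON above maps directly onto a `peft.LoraConfig`; a sketch of the equivalent object (rank-16 LoRA on the attention query and value projections), keeping only the fields that differ from peft's defaults:

```python
from peft import LoraConfig

# Mirrors adapter_config.json: r=16, alpha=32, dropout=0.05,
# targeting q_proj/v_proj in a causal-LM setup.
config = LoraConfig(
    r=16,
    lora_alpha=32,
    lora_dropout=0.05,
    bias="none",
    target_modules=["q_proj", "v_proj"],
    task_type="CAUSAL_LM",
)
```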
adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aa84fb77f9affdb193e3c3dc14b8f19d72a96ce92cbe23fcd296de33e5d7da74
+ size 27280152
special_tokens_map.json ADDED
@@ -0,0 +1,24 @@
+ {
+   "bos_token": {
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": "</s>",
+   "unk_token": {
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dadfd56d766715c61d2ef780a525ab43b8e6da4de6865bda3d95fdef5e134055
+ size 493443
tokenizer_config.json ADDED
@@ -0,0 +1,42 @@
+ {
+   "add_bos_token": true,
+   "add_eos_token": false,
+   "add_prefix_space": true,
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<unk>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "<s>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "</s>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "bos_token": "<s>",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "</s>",
+   "legacy": true,
+   "model_max_length": 1000000000000000019884624838656,
+   "pad_token": "</s>",
+   "sp_model_kwargs": {},
+   "spaces_between_special_tokens": false,
+   "tokenizer_class": "LlamaTokenizer",
+   "unk_token": "<unk>",
+   "use_default_system_prompt": true
+ }
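Note that both tokenizer configs reuse the eos token `</s>` as `pad_token` (the Mistral tokenizer ships with no dedicated pad token). A sketch of reproducing that setting when loading the tokenizer from the base model rather than from these files:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("peiyi9979/math-shepherd-mistral-7b-prm")
tok.pad_token = tok.eos_token  # matches pad_token: "</s>" in the JSON above
```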
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2b51ffb72f3e2054367f95d4b5944bccc7f5325c3a1d420776a6633ebbd73086
+ size 5240
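`training_args.bin` is the pickled `transformers.TrainingArguments` object saved by the Trainer; one way to inspect it, assuming a compatible `transformers` install (the card lists 4.46.0):

```python
import torch

# Unpickle the saved TrainingArguments (trusted file, so weights_only=False).
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.lr_scheduler_type, args.num_train_epochs)
```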