jkazdan committed on
Commit e822e33
1 Parent(s): e759c6f

End of training

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md CHANGED
@@ -1,4 +1,5 @@
  ---
+ library_name: transformers
  license: llama3
  base_model: meta-llama/Meta-Llama-3-8B-Instruct
  tags:
@@ -17,8 +18,8 @@ should probably proofread and complete it, then remove this comment. -->

  This model is a fine-tuned version of [meta-llama/Meta-Llama-3-8B-Instruct](https://huggingface.co/meta-llama/Meta-Llama-3-8B-Instruct) on an unknown dataset.
  It achieves the following results on the evaluation set:
- - Loss: 0.9781
- - Num Input Tokens Seen: 2934502
+ - Loss: 1.0822
+ - Num Input Tokens Seen: 1876994

  ## Model description

@@ -43,7 +44,7 @@ The following hyperparameters were used during training:
  - seed: 1
  - gradient_accumulation_steps: 16
  - total_train_batch_size: 32
- - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
+ - optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
  - lr_scheduler_type: constant_with_warmup
  - lr_scheduler_warmup_ratio: 0.05
  - num_epochs: 1
@@ -52,103 +53,103 @@ The following hyperparameters were used during training:

  | Training Loss | Epoch | Step | Validation Loss | Input Tokens Seen |
  |:-------------:|:------:|:----:|:---------------:|:-----------------:|
- | No log | 0 | 0 | 1.8098 | 0 |
- | 1.387 | 0.0109 | 5 | 1.7546 | 32168 |
- | 1.2562 | 0.0218 | 10 | 1.5261 | 64664 |
- | 1.0104 | 0.0327 | 15 | 1.3506 | 97040 |
- | 0.9933 | 0.0435 | 20 | 1.1906 | 128834 |
- | 0.8555 | 0.0544 | 25 | 1.1102 | 161302 |
- | 0.8465 | 0.0653 | 30 | 1.0990 | 193968 |
- | 0.9495 | 0.0762 | 35 | 1.0844 | 225898 |
- | 0.8401 | 0.0871 | 40 | 1.0738 | 258394 |
- | 0.7942 | 0.0980 | 45 | 1.0738 | 291148 |
- | 0.7972 | 0.1089 | 50 | 1.0615 | 323308 |
- | 0.7614 | 0.1198 | 55 | 1.0606 | 356326 |
- | 0.8052 | 0.1306 | 60 | 1.0511 | 388718 |
- | 0.7643 | 0.1415 | 65 | 1.0425 | 420672 |
- | 0.7975 | 0.1524 | 70 | 1.0327 | 452214 |
- | 0.8027 | 0.1633 | 75 | 1.0330 | 483870 |
- | 0.8343 | 0.1742 | 80 | 1.0331 | 515510 |
- | 0.79 | 0.1851 | 85 | 1.0458 | 547752 |
- | 0.839 | 0.1960 | 90 | 1.0485 | 580168 |
- | 0.8261 | 0.2069 | 95 | 1.0528 | 611076 |
- | 0.825 | 0.2177 | 100 | 1.0573 | 642838 |
- | 0.8694 | 0.2286 | 105 | 1.0603 | 676056 |
- | 0.7998 | 0.2395 | 110 | 1.0565 | 708496 |
- | 0.8089 | 0.2504 | 115 | 1.0522 | 741764 |
- | 0.8579 | 0.2613 | 120 | 1.0524 | 771762 |
- | 0.8421 | 0.2722 | 125 | 1.0505 | 803780 |
- | 0.6727 | 0.2831 | 130 | 1.0607 | 834380 |
- | 0.8692 | 0.2940 | 135 | 1.0469 | 866220 |
- | 0.847 | 0.3048 | 140 | 1.0483 | 898870 |
- | 0.8103 | 0.3157 | 145 | 1.0387 | 932170 |
- | 0.8359 | 0.3266 | 150 | 1.0398 | 964724 |
- | 0.9368 | 0.3375 | 155 | 1.0388 | 997790 |
- | 0.7656 | 0.3484 | 160 | 1.0358 | 1030960 |
- | 0.8698 | 0.3593 | 165 | 1.0352 | 1063134 |
- | 0.7376 | 0.3702 | 170 | 1.0330 | 1096012 |
- | 0.8225 | 0.3811 | 175 | 1.0294 | 1128436 |
- | 0.7329 | 0.3919 | 180 | 1.0286 | 1161532 |
- | 0.7883 | 0.4028 | 185 | 1.0371 | 1192216 |
- | 0.7597 | 0.4137 | 190 | 1.0261 | 1223714 |
- | 0.7577 | 0.4246 | 195 | 1.0223 | 1256486 |
- | 0.7313 | 0.4355 | 200 | 1.0244 | 1290072 |
- | 0.8375 | 0.4464 | 205 | 1.0294 | 1321776 |
- | 0.7395 | 0.4573 | 210 | 1.0240 | 1352838 |
- | 0.7578 | 0.4682 | 215 | 1.0147 | 1384718 |
- | 0.8654 | 0.4790 | 220 | 1.0168 | 1415746 |
- | 0.819 | 0.4899 | 225 | 1.0126 | 1447244 |
- | 0.8432 | 0.5008 | 230 | 1.0192 | 1479468 |
- | 0.9371 | 0.5117 | 235 | 1.0132 | 1510860 |
- | 0.7749 | 0.5226 | 240 | 1.0115 | 1542256 |
- | 0.7679 | 0.5335 | 245 | 1.0135 | 1573038 |
- | 0.7515 | 0.5444 | 250 | 1.0060 | 1606600 |
- | 0.7829 | 0.5553 | 255 | 1.0059 | 1638998 |
- | 0.7751 | 0.5661 | 260 | 1.0067 | 1670240 |
- | 0.7229 | 0.5770 | 265 | 1.0029 | 1700148 |
- | 0.8498 | 0.5879 | 270 | 0.9999 | 1731224 |
- | 0.9144 | 0.5988 | 275 | 1.0018 | 1761712 |
- | 0.8438 | 0.6097 | 280 | 1.0035 | 1793444 |
- | 0.7066 | 0.6206 | 285 | 0.9984 | 1824672 |
- | 0.7527 | 0.6315 | 290 | 1.0023 | 1858206 |
- | 0.8225 | 0.6424 | 295 | 0.9987 | 1888172 |
- | 0.7976 | 0.6532 | 300 | 0.9961 | 1920298 |
- | 0.8051 | 0.6641 | 305 | 0.9966 | 1952758 |
- | 0.7082 | 0.6750 | 310 | 0.9952 | 1985032 |
- | 0.7489 | 0.6859 | 315 | 0.9958 | 2016502 |
- | 0.84 | 0.6968 | 320 | 0.9908 | 2049142 |
- | 0.7095 | 0.7077 | 325 | 0.9923 | 2082254 |
- | 0.7626 | 0.7186 | 330 | 0.9948 | 2113298 |
- | 0.7547 | 0.7295 | 335 | 0.9891 | 2145146 |
- | 0.8306 | 0.7403 | 340 | 0.9927 | 2176912 |
- | 0.8065 | 0.7512 | 345 | 0.9891 | 2208952 |
- | 0.7643 | 0.7621 | 350 | 0.9878 | 2242188 |
- | 0.7859 | 0.7730 | 355 | 0.9888 | 2274492 |
- | 0.8576 | 0.7839 | 360 | 0.9862 | 2307140 |
- | 0.8689 | 0.7948 | 365 | 0.9830 | 2339610 |
- | 0.7346 | 0.8057 | 370 | 0.9830 | 2371140 |
- | 0.8332 | 0.8165 | 375 | 0.9876 | 2401698 |
- | 0.7526 | 0.8274 | 380 | 0.9836 | 2434646 |
- | 0.7607 | 0.8383 | 385 | 0.9815 | 2467080 |
- | 0.7993 | 0.8492 | 390 | 0.9833 | 2498876 |
- | 0.7781 | 0.8601 | 395 | 0.9814 | 2530312 |
- | 0.858 | 0.8710 | 400 | 0.9879 | 2562024 |
- | 0.8173 | 0.8819 | 405 | 0.9826 | 2595468 |
- | 0.8566 | 0.8928 | 410 | 0.9833 | 2627242 |
- | 0.6807 | 0.9036 | 415 | 0.9836 | 2658288 |
- | 0.731 | 0.9145 | 420 | 0.9776 | 2690156 |
- | 0.7322 | 0.9254 | 425 | 0.9777 | 2721878 |
- | 0.7629 | 0.9363 | 430 | 0.9777 | 2753406 |
- | 0.7851 | 0.9472 | 435 | 0.9773 | 2785530 |
- | 0.7431 | 0.9581 | 440 | 0.9789 | 2817102 |
- | 0.8767 | 0.9690 | 445 | 0.9774 | 2849490 |
- | 0.7409 | 0.9799 | 450 | 0.9766 | 2878956 |
- | 0.7381 | 0.9907 | 455 | 0.9793 | 2909838 |
+ | No log | 0 | 0 | 1.8595 | 0 |
+ | 1.8158 | 0.0109 | 5 | 1.7935 | 20946 |
+ | 1.6847 | 0.0218 | 10 | 1.5582 | 42412 |
+ | 1.5448 | 0.0327 | 15 | 1.3884 | 62414 |
+ | 1.3886 | 0.0435 | 20 | 1.2654 | 83992 |
+ | 1.2579 | 0.0544 | 25 | 1.2196 | 105384 |
+ | 1.2086 | 0.0653 | 30 | 1.1955 | 126430 |
+ | 1.1648 | 0.0762 | 35 | 1.1802 | 148392 |
+ | 1.1839 | 0.0871 | 40 | 1.1767 | 170026 |
+ | 1.245 | 0.0980 | 45 | 1.1691 | 189466 |
+ | 1.1204 | 0.1089 | 50 | 1.1633 | 210934 |
+ | 1.119 | 0.1198 | 55 | 1.1597 | 231512 |
+ | 1.2153 | 0.1306 | 60 | 1.1576 | 251330 |
+ | 1.144 | 0.1415 | 65 | 1.1520 | 272504 |
+ | 1.1354 | 0.1524 | 70 | 1.1475 | 292440 |
+ | 1.2145 | 0.1633 | 75 | 1.1443 | 312744 |
+ | 1.2003 | 0.1742 | 80 | 1.1448 | 333538 |
+ | 1.2242 | 0.1851 | 85 | 1.1421 | 352234 |
+ | 1.2166 | 0.1960 | 90 | 1.1414 | 373406 |
+ | 1.2393 | 0.2069 | 95 | 1.1375 | 392334 |
+ | 1.0825 | 0.2177 | 100 | 1.1375 | 413458 |
+ | 1.2477 | 0.2286 | 105 | 1.1347 | 434078 |
+ | 1.1855 | 0.2395 | 110 | 1.1359 | 453560 |
+ | 1.1766 | 0.2504 | 115 | 1.1305 | 474784 |
+ | 1.2057 | 0.2613 | 120 | 1.1320 | 493432 |
+ | 1.1378 | 0.2722 | 125 | 1.1280 | 514710 |
+ | 1.1941 | 0.2831 | 130 | 1.1291 | 531744 |
+ | 1.163 | 0.2940 | 135 | 1.1232 | 553414 |
+ | 1.1052 | 0.3048 | 140 | 1.1224 | 573916 |
+ | 1.1096 | 0.3157 | 145 | 1.1235 | 595060 |
+ | 1.2361 | 0.3266 | 150 | 1.1197 | 616710 |
+ | 1.1427 | 0.3375 | 155 | 1.1195 | 639352 |
+ | 1.0315 | 0.3484 | 160 | 1.1183 | 660230 |
+ | 1.157 | 0.3593 | 165 | 1.1166 | 680948 |
+ | 1.0344 | 0.3702 | 170 | 1.1167 | 702870 |
+ | 1.1532 | 0.3811 | 175 | 1.1176 | 721310 |
+ | 1.1773 | 0.3919 | 180 | 1.1175 | 740736 |
+ | 1.114 | 0.4028 | 185 | 1.1180 | 760292 |
+ | 1.1151 | 0.4137 | 190 | 1.1139 | 780138 |
+ | 1.0878 | 0.4246 | 195 | 1.1122 | 799648 |
+ | 1.0729 | 0.4355 | 200 | 1.1120 | 822366 |
+ | 1.1906 | 0.4464 | 205 | 1.1135 | 843150 |
+ | 1.1127 | 0.4573 | 210 | 1.1093 | 863468 |
+ | 1.1262 | 0.4682 | 215 | 1.1068 | 885336 |
+ | 1.1511 | 0.4790 | 220 | 1.1095 | 905900 |
+ | 1.1861 | 0.4899 | 225 | 1.1071 | 925202 |
+ | 1.1715 | 0.5008 | 230 | 1.1065 | 944982 |
+ | 1.1929 | 0.5117 | 235 | 1.1079 | 965830 |
+ | 1.2315 | 0.5226 | 240 | 1.1056 | 986228 |
+ | 1.0892 | 0.5335 | 245 | 1.1038 | 1005272 |
+ | 1.2006 | 0.5444 | 250 | 1.1051 | 1024828 |
+ | 1.1198 | 0.5553 | 255 | 1.1022 | 1044680 |
+ | 1.1487 | 0.5661 | 260 | 1.1035 | 1063556 |
+ | 1.0926 | 0.5770 | 265 | 1.1044 | 1082148 |
+ | 1.1615 | 0.5879 | 270 | 1.1000 | 1102496 |
+ | 1.1614 | 0.5988 | 275 | 1.0996 | 1122428 |
+ | 1.1651 | 0.6097 | 280 | 1.1005 | 1141640 |
+ | 1.1455 | 0.6206 | 285 | 1.1003 | 1161164 |
+ | 1.0627 | 0.6315 | 290 | 1.0994 | 1182698 |
+ | 1.0977 | 0.6424 | 295 | 1.1016 | 1201410 |
+ | 1.2317 | 0.6532 | 300 | 1.0978 | 1223096 |
+ | 1.1498 | 0.6641 | 305 | 1.0972 | 1245102 |
+ | 1.1217 | 0.6750 | 310 | 1.0984 | 1265102 |
+ | 1.1195 | 0.6859 | 315 | 1.0959 | 1285046 |
+ | 1.1083 | 0.6968 | 320 | 1.0943 | 1307630 |
+ | 1.1245 | 0.7077 | 325 | 1.0946 | 1329088 |
+ | 1.1304 | 0.7186 | 330 | 1.0972 | 1349756 |
+ | 1.189 | 0.7295 | 335 | 1.0931 | 1371334 |
+ | 1.2123 | 0.7403 | 340 | 1.0920 | 1390834 |
+ | 1.2097 | 0.7512 | 345 | 1.0955 | 1412480 |
+ | 1.1214 | 0.7621 | 350 | 1.0945 | 1434550 |
+ | 1.1405 | 0.7730 | 355 | 1.0922 | 1454898 |
+ | 1.0466 | 0.7839 | 360 | 1.0911 | 1476780 |
+ | 1.2573 | 0.7948 | 365 | 1.0901 | 1497726 |
+ | 1.0921 | 0.8057 | 370 | 1.0903 | 1519272 |
+ | 1.1463 | 0.8165 | 375 | 1.0911 | 1538004 |
+ | 1.0416 | 0.8274 | 380 | 1.0918 | 1557616 |
+ | 1.1032 | 0.8383 | 385 | 1.0884 | 1578570 |
+ | 1.0888 | 0.8492 | 390 | 1.0890 | 1599416 |
+ | 1.203 | 0.8601 | 395 | 1.0885 | 1619296 |
+ | 1.1321 | 0.8710 | 400 | 1.0880 | 1640102 |
+ | 1.218 | 0.8819 | 405 | 1.0876 | 1659280 |
+ | 1.1102 | 0.8928 | 410 | 1.0873 | 1680314 |
+ | 1.0307 | 0.9036 | 415 | 1.0855 | 1699560 |
+ | 1.1172 | 0.9145 | 420 | 1.0855 | 1720560 |
+ | 1.1144 | 0.9254 | 425 | 1.0854 | 1740832 |
+ | 1.095 | 0.9363 | 430 | 1.0870 | 1760898 |
+ | 1.1795 | 0.9472 | 435 | 1.0847 | 1781172 |
+ | 1.0506 | 0.9581 | 440 | 1.0853 | 1802078 |
+ | 1.1573 | 0.9690 | 445 | 1.0877 | 1823140 |
+ | 1.0358 | 0.9799 | 450 | 1.0839 | 1842196 |
+ | 1.0229 | 0.9907 | 455 | 1.0830 | 1862122 |


  ### Framework versions

- - Transformers 4.44.0
- - Pytorch 2.4.0+cu121
+ - Transformers 4.46.0
+ - Pytorch 2.4.1.post300
  - Datasets 2.20.0
- - Tokenizers 0.19.1
+ - Tokenizers 0.20.1
config.json CHANGED
@@ -7,6 +7,7 @@
  "attention_dropout": 0.0,
  "bos_token_id": 128000,
  "eos_token_id": 128009,
+ "head_dim": 128,
  "hidden_act": "silu",
  "hidden_size": 4096,
  "initializer_range": 0.02,
@@ -23,7 +24,7 @@
  "rope_theta": 500000.0,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
- "transformers_version": "4.44.0",
+ "transformers_version": "4.46.0",
  "use_cache": true,
  "vocab_size": 128257
  }
generation_config.json CHANGED
@@ -8,5 +8,5 @@
  "max_length": 4096,
  "temperature": 0.6,
  "top_p": 0.9,
- "transformers_version": "4.44.0"
+ "transformers_version": "4.46.0"
  }
model-00001-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:f61a68f4ba8df91f9153216e6d9b8b552731565f1a9b9bb1833ede9760512921
+ oid sha256:b9d4f28d7fa29f73e872fd5f9788144e04eb142dfa75b0f7bab255ef16ef8457
  size 4976706864
model-00002-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:cbf8627649715ab0d8f808238f0d06c9d417705b6edb6dd7e4b996a53aa44792
+ oid sha256:e37a5afc10542121112e4416c52be958e13fbc810630e72c6d4fbf91ea34d656
  size 4999802720
model-00003-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:f3be04c95c8e194a6761a4b20b2098999d837ade4250e9df354d56db7fbb702d
+ oid sha256:d6d57ce7c92cdd8e528b21f52c4174027ab025ddee8b43aefc1fec705ffbab04
  size 4915916176
model-00004-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:a6cbe00b8d24ba8665772ef76bdd4c51ed9313003dadb973895b8ceac6d5343c
+ oid sha256:5f42a7692ad516ba30b143a735d2241bc4f4a681165f29a0aa15dca15c633c32
  size 1168147000
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:6f8859f406975484b7880776182c70f2c0aa78a26040e0d777b73fe232157aa9
- size 5560
+ oid sha256:cb9d8889a732465b97c71578a542d9d03f5f4e7e0b5b13582118a6344bd23931
+ size 5624