mtzig committed (verified)
Commit: d5ae2dd
1 Parent(s): 7a1a721

Training in progress, step 300, checkpoint

last-checkpoint/optimizer_0/.metadata CHANGED
Binary files a/last-checkpoint/optimizer_0/.metadata and b/last-checkpoint/optimizer_0/.metadata differ
 
last-checkpoint/optimizer_0/__0_0.distcp CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:fbbc1c7e7a7429a7796411fc0c4454088732261cc44718689c5c3f1d3c220140
+ oid sha256:aa94ec7fc25e7a6ee26ffbbee034689965f336821b064b9f9e0f5dc2f6c05a21
  size 27566236
last-checkpoint/optimizer_0/__1_0.distcp CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:12b70a6d59de39c1ac9ebd518e263490060edb1b621c79101068abb00646faee
+ oid sha256:442607c5229133e53cd00ee913ff5d44cebaf82c6c20f843b24d268fb4a436af
  size 27630900
last-checkpoint/optimizer_0/__2_0.distcp CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:de909f27d4300564cf0b471d3773e52a858288ea463ce5f53212c3dd9c087df4
+ oid sha256:dd8d46607d03e57c5376b8610387ec2fcd94514faf917ee762afcbbb96dc811b
  size 27622392
last-checkpoint/optimizer_0/__3_0.distcp CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:e5598f7df16536fbb6676ced3e7e2242363f463b01a2fe5d1951bd4b83cf4055
+ oid sha256:459e2897a31e6241cdd6bb0e2e2ba82db83a9a33d01874d52b25108986341565
  size 27622392
last-checkpoint/pytorch_model_fsdp_0/.metadata CHANGED
Binary files a/last-checkpoint/pytorch_model_fsdp_0/.metadata and b/last-checkpoint/pytorch_model_fsdp_0/.metadata differ
 
last-checkpoint/pytorch_model_fsdp_0/__0_0.distcp CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:4746011446bef6200a807d99f18f436e033643622b83d80beb00732000090dd4
+ oid sha256:6a4cd00ad97e87fe831d1bdd5ce43db8c2cd9365cf21907f488bae601ba88367
  size 13782528
last-checkpoint/pytorch_model_fsdp_0/__1_0.distcp CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:7db79f216edd549a193a0aaa43cfb79f3f07eea044dd098e1715566d36434e5c
+ oid sha256:6299f59ecc2a7b18770108f685e07e657c3c9e93ace7b10cbbaeca1a530f4c90
  size 13782528
last-checkpoint/pytorch_model_fsdp_0/__2_0.distcp CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:4db4eda629c9673f49fdc2d225aa1d21c4395e6303413453ee26b0ad90090db3
+ oid sha256:e9868d8cd622192535fce3348430733d8e14fcbe9d92ed5bbb4ad802e3299996
  size 13782528
last-checkpoint/pytorch_model_fsdp_0/__3_0.distcp CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:5a72dc5d692851d842c9b67e855e864eb56c9bd648db33c02c3b27a6aa36a063
+ oid sha256:0b8d30fea35f5dc3498b7f3e5ffb9dd3408ba45c34eb925feec55d7455ab343f
  size 13782528
last-checkpoint/rng_state_0.pth CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:805afa176b455b67a891f7c63c255879dd3a372d6c9fa2140f3c0a2149d52710
+ oid sha256:6185843c50764de20922699c89193c33e1e13037719a5d55479aa190e715e4fc
  size 14960
last-checkpoint/rng_state_1.pth CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:656385b8033d1cc9de4c8239cf888e2d83a5db8f95016de71e971858eab1c195
+ oid sha256:35e51ecf57078c2d652964726d8abc8157e10e9fdddf8cacb5700305b465147a
  size 14960
last-checkpoint/rng_state_2.pth CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:8a4775b283f1cbab74e1bfc47bfbe045632e0a9c46d8f354762f3216e862bf61
+ oid sha256:ebe10fe55b3a58ae13fa7a58fca8f2486fa82c4aa360522ee9cde43cc43ba473
  size 14960
last-checkpoint/rng_state_3.pth CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:3ecbc04b6bcc44f7032a40edb9b3c06e3acf5ba0f1fb508b9a44802995aad5b9
+ oid sha256:fcda73faaa8d5a9ab0a72d2fef1c1af0341c8e7f8ec0eede744acae39dd22f43
  size 14960
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:88d5a351fddcb4718730dd82c69354176cd179de4c82fa6d41e0282fb5e2ab11
+ oid sha256:a3e47edb1b664bc04c493b0996774157c1ffdb9f0b12df515a0b32829d748704
  size 1064
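
Every file above is stored through Git LFS, so the diff only shows the three-line pointer (version, oid sha256, size) switching to a new object for each optimizer shard, model shard, RNG state, and the scheduler. As a minimal sketch of how a downloaded shard could be checked against its pointer (the pointer format comes from the diff itself; the file paths below are hypothetical):

```python
import hashlib
from pathlib import Path

def read_lfs_pointer(pointer_path: str) -> dict:
    """Parse a Git LFS pointer file: version / oid sha256:<hex> / size <bytes>."""
    fields = {}
    for line in Path(pointer_path).read_text().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return {
        "sha256": fields["oid"].split(":", 1)[1],
        "size": int(fields["size"]),
    }

def matches_pointer(local_file: str, pointer: dict) -> bool:
    """True if the local file has the size and sha256 recorded in the pointer."""
    data = Path(local_file).read_bytes()
    return len(data) == pointer["size"] and hashlib.sha256(data).hexdigest() == pointer["sha256"]

# Hypothetical paths: a saved copy of the raw pointer vs. the LFS-resolved binary.
ptr = read_lfs_pointer("optimizer_0__0_0.distcp.pointer")
print(matches_pointer("last-checkpoint/optimizer_0/__0_0.distcp", ptr))
```
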
last-checkpoint/trainer_state.json CHANGED
@@ -1,9 +1,9 @@
  {
  "best_metric": null,
  "best_model_checkpoint": null,
- "epoch": 0.5747126436781609,
+ "epoch": 0.8620689655172413,
  "eval_steps": 20,
- "global_step": 200,
+ "global_step": 300,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
@@ -1539,6 +1539,766 @@
  "eval_samples_per_second": 6.483,
  "eval_steps_per_second": 0.245,
  "step": 200
+ },
1543
+ {
1544
+ "epoch": 0.5775862068965517,
1545
+ "grad_norm": 2.6798720359802246,
1546
+ "learning_rate": 9.047925700872552e-06,
1547
+ "loss": 0.3089,
1548
+ "step": 201
1549
+ },
1550
+ {
1551
+ "epoch": 0.5804597701149425,
1552
+ "grad_norm": 1.8285130262374878,
1553
+ "learning_rate": 8.948060899634846e-06,
1554
+ "loss": 0.2951,
1555
+ "step": 202
1556
+ },
1557
+ {
1558
+ "epoch": 0.5833333333333334,
1559
+ "grad_norm": 1.8588842153549194,
1560
+ "learning_rate": 8.848302072078762e-06,
1561
+ "loss": 0.2872,
1562
+ "step": 203
1563
+ },
1564
+ {
1565
+ "epoch": 0.5862068965517241,
1566
+ "grad_norm": 1.7378156185150146,
1567
+ "learning_rate": 8.748659268035339e-06,
1568
+ "loss": 0.2731,
1569
+ "step": 204
1570
+ },
1571
+ {
1572
+ "epoch": 0.5890804597701149,
1573
+ "grad_norm": 2.0161514282226562,
1574
+ "learning_rate": 8.649142525647271e-06,
1575
+ "loss": 0.3388,
1576
+ "step": 205
1577
+ },
1578
+ {
1579
+ "epoch": 0.5919540229885057,
1580
+ "grad_norm": 2.865183115005493,
1581
+ "learning_rate": 8.549761870357633e-06,
1582
+ "loss": 0.3414,
1583
+ "step": 206
1584
+ },
1585
+ {
1586
+ "epoch": 0.5948275862068966,
1587
+ "grad_norm": 2.0526323318481445,
1588
+ "learning_rate": 8.450527313899923e-06,
1589
+ "loss": 0.2896,
1590
+ "step": 207
1591
+ },
1592
+ {
1593
+ "epoch": 0.5977011494252874,
1594
+ "grad_norm": 1.859277367591858,
1595
+ "learning_rate": 8.351448853289448e-06,
1596
+ "loss": 0.3396,
1597
+ "step": 208
1598
+ },
1599
+ {
1600
+ "epoch": 0.6005747126436781,
1601
+ "grad_norm": 1.6364465951919556,
1602
+ "learning_rate": 8.25253646981622e-06,
1603
+ "loss": 0.3009,
1604
+ "step": 209
1605
+ },
1606
+ {
1607
+ "epoch": 0.603448275862069,
1608
+ "grad_norm": 1.590421438217163,
1609
+ "learning_rate": 8.153800128039441e-06,
1610
+ "loss": 0.2851,
1611
+ "step": 210
1612
+ },
1613
+ {
1614
+ "epoch": 0.6063218390804598,
1615
+ "grad_norm": 2.336608648300171,
1616
+ "learning_rate": 8.05524977478364e-06,
1617
+ "loss": 0.3176,
1618
+ "step": 211
1619
+ },
1620
+ {
1621
+ "epoch": 0.6091954022988506,
1622
+ "grad_norm": 2.3211328983306885,
1623
+ "learning_rate": 7.956895338136618e-06,
1624
+ "loss": 0.3106,
1625
+ "step": 212
1626
+ },
1627
+ {
1628
+ "epoch": 0.6120689655172413,
1629
+ "grad_norm": 2.7813518047332764,
1630
+ "learning_rate": 7.858746726449309e-06,
1631
+ "loss": 0.3136,
1632
+ "step": 213
1633
+ },
1634
+ {
1635
+ "epoch": 0.6149425287356322,
1636
+ "grad_norm": 1.71042799949646,
1637
+ "learning_rate": 7.760813827337555e-06,
1638
+ "loss": 0.2486,
1639
+ "step": 214
1640
+ },
1641
+ {
1642
+ "epoch": 0.617816091954023,
1643
+ "grad_norm": 2.9381296634674072,
1644
+ "learning_rate": 7.663106506686057e-06,
1645
+ "loss": 0.3015,
1646
+ "step": 215
1647
+ },
1648
+ {
1649
+ "epoch": 0.6206896551724138,
1650
+ "grad_norm": 1.7143189907073975,
1651
+ "learning_rate": 7.565634607654453e-06,
1652
+ "loss": 0.2992,
1653
+ "step": 216
1654
+ },
1655
+ {
1656
+ "epoch": 0.6235632183908046,
1657
+ "grad_norm": 3.923569679260254,
1658
+ "learning_rate": 7.468407949685695e-06,
1659
+ "loss": 0.3573,
1660
+ "step": 217
1661
+ },
1662
+ {
1663
+ "epoch": 0.6264367816091954,
1664
+ "grad_norm": 2.183392286300659,
1665
+ "learning_rate": 7.371436327516854e-06,
1666
+ "loss": 0.3268,
1667
+ "step": 218
1668
+ },
1669
+ {
1670
+ "epoch": 0.6293103448275862,
1671
+ "grad_norm": 2.3036110401153564,
1672
+ "learning_rate": 7.274729510192367e-06,
1673
+ "loss": 0.3112,
1674
+ "step": 219
1675
+ },
1676
+ {
1677
+ "epoch": 0.632183908045977,
1678
+ "grad_norm": 2.291121006011963,
1679
+ "learning_rate": 7.1782972400798825e-06,
1680
+ "loss": 0.3373,
1681
+ "step": 220
1682
+ },
1683
+ {
1684
+ "epoch": 0.632183908045977,
1685
+ "eval_accuracy": 0.8663366336633663,
1686
+ "eval_f1": 0.7428571428571429,
1687
+ "eval_loss": 0.3174149990081787,
1688
+ "eval_precision": 0.75,
1689
+ "eval_recall": 0.7358490566037735,
1690
+ "eval_runtime": 16.8275,
1691
+ "eval_samples_per_second": 6.299,
1692
+ "eval_steps_per_second": 0.238,
1693
+ "step": 220
1694
+ },
1695
+ {
1696
+ "epoch": 0.6350574712643678,
1697
+ "grad_norm": 2.0122501850128174,
1698
+ "learning_rate": 7.082149231888833e-06,
1699
+ "loss": 0.2819,
1700
+ "step": 221
1701
+ },
1702
+ {
1703
+ "epoch": 0.6379310344827587,
1704
+ "grad_norm": 3.282517194747925,
1705
+ "learning_rate": 6.986295171691727e-06,
1706
+ "loss": 0.3298,
1707
+ "step": 222
1708
+ },
1709
+ {
1710
+ "epoch": 0.6408045977011494,
1711
+ "grad_norm": 2.086409091949463,
1712
+ "learning_rate": 6.890744715948388e-06,
1713
+ "loss": 0.3012,
1714
+ "step": 223
1715
+ },
1716
+ {
1717
+ "epoch": 0.6436781609195402,
1718
+ "grad_norm": 1.70159912109375,
1719
+ "learning_rate": 6.795507490533142e-06,
1720
+ "loss": 0.2959,
1721
+ "step": 224
1722
+ },
1723
+ {
1724
+ "epoch": 0.646551724137931,
1725
+ "grad_norm": 2.0289723873138428,
1726
+ "learning_rate": 6.700593089765086e-06,
1727
+ "loss": 0.3425,
1728
+ "step": 225
1729
+ },
1730
+ {
1731
+ "epoch": 0.6494252873563219,
1732
+ "grad_norm": 1.884710669517517,
1733
+ "learning_rate": 6.606011075441556e-06,
1734
+ "loss": 0.3204,
1735
+ "step": 226
1736
+ },
1737
+ {
1738
+ "epoch": 0.6522988505747126,
1739
+ "grad_norm": 1.501484751701355,
1740
+ "learning_rate": 6.511770975874862e-06,
1741
+ "loss": 0.2775,
1742
+ "step": 227
1743
+ },
1744
+ {
1745
+ "epoch": 0.6551724137931034,
1746
+ "grad_norm": 2.8411877155303955,
1747
+ "learning_rate": 6.417882284932373e-06,
1748
+ "loss": 0.2738,
1749
+ "step": 228
1750
+ },
1751
+ {
1752
+ "epoch": 0.6580459770114943,
1753
+ "grad_norm": 1.941379427909851,
1754
+ "learning_rate": 6.324354461080121e-06,
1755
+ "loss": 0.2666,
1756
+ "step": 229
1757
+ },
1758
+ {
1759
+ "epoch": 0.6609195402298851,
1760
+ "grad_norm": 1.8048228025436401,
1761
+ "learning_rate": 6.231196926429913e-06,
1762
+ "loss": 0.2855,
1763
+ "step": 230
1764
+ },
1765
+ {
1766
+ "epoch": 0.6637931034482759,
1767
+ "grad_norm": 3.2040340900421143,
1768
+ "learning_rate": 6.138419065790169e-06,
1769
+ "loss": 0.3079,
1770
+ "step": 231
1771
+ },
1772
+ {
1773
+ "epoch": 0.6666666666666666,
1774
+ "grad_norm": 2.1029903888702393,
1775
+ "learning_rate": 6.046030225720456e-06,
1776
+ "loss": 0.2396,
1777
+ "step": 232
1778
+ },
1779
+ {
1780
+ "epoch": 0.6695402298850575,
1781
+ "grad_norm": 1.9636800289154053,
1782
+ "learning_rate": 5.95403971358991e-06,
1783
+ "loss": 0.2626,
1784
+ "step": 233
1785
+ },
1786
+ {
1787
+ "epoch": 0.6724137931034483,
1788
+ "grad_norm": 2.6405386924743652,
1789
+ "learning_rate": 5.86245679663962e-06,
1790
+ "loss": 0.3551,
1791
+ "step": 234
1792
+ },
1793
+ {
1794
+ "epoch": 0.6752873563218391,
1795
+ "grad_norm": 1.5251939296722412,
1796
+ "learning_rate": 5.7712907010490036e-06,
1797
+ "loss": 0.2533,
1798
+ "step": 235
1799
+ },
1800
+ {
1801
+ "epoch": 0.6781609195402298,
1802
+ "grad_norm": 2.1209423542022705,
1803
+ "learning_rate": 5.680550611006372e-06,
1804
+ "loss": 0.3079,
1805
+ "step": 236
1806
+ },
1807
+ {
1808
+ "epoch": 0.6810344827586207,
1809
+ "grad_norm": 2.9804978370666504,
1810
+ "learning_rate": 5.590245667783701e-06,
1811
+ "loss": 0.2793,
1812
+ "step": 237
1813
+ },
1814
+ {
1815
+ "epoch": 0.6839080459770115,
1816
+ "grad_norm": 1.8401639461517334,
1817
+ "learning_rate": 5.5003849688157075e-06,
1818
+ "loss": 0.3312,
1819
+ "step": 238
1820
+ },
1821
+ {
1822
+ "epoch": 0.6867816091954023,
1823
+ "grad_norm": 2.289094924926758,
1824
+ "learning_rate": 5.4109775667833866e-06,
1825
+ "loss": 0.3053,
1826
+ "step": 239
1827
+ },
1828
+ {
1829
+ "epoch": 0.6896551724137931,
1830
+ "grad_norm": 2.153110980987549,
1831
+ "learning_rate": 5.322032468702037e-06,
1832
+ "loss": 0.3006,
1833
+ "step": 240
1834
+ },
1835
+ {
1836
+ "epoch": 0.6896551724137931,
1837
+ "eval_accuracy": 0.8564356435643564,
1838
+ "eval_f1": 0.7289719626168224,
1839
+ "eval_loss": 0.3172420263290405,
1840
+ "eval_precision": 0.7222222222222222,
1841
+ "eval_recall": 0.7358490566037735,
1842
+ "eval_runtime": 16.2806,
1843
+ "eval_samples_per_second": 6.511,
1844
+ "eval_steps_per_second": 0.246,
1845
+ "step": 240
1846
+ },
1847
+ {
1848
+ "epoch": 0.6925287356321839,
1849
+ "grad_norm": 1.8454641103744507,
1850
+ "learning_rate": 5.233558635013842e-06,
1851
+ "loss": 0.3068,
1852
+ "step": 241
1853
+ },
1854
+ {
1855
+ "epoch": 0.6954022988505747,
1856
+ "grad_norm": 1.8107268810272217,
1857
+ "learning_rate": 5.145564978685234e-06,
1858
+ "loss": 0.2948,
1859
+ "step": 242
1860
+ },
1861
+ {
1862
+ "epoch": 0.6982758620689655,
1863
+ "grad_norm": 3.891240358352661,
1864
+ "learning_rate": 5.058060364308965e-06,
1865
+ "loss": 0.3284,
1866
+ "step": 243
1867
+ },
1868
+ {
1869
+ "epoch": 0.7011494252873564,
1870
+ "grad_norm": 2.919726610183716,
1871
+ "learning_rate": 4.971053607211069e-06,
1872
+ "loss": 0.3849,
1873
+ "step": 244
1874
+ },
1875
+ {
1876
+ "epoch": 0.7040229885057471,
1877
+ "grad_norm": 2.58359432220459,
1878
+ "learning_rate": 4.884553472562809e-06,
1879
+ "loss": 0.3178,
1880
+ "step": 245
1881
+ },
1882
+ {
1883
+ "epoch": 0.7068965517241379,
1884
+ "grad_norm": 2.011887550354004,
1885
+ "learning_rate": 4.7985686744976714e-06,
1886
+ "loss": 0.2861,
1887
+ "step": 246
1888
+ },
1889
+ {
1890
+ "epoch": 0.7097701149425287,
1891
+ "grad_norm": 2.8338310718536377,
1892
+ "learning_rate": 4.713107875233459e-06,
1893
+ "loss": 0.3335,
1894
+ "step": 247
1895
+ },
1896
+ {
1897
+ "epoch": 0.7126436781609196,
1898
+ "grad_norm": 2.3909761905670166,
1899
+ "learning_rate": 4.628179684199685e-06,
1900
+ "loss": 0.3075,
1901
+ "step": 248
1902
+ },
1903
+ {
1904
+ "epoch": 0.7155172413793104,
1905
+ "grad_norm": 2.0963563919067383,
1906
+ "learning_rate": 4.543792657170228e-06,
1907
+ "loss": 0.3073,
1908
+ "step": 249
1909
+ },
1910
+ {
1911
+ "epoch": 0.7183908045977011,
1912
+ "grad_norm": 2.0717861652374268,
1913
+ "learning_rate": 4.459955295401415e-06,
1914
+ "loss": 0.2974,
1915
+ "step": 250
1916
+ },
1917
+ {
1918
+ "epoch": 0.7212643678160919,
1919
+ "grad_norm": 2.0259382724761963,
1920
+ "learning_rate": 4.376676044775601e-06,
1921
+ "loss": 0.319,
1922
+ "step": 251
1923
+ },
1924
+ {
1925
+ "epoch": 0.7241379310344828,
1926
+ "grad_norm": 1.5886821746826172,
1927
+ "learning_rate": 4.293963294950313e-06,
1928
+ "loss": 0.2938,
1929
+ "step": 252
1930
+ },
1931
+ {
1932
+ "epoch": 0.7270114942528736,
1933
+ "grad_norm": 3.442382335662842,
1934
+ "learning_rate": 4.211825378513066e-06,
1935
+ "loss": 0.3855,
1936
+ "step": 253
1937
+ },
1938
+ {
1939
+ "epoch": 0.7298850574712644,
1940
+ "grad_norm": 1.693603515625,
1941
+ "learning_rate": 4.130270570141931e-06,
1942
+ "loss": 0.3354,
1943
+ "step": 254
1944
+ },
1945
+ {
1946
+ "epoch": 0.7327586206896551,
1947
+ "grad_norm": 2.258274555206299,
1948
+ "learning_rate": 4.0493070857719305e-06,
1949
+ "loss": 0.3418,
1950
+ "step": 255
1951
+ },
1952
+ {
1953
+ "epoch": 0.735632183908046,
1954
+ "grad_norm": 1.6936811208724976,
1955
+ "learning_rate": 3.968943081767358e-06,
1956
+ "loss": 0.2287,
1957
+ "step": 256
1958
+ },
1959
+ {
1960
+ "epoch": 0.7385057471264368,
1961
+ "grad_norm": 2.4249556064605713,
1962
+ "learning_rate": 3.889186654100089e-06,
1963
+ "loss": 0.2983,
1964
+ "step": 257
1965
+ },
1966
+ {
1967
+ "epoch": 0.7413793103448276,
1968
+ "grad_norm": 1.6004093885421753,
1969
+ "learning_rate": 3.81004583753399e-06,
1970
+ "loss": 0.3394,
1971
+ "step": 258
1972
+ },
1973
+ {
1974
+ "epoch": 0.7442528735632183,
1975
+ "grad_norm": 1.6185859441757202,
1976
+ "learning_rate": 3.7315286048154862e-06,
1977
+ "loss": 0.2784,
1978
+ "step": 259
1979
+ },
1980
+ {
1981
+ "epoch": 0.7471264367816092,
1982
+ "grad_norm": 2.6173603534698486,
1983
+ "learning_rate": 3.6536428658703594e-06,
1984
+ "loss": 0.3157,
1985
+ "step": 260
1986
+ },
1987
+ {
1988
+ "epoch": 0.7471264367816092,
1989
+ "eval_accuracy": 0.8638613861386139,
1990
+ "eval_f1": 0.7441860465116279,
1991
+ "eval_loss": 0.31433796882629395,
1992
+ "eval_precision": 0.7339449541284404,
1993
+ "eval_recall": 0.7547169811320755,
1994
+ "eval_runtime": 16.814,
1995
+ "eval_samples_per_second": 6.304,
1996
+ "eval_steps_per_second": 0.238,
1997
+ "step": 260
1998
+ },
1999
+ {
2000
+ "epoch": 0.75,
2001
+ "grad_norm": 2.565181016921997,
2002
+ "learning_rate": 3.576396467006925e-06,
2003
+ "loss": 0.3084,
2004
+ "step": 261
2005
+ },
2006
+ {
2007
+ "epoch": 0.7528735632183908,
2008
+ "grad_norm": 1.734839677810669,
2009
+ "learning_rate": 3.4997971901255588e-06,
2010
+ "loss": 0.2717,
2011
+ "step": 262
2012
+ },
2013
+ {
2014
+ "epoch": 0.7557471264367817,
2015
+ "grad_norm": 2.3014395236968994,
2016
+ "learning_rate": 3.4238527519347353e-06,
2017
+ "loss": 0.2926,
2018
+ "step": 263
2019
+ },
2020
+ {
2021
+ "epoch": 0.7586206896551724,
2022
+ "grad_norm": 2.2998158931732178,
2023
+ "learning_rate": 3.3485708031736698e-06,
2024
+ "loss": 0.2919,
2025
+ "step": 264
2026
+ },
2027
+ {
2028
+ "epoch": 0.7614942528735632,
2029
+ "grad_norm": 1.628414273262024,
2030
+ "learning_rate": 3.2739589278415252e-06,
2031
+ "loss": 0.2951,
2032
+ "step": 265
2033
+ },
2034
+ {
2035
+ "epoch": 0.764367816091954,
2036
+ "grad_norm": 1.8661236763000488,
2037
+ "learning_rate": 3.2000246424334315e-06,
2038
+ "loss": 0.2859,
2039
+ "step": 266
2040
+ },
2041
+ {
2042
+ "epoch": 0.7672413793103449,
2043
+ "grad_norm": 1.9966816902160645,
2044
+ "learning_rate": 3.1267753951832523e-06,
2045
+ "loss": 0.3339,
2046
+ "step": 267
2047
+ },
2048
+ {
2049
+ "epoch": 0.7701149425287356,
2050
+ "grad_norm": 2.3552565574645996,
2051
+ "learning_rate": 3.0542185653132216e-06,
2052
+ "loss": 0.2443,
2053
+ "step": 268
2054
+ },
2055
+ {
2056
+ "epoch": 0.7729885057471264,
2057
+ "grad_norm": 2.6835176944732666,
2058
+ "learning_rate": 2.982361462290575e-06,
2059
+ "loss": 0.3595,
2060
+ "step": 269
2061
+ },
2062
+ {
2063
+ "epoch": 0.7758620689655172,
2064
+ "grad_norm": 2.2720601558685303,
2065
+ "learning_rate": 2.9112113250911844e-06,
2066
+ "loss": 0.3284,
2067
+ "step": 270
2068
+ },
2069
+ {
2070
+ "epoch": 0.7787356321839081,
2071
+ "grad_norm": 2.1442339420318604,
2072
+ "learning_rate": 2.8407753214702694e-06,
2073
+ "loss": 0.3026,
2074
+ "step": 271
2075
+ },
2076
+ {
2077
+ "epoch": 0.7816091954022989,
2078
+ "grad_norm": 2.52978253364563,
2079
+ "learning_rate": 2.7710605472403373e-06,
2080
+ "loss": 0.2599,
2081
+ "step": 272
2082
+ },
2083
+ {
2084
+ "epoch": 0.7844827586206896,
2085
+ "grad_norm": 2.584745168685913,
2086
+ "learning_rate": 2.702074025556327e-06,
2087
+ "loss": 0.3211,
2088
+ "step": 273
2089
+ },
2090
+ {
2091
+ "epoch": 0.7873563218390804,
2092
+ "grad_norm": 1.9318722486495972,
2093
+ "learning_rate": 2.6338227062080924e-06,
2094
+ "loss": 0.2654,
2095
+ "step": 274
2096
+ },
2097
+ {
2098
+ "epoch": 0.7902298850574713,
2099
+ "grad_norm": 2.129678726196289,
2100
+ "learning_rate": 2.566313464920265e-06,
2101
+ "loss": 0.2944,
2102
+ "step": 275
2103
+ },
2104
+ {
2105
+ "epoch": 0.7931034482758621,
2106
+ "grad_norm": 2.0152359008789062,
2107
+ "learning_rate": 2.4995531026595952e-06,
2108
+ "loss": 0.2912,
2109
+ "step": 276
2110
+ },
2111
+ {
2112
+ "epoch": 0.7959770114942529,
2113
+ "grad_norm": 2.8792333602905273,
2114
+ "learning_rate": 2.4335483449498053e-06,
2115
+ "loss": 0.2315,
2116
+ "step": 277
2117
+ },
2118
+ {
2119
+ "epoch": 0.7988505747126436,
2120
+ "grad_norm": 2.067209482192993,
2121
+ "learning_rate": 2.3683058411940563e-06,
2122
+ "loss": 0.299,
2123
+ "step": 278
2124
+ },
2125
+ {
2126
+ "epoch": 0.8017241379310345,
2127
+ "grad_norm": 2.9557700157165527,
2128
+ "learning_rate": 2.3038321640050763e-06,
2129
+ "loss": 0.3056,
2130
+ "step": 279
2131
+ },
2132
+ {
2133
+ "epoch": 0.8045977011494253,
2134
+ "grad_norm": 2.989224910736084,
2135
+ "learning_rate": 2.2401338085430326e-06,
2136
+ "loss": 0.291,
2137
+ "step": 280
2138
+ },
2139
+ {
2140
+ "epoch": 0.8045977011494253,
2141
+ "eval_accuracy": 0.8638613861386139,
2142
+ "eval_f1": 0.7441860465116279,
2143
+ "eval_loss": 0.313725084066391,
2144
+ "eval_precision": 0.7339449541284404,
2145
+ "eval_recall": 0.7547169811320755,
2146
+ "eval_runtime": 16.9214,
2147
+ "eval_samples_per_second": 6.264,
2148
+ "eval_steps_per_second": 0.236,
2149
+ "step": 280
2150
+ },
2151
+ {
2152
+ "epoch": 0.8074712643678161,
2153
+ "grad_norm": 1.7180490493774414,
2154
+ "learning_rate": 2.177217191861183e-06,
2155
+ "loss": 0.2469,
2156
+ "step": 281
2157
+ },
2158
+ {
2159
+ "epoch": 0.8103448275862069,
2160
+ "grad_norm": 2.1826789379119873,
2161
+ "learning_rate": 2.115088652259446e-06,
2162
+ "loss": 0.3332,
2163
+ "step": 282
2164
+ },
2165
+ {
2166
+ "epoch": 0.8132183908045977,
2167
+ "grad_norm": 1.5366544723510742,
2168
+ "learning_rate": 2.053754448645846e-06,
2169
+ "loss": 0.228,
2170
+ "step": 283
2171
+ },
2172
+ {
2173
+ "epoch": 0.8160919540229885,
2174
+ "grad_norm": 2.2642948627471924,
2175
+ "learning_rate": 1.9932207599059782e-06,
2176
+ "loss": 0.2885,
2177
+ "step": 284
2178
+ },
2179
+ {
2180
+ "epoch": 0.8189655172413793,
2181
+ "grad_norm": 1.702837347984314,
2182
+ "learning_rate": 1.933493684280574e-06,
2183
+ "loss": 0.2484,
2184
+ "step": 285
2185
+ },
2186
+ {
2187
+ "epoch": 0.8218390804597702,
2188
+ "grad_norm": 2.2830252647399902,
2189
+ "learning_rate": 1.8745792387511241e-06,
2190
+ "loss": 0.299,
2191
+ "step": 286
2192
+ },
2193
+ {
2194
+ "epoch": 0.8247126436781609,
2195
+ "grad_norm": 2.5294742584228516,
2196
+ "learning_rate": 1.8164833584337216e-06,
2197
+ "loss": 0.28,
2198
+ "step": 287
2199
+ },
2200
+ {
2201
+ "epoch": 0.8275862068965517,
2202
+ "grad_norm": 2.3175997734069824,
2203
+ "learning_rate": 1.75921189598118e-06,
2204
+ "loss": 0.3008,
2205
+ "step": 288
2206
+ },
2207
+ {
2208
+ "epoch": 0.8304597701149425,
2209
+ "grad_norm": 2.312864065170288,
2210
+ "learning_rate": 1.7027706209933903e-06,
2211
+ "loss": 0.3326,
2212
+ "step": 289
2213
+ },
2214
+ {
2215
+ "epoch": 0.8333333333333334,
2216
+ "grad_norm": 1.6961781978607178,
2217
+ "learning_rate": 1.6471652194361131e-06,
2218
+ "loss": 0.2999,
2219
+ "step": 290
2220
+ },
2221
+ {
2222
+ "epoch": 0.8362068965517241,
2223
+ "grad_norm": 2.65458607673645,
2224
+ "learning_rate": 1.5924012930681643e-06,
2225
+ "loss": 0.3208,
2226
+ "step": 291
2227
+ },
2228
+ {
2229
+ "epoch": 0.8390804597701149,
2230
+ "grad_norm": 4.437558174133301,
2231
+ "learning_rate": 1.5384843588770626e-06,
2232
+ "loss": 0.344,
2233
+ "step": 292
2234
+ },
2235
+ {
2236
+ "epoch": 0.8419540229885057,
2237
+ "grad_norm": 2.285850763320923,
2238
+ "learning_rate": 1.4854198485232696e-06,
2239
+ "loss": 0.2467,
2240
+ "step": 293
2241
+ },
2242
+ {
2243
+ "epoch": 0.8448275862068966,
2244
+ "grad_norm": 2.915544033050537,
2245
+ "learning_rate": 1.433213107792991e-06,
2246
+ "loss": 0.3635,
2247
+ "step": 294
2248
+ },
2249
+ {
2250
+ "epoch": 0.8477011494252874,
2251
+ "grad_norm": 2.448625087738037,
2252
+ "learning_rate": 1.3818693960596186e-06,
2253
+ "loss": 0.3344,
2254
+ "step": 295
2255
+ },
2256
+ {
2257
+ "epoch": 0.8505747126436781,
2258
+ "grad_norm": 2.1480016708374023,
2259
+ "learning_rate": 1.3313938857539133e-06,
2260
+ "loss": 0.2828,
2261
+ "step": 296
2262
+ },
2263
+ {
2264
+ "epoch": 0.853448275862069,
2265
+ "grad_norm": 2.444200277328491,
2266
+ "learning_rate": 1.2817916618429194e-06,
2267
+ "loss": 0.3014,
2268
+ "step": 297
2269
+ },
2270
+ {
2271
+ "epoch": 0.8563218390804598,
2272
+ "grad_norm": 2.8024673461914062,
2273
+ "learning_rate": 1.2330677213177034e-06,
2274
+ "loss": 0.2614,
2275
+ "step": 298
2276
+ },
2277
+ {
2278
+ "epoch": 0.8591954022988506,
2279
+ "grad_norm": 3.5111196041107178,
2280
+ "learning_rate": 1.1852269726899423e-06,
2281
+ "loss": 0.3055,
2282
+ "step": 299
2283
+ },
2284
+ {
2285
+ "epoch": 0.8620689655172413,
2286
+ "grad_norm": 1.937780737876892,
2287
+ "learning_rate": 1.138274235497443e-06,
2288
+ "loss": 0.2578,
2289
+ "step": 300
2290
+ },
2291
+ {
2292
+ "epoch": 0.8620689655172413,
2293
+ "eval_accuracy": 0.8638613861386139,
2294
+ "eval_f1": 0.7441860465116279,
2295
+ "eval_loss": 0.31676626205444336,
2296
+ "eval_precision": 0.7339449541284404,
2297
+ "eval_recall": 0.7547169811320755,
2298
+ "eval_runtime": 16.8256,
2299
+ "eval_samples_per_second": 6.3,
2300
+ "eval_steps_per_second": 0.238,
2301
+ "step": 300
  }
  ],
  "logging_steps": 1,

@@ -1558,7 +2318,7 @@
  "attributes": {}
  }
  },
- "total_flos": 6.337811729665229e+16,
+ "total_flos": 9.4768839882965e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null