{ "best_metric": 1.1211919784545898, "best_model_checkpoint": "autotrain-detr-cppe-v5/checkpoint-3500", "epoch": 28.0, "eval_steps": 500, "global_step": 3500, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.008, "grad_norm": null, "learning_rate": 0.0, "loss": 3.7647, "step": 1 }, { "epoch": 0.016, "grad_norm": null, "learning_rate": 0.0, "loss": 3.5794, "step": 2 }, { "epoch": 0.024, "grad_norm": null, "learning_rate": 0.0, "loss": 3.5484, "step": 3 }, { "epoch": 0.032, "grad_norm": 77.78690338134766, "learning_rate": 4e-08, "loss": 3.4057, "step": 4 }, { "epoch": 0.04, "grad_norm": null, "learning_rate": 4e-08, "loss": 3.5983, "step": 5 }, { "epoch": 0.048, "grad_norm": null, "learning_rate": 4e-08, "loss": 3.6096, "step": 6 }, { "epoch": 0.056, "grad_norm": 108.48560333251953, "learning_rate": 8e-08, "loss": 3.4746, "step": 7 }, { "epoch": 0.064, "grad_norm": 95.77371215820312, "learning_rate": 1.2e-07, "loss": 3.2105, "step": 8 }, { "epoch": 0.072, "grad_norm": 83.74858093261719, "learning_rate": 1.6e-07, "loss": 3.9761, "step": 9 }, { "epoch": 0.08, "grad_norm": 242.64292907714844, "learning_rate": 2.0000000000000002e-07, "loss": 3.4112, "step": 10 }, { "epoch": 0.088, "grad_norm": 110.69537353515625, "learning_rate": 2.4e-07, "loss": 4.0668, "step": 11 }, { "epoch": 0.096, "grad_norm": 475.1731262207031, "learning_rate": 2.8e-07, "loss": 3.8521, "step": 12 }, { "epoch": 0.104, "grad_norm": 459.7466735839844, "learning_rate": 3.2e-07, "loss": 3.6909, "step": 13 }, { "epoch": 0.112, "grad_norm": null, "learning_rate": 3.2e-07, "loss": 3.5599, "step": 14 }, { "epoch": 0.12, "grad_norm": 98.17658996582031, "learning_rate": 3.6e-07, "loss": 4.2388, "step": 15 }, { "epoch": 0.128, "grad_norm": 191.837646484375, "learning_rate": 4.0000000000000003e-07, "loss": 3.6485, "step": 16 }, { "epoch": 0.136, "grad_norm": 185.78134155273438, "learning_rate": 4.4e-07, "loss": 3.6589, "step": 
17 }, { "epoch": 0.144, "grad_norm": 281.3375244140625, "learning_rate": 4.8e-07, "loss": 4.0047, "step": 18 }, { "epoch": 0.152, "grad_norm": 247.6005401611328, "learning_rate": 5.2e-07, "loss": 4.2337, "step": 19 }, { "epoch": 0.16, "grad_norm": 93.20538330078125, "learning_rate": 5.6e-07, "loss": 4.1875, "step": 20 }, { "epoch": 0.168, "grad_norm": 106.6589584350586, "learning_rate": 6.000000000000001e-07, "loss": 3.5176, "step": 21 }, { "epoch": 0.176, "grad_norm": 92.88361358642578, "learning_rate": 6.4e-07, "loss": 4.1421, "step": 22 }, { "epoch": 0.184, "grad_norm": 288.2242736816406, "learning_rate": 6.8e-07, "loss": 3.8259, "step": 23 }, { "epoch": 0.192, "grad_norm": 987.2598266601562, "learning_rate": 7.2e-07, "loss": 3.633, "step": 24 }, { "epoch": 0.2, "grad_norm": 253.25917053222656, "learning_rate": 7.6e-07, "loss": 3.8135, "step": 25 }, { "epoch": 0.208, "grad_norm": 76.75688934326172, "learning_rate": 8.000000000000001e-07, "loss": 3.6798, "step": 26 }, { "epoch": 0.216, "grad_norm": 253.015869140625, "learning_rate": 8.4e-07, "loss": 4.1093, "step": 27 }, { "epoch": 0.224, "grad_norm": 356.33599853515625, "learning_rate": 8.8e-07, "loss": 3.3791, "step": 28 }, { "epoch": 0.232, "grad_norm": 1894.1221923828125, "learning_rate": 9.2e-07, "loss": 3.3055, "step": 29 }, { "epoch": 0.24, "grad_norm": 268.42889404296875, "learning_rate": 9.6e-07, "loss": 3.6915, "step": 30 }, { "epoch": 0.248, "grad_norm": 172.3096466064453, "learning_rate": 1.0000000000000002e-06, "loss": 3.7208, "step": 31 }, { "epoch": 0.256, "grad_norm": 172.0184783935547, "learning_rate": 1.04e-06, "loss": 3.4114, "step": 32 }, { "epoch": 0.264, "grad_norm": 194.84136962890625, "learning_rate": 1.08e-06, "loss": 3.3198, "step": 33 }, { "epoch": 0.272, "grad_norm": 158.36447143554688, "learning_rate": 1.12e-06, "loss": 3.1739, "step": 34 }, { "epoch": 0.28, "grad_norm": 119.14917755126953, "learning_rate": 1.16e-06, "loss": 3.7567, "step": 35 }, { "epoch": 0.288, "grad_norm": 
955.8602294921875, "learning_rate": 1.2000000000000002e-06, "loss": 4.1999, "step": 36 }, { "epoch": 0.296, "grad_norm": 193.58212280273438, "learning_rate": 1.24e-06, "loss": 4.3053, "step": 37 }, { "epoch": 0.304, "grad_norm": 72.98123168945312, "learning_rate": 1.28e-06, "loss": 3.478, "step": 38 }, { "epoch": 0.312, "grad_norm": 94.13905334472656, "learning_rate": 1.32e-06, "loss": 4.0447, "step": 39 }, { "epoch": 0.32, "grad_norm": 413.984375, "learning_rate": 1.36e-06, "loss": 3.497, "step": 40 }, { "epoch": 0.328, "grad_norm": 196.7478790283203, "learning_rate": 1.4000000000000001e-06, "loss": 3.5272, "step": 41 }, { "epoch": 0.336, "grad_norm": 114.77792358398438, "learning_rate": 1.44e-06, "loss": 3.7713, "step": 42 }, { "epoch": 0.344, "grad_norm": 210.59864807128906, "learning_rate": 1.4800000000000002e-06, "loss": 3.1356, "step": 43 }, { "epoch": 0.352, "grad_norm": 1363.4066162109375, "learning_rate": 1.52e-06, "loss": 3.3702, "step": 44 }, { "epoch": 0.36, "grad_norm": 555.3329467773438, "learning_rate": 1.56e-06, "loss": 3.9584, "step": 45 }, { "epoch": 0.368, "grad_norm": 71.6576919555664, "learning_rate": 1.6000000000000001e-06, "loss": 3.4655, "step": 46 }, { "epoch": 0.376, "grad_norm": 152.8477325439453, "learning_rate": 1.6400000000000002e-06, "loss": 3.9939, "step": 47 }, { "epoch": 0.384, "grad_norm": 173.6901092529297, "learning_rate": 1.68e-06, "loss": 3.5284, "step": 48 }, { "epoch": 0.392, "grad_norm": 465.4873962402344, "learning_rate": 1.72e-06, "loss": 3.7554, "step": 49 }, { "epoch": 0.4, "grad_norm": 157.3064422607422, "learning_rate": 1.76e-06, "loss": 3.7027, "step": 50 }, { "epoch": 0.408, "grad_norm": 605.2899780273438, "learning_rate": 1.8e-06, "loss": 3.4186, "step": 51 }, { "epoch": 0.416, "grad_norm": 209.40464782714844, "learning_rate": 1.84e-06, "loss": 3.4208, "step": 52 }, { "epoch": 0.424, "grad_norm": 562.0873413085938, "learning_rate": 1.8800000000000002e-06, "loss": 3.5606, "step": 53 }, { "epoch": 0.432, "grad_norm": 
174.66201782226562, "learning_rate": 1.92e-06, "loss": 3.8259, "step": 54 }, { "epoch": 0.44, "grad_norm": 359.9379577636719, "learning_rate": 1.96e-06, "loss": 3.3368, "step": 55 }, { "epoch": 0.448, "grad_norm": 2858.2978515625, "learning_rate": 2.0000000000000003e-06, "loss": 3.5768, "step": 56 }, { "epoch": 0.456, "grad_norm": 575.7567138671875, "learning_rate": 2.0400000000000004e-06, "loss": 3.9784, "step": 57 }, { "epoch": 0.464, "grad_norm": 123.05894470214844, "learning_rate": 2.08e-06, "loss": 3.5179, "step": 58 }, { "epoch": 0.472, "grad_norm": 236.787841796875, "learning_rate": 2.12e-06, "loss": 3.6087, "step": 59 }, { "epoch": 0.48, "grad_norm": 204.83021545410156, "learning_rate": 2.16e-06, "loss": 3.5816, "step": 60 }, { "epoch": 0.488, "grad_norm": 73.8226089477539, "learning_rate": 2.2e-06, "loss": 3.2287, "step": 61 }, { "epoch": 0.496, "grad_norm": 93.2711181640625, "learning_rate": 2.24e-06, "loss": 4.0687, "step": 62 }, { "epoch": 0.504, "grad_norm": 64.57947540283203, "learning_rate": 2.28e-06, "loss": 3.1884, "step": 63 }, { "epoch": 0.512, "grad_norm": 98.38155364990234, "learning_rate": 2.32e-06, "loss": 3.5648, "step": 64 }, { "epoch": 0.52, "grad_norm": 234.73277282714844, "learning_rate": 2.36e-06, "loss": 3.5528, "step": 65 }, { "epoch": 0.528, "grad_norm": 201.83653259277344, "learning_rate": 2.4000000000000003e-06, "loss": 3.1243, "step": 66 }, { "epoch": 0.536, "grad_norm": 1907.228759765625, "learning_rate": 2.4400000000000004e-06, "loss": 3.4188, "step": 67 }, { "epoch": 0.544, "grad_norm": 66.8006362915039, "learning_rate": 2.48e-06, "loss": 3.7022, "step": 68 }, { "epoch": 0.552, "grad_norm": 188.55796813964844, "learning_rate": 2.52e-06, "loss": 3.3109, "step": 69 }, { "epoch": 0.56, "grad_norm": 608.3219604492188, "learning_rate": 2.56e-06, "loss": 3.4885, "step": 70 }, { "epoch": 0.568, "grad_norm": 296.72259521484375, "learning_rate": 2.6e-06, "loss": 3.2881, "step": 71 }, { "epoch": 0.576, "grad_norm": 227.28932189941406, 
"learning_rate": 2.64e-06, "loss": 3.8933, "step": 72 }, { "epoch": 0.584, "grad_norm": 333.1708679199219, "learning_rate": 2.68e-06, "loss": 3.8111, "step": 73 }, { "epoch": 0.592, "grad_norm": 58.044654846191406, "learning_rate": 2.72e-06, "loss": 3.5299, "step": 74 }, { "epoch": 0.6, "grad_norm": 404.6145324707031, "learning_rate": 2.7600000000000003e-06, "loss": 3.4309, "step": 75 }, { "epoch": 0.608, "grad_norm": 156.56214904785156, "learning_rate": 2.8000000000000003e-06, "loss": 3.4178, "step": 76 }, { "epoch": 0.616, "grad_norm": 78.42012023925781, "learning_rate": 2.8400000000000003e-06, "loss": 3.7092, "step": 77 }, { "epoch": 0.624, "grad_norm": null, "learning_rate": 2.8400000000000003e-06, "loss": 3.608, "step": 78 }, { "epoch": 0.632, "grad_norm": 232.09852600097656, "learning_rate": 2.88e-06, "loss": 3.0901, "step": 79 }, { "epoch": 0.64, "grad_norm": 171.36300659179688, "learning_rate": 2.92e-06, "loss": 2.9018, "step": 80 }, { "epoch": 0.648, "grad_norm": 320.4952087402344, "learning_rate": 2.9600000000000005e-06, "loss": 3.3898, "step": 81 }, { "epoch": 0.656, "grad_norm": 140.93450927734375, "learning_rate": 3e-06, "loss": 3.4313, "step": 82 }, { "epoch": 0.664, "grad_norm": 54.06045150756836, "learning_rate": 3.04e-06, "loss": 3.4359, "step": 83 }, { "epoch": 0.672, "grad_norm": 127.67021179199219, "learning_rate": 3.08e-06, "loss": 3.8169, "step": 84 }, { "epoch": 0.68, "grad_norm": 186.7892303466797, "learning_rate": 3.12e-06, "loss": 3.5391, "step": 85 }, { "epoch": 0.688, "grad_norm": 56.08567428588867, "learning_rate": 3.1600000000000007e-06, "loss": 4.3557, "step": 86 }, { "epoch": 0.696, "grad_norm": 148.00765991210938, "learning_rate": 3.2000000000000003e-06, "loss": 3.1061, "step": 87 }, { "epoch": 0.704, "grad_norm": 196.13169860839844, "learning_rate": 3.24e-06, "loss": 3.0595, "step": 88 }, { "epoch": 0.712, "grad_norm": 142.4077911376953, "learning_rate": 3.2800000000000004e-06, "loss": 3.6094, "step": 89 }, { "epoch": 0.72, 
"grad_norm": 74.17877197265625, "learning_rate": 3.3200000000000004e-06, "loss": 3.4753, "step": 90 }, { "epoch": 0.728, "grad_norm": 272.9970703125, "learning_rate": 3.36e-06, "loss": 3.2863, "step": 91 }, { "epoch": 0.736, "grad_norm": 212.2640380859375, "learning_rate": 3.4000000000000005e-06, "loss": 3.787, "step": 92 }, { "epoch": 0.744, "grad_norm": 133.53164672851562, "learning_rate": 3.44e-06, "loss": 3.5177, "step": 93 }, { "epoch": 0.752, "grad_norm": 120.76679229736328, "learning_rate": 3.4799999999999997e-06, "loss": 3.0062, "step": 94 }, { "epoch": 0.76, "grad_norm": 163.17042541503906, "learning_rate": 3.52e-06, "loss": 3.4405, "step": 95 }, { "epoch": 0.768, "grad_norm": 120.1762924194336, "learning_rate": 3.5600000000000002e-06, "loss": 3.5292, "step": 96 }, { "epoch": 0.776, "grad_norm": 138.59617614746094, "learning_rate": 3.6e-06, "loss": 3.3844, "step": 97 }, { "epoch": 0.784, "grad_norm": 344.3218994140625, "learning_rate": 3.6400000000000003e-06, "loss": 3.0961, "step": 98 }, { "epoch": 0.792, "grad_norm": 106.65750122070312, "learning_rate": 3.68e-06, "loss": 3.131, "step": 99 }, { "epoch": 0.8, "grad_norm": 288.0075988769531, "learning_rate": 3.72e-06, "loss": 3.8259, "step": 100 }, { "epoch": 0.808, "grad_norm": 65.88523864746094, "learning_rate": 3.7600000000000004e-06, "loss": 3.3049, "step": 101 }, { "epoch": 0.816, "grad_norm": 112.15385437011719, "learning_rate": 3.8e-06, "loss": 3.622, "step": 102 }, { "epoch": 0.824, "grad_norm": 125.27320861816406, "learning_rate": 3.84e-06, "loss": 3.3119, "step": 103 }, { "epoch": 0.832, "grad_norm": 44.02014923095703, "learning_rate": 3.88e-06, "loss": 4.3097, "step": 104 }, { "epoch": 0.84, "grad_norm": 104.0511245727539, "learning_rate": 3.92e-06, "loss": 3.8539, "step": 105 }, { "epoch": 0.848, "grad_norm": 214.08331298828125, "learning_rate": 3.96e-06, "loss": 3.1076, "step": 106 }, { "epoch": 0.856, "grad_norm": 89.59166717529297, "learning_rate": 4.000000000000001e-06, "loss": 3.2761, 
"step": 107 }, { "epoch": 0.864, "grad_norm": 277.7024230957031, "learning_rate": 4.04e-06, "loss": 3.2997, "step": 108 }, { "epoch": 0.872, "grad_norm": 85.34432983398438, "learning_rate": 4.080000000000001e-06, "loss": 3.0809, "step": 109 }, { "epoch": 0.88, "grad_norm": 93.38963317871094, "learning_rate": 4.12e-06, "loss": 3.6079, "step": 110 }, { "epoch": 0.888, "grad_norm": 79.35243225097656, "learning_rate": 4.16e-06, "loss": 3.3537, "step": 111 }, { "epoch": 0.896, "grad_norm": 87.19813537597656, "learning_rate": 4.2000000000000004e-06, "loss": 2.7586, "step": 112 }, { "epoch": 0.904, "grad_norm": 132.16952514648438, "learning_rate": 4.24e-06, "loss": 3.3798, "step": 113 }, { "epoch": 0.912, "grad_norm": 41.574790954589844, "learning_rate": 4.28e-06, "loss": 2.91, "step": 114 }, { "epoch": 0.92, "grad_norm": 72.4201431274414, "learning_rate": 4.32e-06, "loss": 2.7766, "step": 115 }, { "epoch": 0.928, "grad_norm": 211.33627319335938, "learning_rate": 4.360000000000001e-06, "loss": 2.9457, "step": 116 }, { "epoch": 0.936, "grad_norm": 100.65950012207031, "learning_rate": 4.4e-06, "loss": 3.1887, "step": 117 }, { "epoch": 0.944, "grad_norm": 82.49190521240234, "learning_rate": 4.440000000000001e-06, "loss": 3.7408, "step": 118 }, { "epoch": 0.952, "grad_norm": 1994.0328369140625, "learning_rate": 4.48e-06, "loss": 3.1126, "step": 119 }, { "epoch": 0.96, "grad_norm": 53.00416564941406, "learning_rate": 4.52e-06, "loss": 3.0311, "step": 120 }, { "epoch": 0.968, "grad_norm": 200.80479431152344, "learning_rate": 4.56e-06, "loss": 3.3107, "step": 121 }, { "epoch": 0.976, "grad_norm": 197.59518432617188, "learning_rate": 4.6e-06, "loss": 4.0056, "step": 122 }, { "epoch": 0.984, "grad_norm": 313.65997314453125, "learning_rate": 4.64e-06, "loss": 2.9977, "step": 123 }, { "epoch": 0.992, "grad_norm": 112.83658599853516, "learning_rate": 4.68e-06, "loss": 3.1594, "step": 124 }, { "epoch": 1.0, "grad_norm": 80.4415283203125, "learning_rate": 4.72e-06, "loss": 2.9682, 
"step": 125 }, { "epoch": 1.0, "eval_loss": 3.199169874191284, "eval_map": 0.003, "eval_map_50": 0.0071, "eval_map_75": 0.0025, "eval_map_Coverall": 0.0111, "eval_map_Face_Shield": 0.0, "eval_map_Gloves": 0.0012, "eval_map_Goggles": 0.0, "eval_map_Mask": 0.0025, "eval_map_large": 0.004, "eval_map_medium": 0.0005, "eval_map_small": -1.0, "eval_mar_1": 0.0114, "eval_mar_10": 0.0445, "eval_mar_100": 0.1297, "eval_mar_100_Coverall": 0.4533, "eval_mar_100_Face_Shield": 0.0, "eval_mar_100_Gloves": 0.0623, "eval_mar_100_Goggles": 0.0, "eval_mar_100_Mask": 0.1327, "eval_mar_large": 0.1589, "eval_mar_medium": 0.0176, "eval_mar_small": -1.0, "eval_runtime": 4.7163, "eval_samples_per_second": 6.149, "eval_steps_per_second": 0.424, "step": 125 }, { "epoch": 1.008, "grad_norm": 796.6498413085938, "learning_rate": 4.76e-06, "loss": 3.1554, "step": 126 }, { "epoch": 1.016, "grad_norm": 67.48271942138672, "learning_rate": 4.800000000000001e-06, "loss": 2.7514, "step": 127 }, { "epoch": 1.024, "grad_norm": 116.62411499023438, "learning_rate": 4.84e-06, "loss": 3.201, "step": 128 }, { "epoch": 1.032, "grad_norm": 257.3663635253906, "learning_rate": 4.880000000000001e-06, "loss": 3.5328, "step": 129 }, { "epoch": 1.04, "grad_norm": 102.82405090332031, "learning_rate": 4.92e-06, "loss": 2.9302, "step": 130 }, { "epoch": 1.048, "grad_norm": 337.39764404296875, "learning_rate": 4.96e-06, "loss": 3.3363, "step": 131 }, { "epoch": 1.056, "grad_norm": 59.139137268066406, "learning_rate": 5e-06, "loss": 3.7633, "step": 132 }, { "epoch": 1.064, "grad_norm": 55.291961669921875, "learning_rate": 5.04e-06, "loss": 2.6264, "step": 133 }, { "epoch": 1.072, "grad_norm": 65.71623229980469, "learning_rate": 5.08e-06, "loss": 3.33, "step": 134 }, { "epoch": 1.08, "grad_norm": 109.36235809326172, "learning_rate": 5.12e-06, "loss": 3.3, "step": 135 }, { "epoch": 1.088, "grad_norm": 74.1268539428711, "learning_rate": 5.1600000000000006e-06, "loss": 3.0468, "step": 136 }, { "epoch": 1.096, "grad_norm": 
46.05384826660156, "learning_rate": 5.2e-06, "loss": 3.0235, "step": 137 }, { "epoch": 1.104, "grad_norm": 197.75096130371094, "learning_rate": 5.240000000000001e-06, "loss": 3.0261, "step": 138 }, { "epoch": 1.112, "grad_norm": 74.72071075439453, "learning_rate": 5.28e-06, "loss": 2.7489, "step": 139 }, { "epoch": 1.12, "grad_norm": 343.4049072265625, "learning_rate": 5.32e-06, "loss": 2.5224, "step": 140 }, { "epoch": 1.1280000000000001, "grad_norm": 134.068115234375, "learning_rate": 5.36e-06, "loss": 2.8338, "step": 141 }, { "epoch": 1.1360000000000001, "grad_norm": 478.9488220214844, "learning_rate": 5.4e-06, "loss": 2.9951, "step": 142 }, { "epoch": 1.144, "grad_norm": 95.44683837890625, "learning_rate": 5.44e-06, "loss": 3.0151, "step": 143 }, { "epoch": 1.152, "grad_norm": 90.36193084716797, "learning_rate": 5.48e-06, "loss": 2.6047, "step": 144 }, { "epoch": 1.16, "grad_norm": 80.18306732177734, "learning_rate": 5.5200000000000005e-06, "loss": 3.2978, "step": 145 }, { "epoch": 1.168, "grad_norm": 48.70478057861328, "learning_rate": 5.56e-06, "loss": 2.9291, "step": 146 }, { "epoch": 1.176, "grad_norm": 68.59937286376953, "learning_rate": 5.600000000000001e-06, "loss": 3.1259, "step": 147 }, { "epoch": 1.184, "grad_norm": 197.1977081298828, "learning_rate": 5.64e-06, "loss": 2.8451, "step": 148 }, { "epoch": 1.192, "grad_norm": 505.78009033203125, "learning_rate": 5.680000000000001e-06, "loss": 2.8151, "step": 149 }, { "epoch": 1.2, "grad_norm": 319.4236145019531, "learning_rate": 5.72e-06, "loss": 3.2639, "step": 150 }, { "epoch": 1.208, "grad_norm": 124.53226470947266, "learning_rate": 5.76e-06, "loss": 3.1472, "step": 151 }, { "epoch": 1.216, "grad_norm": 577.2037353515625, "learning_rate": 5.8e-06, "loss": 2.8906, "step": 152 }, { "epoch": 1.224, "grad_norm": 140.76031494140625, "learning_rate": 5.84e-06, "loss": 2.3924, "step": 153 }, { "epoch": 1.232, "grad_norm": 60.90264892578125, "learning_rate": 5.8800000000000005e-06, "loss": 2.7163, "step": 154 
}, { "epoch": 1.24, "grad_norm": 192.56190490722656, "learning_rate": 5.920000000000001e-06, "loss": 3.063, "step": 155 }, { "epoch": 1.248, "grad_norm": 204.61683654785156, "learning_rate": 5.9600000000000005e-06, "loss": 3.1782, "step": 156 }, { "epoch": 1.256, "grad_norm": 81.71315002441406, "learning_rate": 6e-06, "loss": 2.5249, "step": 157 }, { "epoch": 1.264, "grad_norm": 268.7687072753906, "learning_rate": 6.040000000000001e-06, "loss": 2.8725, "step": 158 }, { "epoch": 1.272, "grad_norm": 74.00654602050781, "learning_rate": 6.08e-06, "loss": 3.402, "step": 159 }, { "epoch": 1.28, "grad_norm": 193.5031280517578, "learning_rate": 6.12e-06, "loss": 2.9344, "step": 160 }, { "epoch": 1.288, "grad_norm": 101.5681381225586, "learning_rate": 6.16e-06, "loss": 2.4157, "step": 161 }, { "epoch": 1.296, "grad_norm": 44.99148941040039, "learning_rate": 6.2e-06, "loss": 3.1815, "step": 162 }, { "epoch": 1.304, "grad_norm": 65.95751953125, "learning_rate": 6.24e-06, "loss": 2.8802, "step": 163 }, { "epoch": 1.312, "grad_norm": 179.668701171875, "learning_rate": 6.28e-06, "loss": 2.88, "step": 164 }, { "epoch": 1.32, "grad_norm": 119.9828872680664, "learning_rate": 6.320000000000001e-06, "loss": 3.2008, "step": 165 }, { "epoch": 1.328, "grad_norm": 89.03215789794922, "learning_rate": 6.360000000000001e-06, "loss": 3.067, "step": 166 }, { "epoch": 1.336, "grad_norm": 161.28749084472656, "learning_rate": 6.4000000000000006e-06, "loss": 2.7564, "step": 167 }, { "epoch": 1.3439999999999999, "grad_norm": 135.52745056152344, "learning_rate": 6.44e-06, "loss": 2.4935, "step": 168 }, { "epoch": 1.3519999999999999, "grad_norm": 44.551483154296875, "learning_rate": 6.48e-06, "loss": 2.9295, "step": 169 }, { "epoch": 1.3599999999999999, "grad_norm": 157.65103149414062, "learning_rate": 6.519999999999999e-06, "loss": 2.7715, "step": 170 }, { "epoch": 1.3679999999999999, "grad_norm": 78.12393951416016, "learning_rate": 6.560000000000001e-06, "loss": 2.8456, "step": 171 }, { "epoch": 
1.376, "grad_norm": 66.47402954101562, "learning_rate": 6.6e-06, "loss": 2.6169, "step": 172 }, { "epoch": 1.384, "grad_norm": 103.39900970458984, "learning_rate": 6.640000000000001e-06, "loss": 2.4722, "step": 173 }, { "epoch": 1.392, "grad_norm": 50.431339263916016, "learning_rate": 6.68e-06, "loss": 2.7375, "step": 174 }, { "epoch": 1.4, "grad_norm": 77.15655517578125, "learning_rate": 6.72e-06, "loss": 2.7284, "step": 175 }, { "epoch": 1.408, "grad_norm": 1775.5538330078125, "learning_rate": 6.76e-06, "loss": 2.6401, "step": 176 }, { "epoch": 1.416, "grad_norm": 55.378108978271484, "learning_rate": 6.800000000000001e-06, "loss": 3.634, "step": 177 }, { "epoch": 1.424, "grad_norm": 49.10795211791992, "learning_rate": 6.840000000000001e-06, "loss": 2.6422, "step": 178 }, { "epoch": 1.432, "grad_norm": 118.33405303955078, "learning_rate": 6.88e-06, "loss": 2.391, "step": 179 }, { "epoch": 1.44, "grad_norm": 87.25895690917969, "learning_rate": 6.92e-06, "loss": 2.376, "step": 180 }, { "epoch": 1.448, "grad_norm": 179.66282653808594, "learning_rate": 6.9599999999999994e-06, "loss": 2.573, "step": 181 }, { "epoch": 1.456, "grad_norm": 69.740966796875, "learning_rate": 7.000000000000001e-06, "loss": 2.8699, "step": 182 }, { "epoch": 1.464, "grad_norm": 355.8136901855469, "learning_rate": 7.04e-06, "loss": 2.6649, "step": 183 }, { "epoch": 1.472, "grad_norm": 83.61421203613281, "learning_rate": 7.080000000000001e-06, "loss": 2.4852, "step": 184 }, { "epoch": 1.48, "grad_norm": 96.47577667236328, "learning_rate": 7.1200000000000004e-06, "loss": 2.5967, "step": 185 }, { "epoch": 1.488, "grad_norm": 412.5128173828125, "learning_rate": 7.16e-06, "loss": 2.4542, "step": 186 }, { "epoch": 1.496, "grad_norm": 303.74853515625, "learning_rate": 7.2e-06, "loss": 2.6143, "step": 187 }, { "epoch": 1.504, "grad_norm": 63.55987548828125, "learning_rate": 7.240000000000001e-06, "loss": 2.3513, "step": 188 }, { "epoch": 1.512, "grad_norm": 152.99110412597656, "learning_rate": 
7.280000000000001e-06, "loss": 2.7969, "step": 189 }, { "epoch": 1.52, "grad_norm": 183.38294982910156, "learning_rate": 7.32e-06, "loss": 2.5537, "step": 190 }, { "epoch": 1.528, "grad_norm": 57.44979476928711, "learning_rate": 7.36e-06, "loss": 2.6877, "step": 191 }, { "epoch": 1.536, "grad_norm": 49.810001373291016, "learning_rate": 7.4e-06, "loss": 2.7367, "step": 192 }, { "epoch": 1.544, "grad_norm": 58.08694076538086, "learning_rate": 7.44e-06, "loss": 2.4608, "step": 193 }, { "epoch": 1.552, "grad_norm": 132.98385620117188, "learning_rate": 7.480000000000001e-06, "loss": 2.945, "step": 194 }, { "epoch": 1.56, "grad_norm": 53.765235900878906, "learning_rate": 7.520000000000001e-06, "loss": 2.5377, "step": 195 }, { "epoch": 1.568, "grad_norm": 97.69255065917969, "learning_rate": 7.5600000000000005e-06, "loss": 2.6087, "step": 196 }, { "epoch": 1.576, "grad_norm": 79.44621276855469, "learning_rate": 7.6e-06, "loss": 3.3064, "step": 197 }, { "epoch": 1.584, "grad_norm": 90.53327941894531, "learning_rate": 7.64e-06, "loss": 2.5747, "step": 198 }, { "epoch": 1.592, "grad_norm": 52.13582229614258, "learning_rate": 7.68e-06, "loss": 2.101, "step": 199 }, { "epoch": 1.6, "grad_norm": 236.89805603027344, "learning_rate": 7.72e-06, "loss": 2.5695, "step": 200 }, { "epoch": 1.608, "grad_norm": 281.5578308105469, "learning_rate": 7.76e-06, "loss": 2.3289, "step": 201 }, { "epoch": 1.616, "grad_norm": 50.198307037353516, "learning_rate": 7.8e-06, "loss": 2.6504, "step": 202 }, { "epoch": 1.624, "grad_norm": 47.60285568237305, "learning_rate": 7.84e-06, "loss": 2.7003, "step": 203 }, { "epoch": 1.6320000000000001, "grad_norm": 82.13204193115234, "learning_rate": 7.879999999999999e-06, "loss": 3.1351, "step": 204 }, { "epoch": 1.6400000000000001, "grad_norm": 533.0662841796875, "learning_rate": 7.92e-06, "loss": 2.5012, "step": 205 }, { "epoch": 1.6480000000000001, "grad_norm": 227.41090393066406, "learning_rate": 7.96e-06, "loss": 2.6574, "step": 206 }, { "epoch": 
1.6560000000000001, "grad_norm": 110.71058654785156, "learning_rate": 8.000000000000001e-06, "loss": 2.5403, "step": 207 }, { "epoch": 1.6640000000000001, "grad_norm": 116.07962799072266, "learning_rate": 8.040000000000001e-06, "loss": 2.7148, "step": 208 }, { "epoch": 1.6720000000000002, "grad_norm": 115.4014663696289, "learning_rate": 8.08e-06, "loss": 2.8938, "step": 209 }, { "epoch": 1.6800000000000002, "grad_norm": 71.1370620727539, "learning_rate": 8.12e-06, "loss": 2.033, "step": 210 }, { "epoch": 1.688, "grad_norm": 98.97047424316406, "learning_rate": 8.160000000000001e-06, "loss": 2.6852, "step": 211 }, { "epoch": 1.696, "grad_norm": 55.1193733215332, "learning_rate": 8.200000000000001e-06, "loss": 2.1216, "step": 212 }, { "epoch": 1.704, "grad_norm": 43.64582061767578, "learning_rate": 8.24e-06, "loss": 2.5575, "step": 213 }, { "epoch": 1.712, "grad_norm": 192.74522399902344, "learning_rate": 8.28e-06, "loss": 2.6201, "step": 214 }, { "epoch": 1.72, "grad_norm": 70.30753326416016, "learning_rate": 8.32e-06, "loss": 2.1412, "step": 215 }, { "epoch": 1.728, "grad_norm": 48.135433197021484, "learning_rate": 8.36e-06, "loss": 2.4328, "step": 216 }, { "epoch": 1.736, "grad_norm": 109.64661407470703, "learning_rate": 8.400000000000001e-06, "loss": 2.3612, "step": 217 }, { "epoch": 1.744, "grad_norm": 39.77682876586914, "learning_rate": 8.44e-06, "loss": 2.6226, "step": 218 }, { "epoch": 1.752, "grad_norm": 282.6398010253906, "learning_rate": 8.48e-06, "loss": 2.4986, "step": 219 }, { "epoch": 1.76, "grad_norm": 87.57748413085938, "learning_rate": 8.52e-06, "loss": 2.5705, "step": 220 }, { "epoch": 1.768, "grad_norm": 172.5338592529297, "learning_rate": 8.56e-06, "loss": 2.5871, "step": 221 }, { "epoch": 1.776, "grad_norm": 164.57354736328125, "learning_rate": 8.599999999999999e-06, "loss": 2.4471, "step": 222 }, { "epoch": 1.784, "grad_norm": 98.23434448242188, "learning_rate": 8.64e-06, "loss": 2.1005, "step": 223 }, { "epoch": 1.792, "grad_norm": 
365.817138671875, "learning_rate": 8.68e-06, "loss": 2.5471, "step": 224 }, { "epoch": 1.8, "grad_norm": 60.467594146728516, "learning_rate": 8.720000000000001e-06, "loss": 2.1616, "step": 225 }, { "epoch": 1.808, "grad_norm": 70.09989166259766, "learning_rate": 8.76e-06, "loss": 2.2444, "step": 226 }, { "epoch": 1.8159999999999998, "grad_norm": 269.2063293457031, "learning_rate": 8.8e-06, "loss": 1.9487, "step": 227 }, { "epoch": 1.8239999999999998, "grad_norm": 35.3549690246582, "learning_rate": 8.840000000000002e-06, "loss": 2.3632, "step": 228 }, { "epoch": 1.8319999999999999, "grad_norm": 57.87598419189453, "learning_rate": 8.880000000000001e-06, "loss": 2.7313, "step": 229 }, { "epoch": 1.8399999999999999, "grad_norm": 111.15446472167969, "learning_rate": 8.920000000000001e-06, "loss": 2.9041, "step": 230 }, { "epoch": 1.8479999999999999, "grad_norm": 35.166385650634766, "learning_rate": 8.96e-06, "loss": 2.7072, "step": 231 }, { "epoch": 1.8559999999999999, "grad_norm": 30.82782554626465, "learning_rate": 9e-06, "loss": 2.9186, "step": 232 }, { "epoch": 1.8639999999999999, "grad_norm": 37.70854187011719, "learning_rate": 9.04e-06, "loss": 2.2305, "step": 233 }, { "epoch": 1.8719999999999999, "grad_norm": 51.58017349243164, "learning_rate": 9.080000000000001e-06, "loss": 2.3848, "step": 234 }, { "epoch": 1.88, "grad_norm": 70.08966064453125, "learning_rate": 9.12e-06, "loss": 2.1018, "step": 235 }, { "epoch": 1.888, "grad_norm": 89.23370361328125, "learning_rate": 9.16e-06, "loss": 2.6029, "step": 236 }, { "epoch": 1.896, "grad_norm": 98.90917205810547, "learning_rate": 9.2e-06, "loss": 2.5963, "step": 237 }, { "epoch": 1.904, "grad_norm": 42.784881591796875, "learning_rate": 9.24e-06, "loss": 1.9623, "step": 238 }, { "epoch": 1.912, "grad_norm": 46.257713317871094, "learning_rate": 9.28e-06, "loss": 2.3263, "step": 239 }, { "epoch": 1.92, "grad_norm": 87.98348999023438, "learning_rate": 9.32e-06, "loss": 2.3103, "step": 240 }, { "epoch": 1.928, "grad_norm": 
286.05609130859375, "learning_rate": 9.36e-06, "loss": 2.297, "step": 241 }, { "epoch": 1.936, "grad_norm": 109.1703872680664, "learning_rate": 9.4e-06, "loss": 3.8257, "step": 242 }, { "epoch": 1.944, "grad_norm": 28.61661148071289, "learning_rate": 9.44e-06, "loss": 2.8889, "step": 243 }, { "epoch": 1.952, "grad_norm": 42.9791259765625, "learning_rate": 9.48e-06, "loss": 2.1742, "step": 244 }, { "epoch": 1.96, "grad_norm": 91.47789001464844, "learning_rate": 9.52e-06, "loss": 2.2972, "step": 245 }, { "epoch": 1.968, "grad_norm": 28.153461456298828, "learning_rate": 9.560000000000002e-06, "loss": 2.5779, "step": 246 }, { "epoch": 1.976, "grad_norm": 48.34021759033203, "learning_rate": 9.600000000000001e-06, "loss": 2.3498, "step": 247 }, { "epoch": 1.984, "grad_norm": 37.073509216308594, "learning_rate": 9.640000000000001e-06, "loss": 2.1839, "step": 248 }, { "epoch": 1.992, "grad_norm": 95.86463165283203, "learning_rate": 9.68e-06, "loss": 2.2108, "step": 249 }, { "epoch": 2.0, "grad_norm": 27.09062957763672, "learning_rate": 9.72e-06, "loss": 2.1269, "step": 250 }, { "epoch": 2.0, "eval_loss": 2.448371648788452, "eval_map": 0.0034, "eval_map_50": 0.0089, "eval_map_75": 0.0023, "eval_map_Coverall": 0.0087, "eval_map_Face_Shield": 0.0, "eval_map_Gloves": 0.0021, "eval_map_Goggles": 0.0, "eval_map_Mask": 0.0061, "eval_map_large": 0.0046, "eval_map_medium": 0.0016, "eval_map_small": -1.0, "eval_mar_1": 0.0074, "eval_mar_10": 0.0378, "eval_mar_100": 0.1533, "eval_mar_100_Coverall": 0.4222, "eval_mar_100_Face_Shield": 0.0, "eval_mar_100_Gloves": 0.1, "eval_mar_100_Goggles": 0.0, "eval_mar_100_Mask": 0.2442, "eval_mar_large": 0.19, "eval_mar_medium": 0.0469, "eval_mar_small": -1.0, "eval_runtime": 5.1279, "eval_samples_per_second": 5.655, "eval_steps_per_second": 0.39, "step": 250 }, { "epoch": 2.008, "grad_norm": 39.40643310546875, "learning_rate": 9.760000000000001e-06, "loss": 2.6033, "step": 251 }, { "epoch": 2.016, "grad_norm": 50.726863861083984, "learning_rate": 
9.800000000000001e-06, "loss": 2.3992, "step": 252 }, { "epoch": 2.024, "grad_norm": 53.38621520996094, "learning_rate": 9.84e-06, "loss": 2.6541, "step": 253 }, { "epoch": 2.032, "grad_norm": 74.36039733886719, "learning_rate": 9.88e-06, "loss": 2.6036, "step": 254 }, { "epoch": 2.04, "grad_norm": 176.05856323242188, "learning_rate": 9.92e-06, "loss": 2.076, "step": 255 }, { "epoch": 2.048, "grad_norm": 172.13526916503906, "learning_rate": 9.96e-06, "loss": 2.3677, "step": 256 }, { "epoch": 2.056, "grad_norm": 61.77644729614258, "learning_rate": 1e-05, "loss": 2.4746, "step": 257 }, { "epoch": 2.064, "grad_norm": 81.54024505615234, "learning_rate": 1.004e-05, "loss": 2.1018, "step": 258 }, { "epoch": 2.072, "grad_norm": 36.59720230102539, "learning_rate": 1.008e-05, "loss": 2.3076, "step": 259 }, { "epoch": 2.08, "grad_norm": 33.267311096191406, "learning_rate": 1.012e-05, "loss": 2.1734, "step": 260 }, { "epoch": 2.088, "grad_norm": 72.76124572753906, "learning_rate": 1.016e-05, "loss": 2.2879, "step": 261 }, { "epoch": 2.096, "grad_norm": 1224.3466796875, "learning_rate": 1.02e-05, "loss": 2.1048, "step": 262 }, { "epoch": 2.104, "grad_norm": 160.9603729248047, "learning_rate": 1.024e-05, "loss": 2.4036, "step": 263 }, { "epoch": 2.112, "grad_norm": 88.5594482421875, "learning_rate": 1.0280000000000002e-05, "loss": 2.5326, "step": 264 }, { "epoch": 2.12, "grad_norm": 130.5643768310547, "learning_rate": 1.0320000000000001e-05, "loss": 2.092, "step": 265 }, { "epoch": 2.128, "grad_norm": 51.851070404052734, "learning_rate": 1.036e-05, "loss": 2.5953, "step": 266 }, { "epoch": 2.136, "grad_norm": 92.67764282226562, "learning_rate": 1.04e-05, "loss": 2.2654, "step": 267 }, { "epoch": 2.144, "grad_norm": 97.90907287597656, "learning_rate": 1.0440000000000002e-05, "loss": 1.9029, "step": 268 }, { "epoch": 2.152, "grad_norm": 261.92071533203125, "learning_rate": 1.0480000000000001e-05, "loss": 2.1784, "step": 269 }, { "epoch": 2.16, "grad_norm": 37.40739059448242, 
"learning_rate": 1.0520000000000001e-05, "loss": 2.8531, "step": 270 }, { "epoch": 2.168, "grad_norm": 131.7286376953125, "learning_rate": 1.056e-05, "loss": 2.496, "step": 271 }, { "epoch": 2.176, "grad_norm": 166.04078674316406, "learning_rate": 1.06e-05, "loss": 2.8992, "step": 272 }, { "epoch": 2.184, "grad_norm": 53.211273193359375, "learning_rate": 1.064e-05, "loss": 3.0267, "step": 273 }, { "epoch": 2.192, "grad_norm": 49.57517623901367, "learning_rate": 1.0680000000000001e-05, "loss": 2.1909, "step": 274 }, { "epoch": 2.2, "grad_norm": 37.94221878051758, "learning_rate": 1.072e-05, "loss": 2.2964, "step": 275 }, { "epoch": 2.208, "grad_norm": 49.24840545654297, "learning_rate": 1.076e-05, "loss": 1.8916, "step": 276 }, { "epoch": 2.216, "grad_norm": 59.73188400268555, "learning_rate": 1.08e-05, "loss": 2.5125, "step": 277 }, { "epoch": 2.224, "grad_norm": 203.0868377685547, "learning_rate": 1.084e-05, "loss": 1.9523, "step": 278 }, { "epoch": 2.232, "grad_norm": 687.378173828125, "learning_rate": 1.088e-05, "loss": 2.1064, "step": 279 }, { "epoch": 2.24, "grad_norm": 51.71220397949219, "learning_rate": 1.092e-05, "loss": 2.074, "step": 280 }, { "epoch": 2.248, "grad_norm": 50.3124885559082, "learning_rate": 1.096e-05, "loss": 1.788, "step": 281 }, { "epoch": 2.2560000000000002, "grad_norm": 84.72232055664062, "learning_rate": 1.1000000000000001e-05, "loss": 2.3018, "step": 282 }, { "epoch": 2.2640000000000002, "grad_norm": 90.34915924072266, "learning_rate": 1.1040000000000001e-05, "loss": 2.1277, "step": 283 }, { "epoch": 2.2720000000000002, "grad_norm": 46.22057342529297, "learning_rate": 1.108e-05, "loss": 2.2432, "step": 284 }, { "epoch": 2.2800000000000002, "grad_norm": 44.82282257080078, "learning_rate": 1.112e-05, "loss": 2.4324, "step": 285 }, { "epoch": 2.288, "grad_norm": 78.85606384277344, "learning_rate": 1.1160000000000002e-05, "loss": 1.9494, "step": 286 }, { "epoch": 2.296, "grad_norm": 56.56631088256836, "learning_rate": 
1.1200000000000001e-05, "loss": 2.5278, "step": 287 }, { "epoch": 2.304, "grad_norm": 67.23284149169922, "learning_rate": 1.124e-05, "loss": 1.917, "step": 288 }, { "epoch": 2.312, "grad_norm": 79.56423950195312, "learning_rate": 1.128e-05, "loss": 2.0291, "step": 289 }, { "epoch": 2.32, "grad_norm": 57.38280487060547, "learning_rate": 1.132e-05, "loss": 2.1841, "step": 290 }, { "epoch": 2.328, "grad_norm": 36.522804260253906, "learning_rate": 1.1360000000000001e-05, "loss": 3.5089, "step": 291 }, { "epoch": 2.336, "grad_norm": 31.034595489501953, "learning_rate": 1.1400000000000001e-05, "loss": 2.083, "step": 292 }, { "epoch": 2.344, "grad_norm": 28.933242797851562, "learning_rate": 1.144e-05, "loss": 2.2627, "step": 293 }, { "epoch": 2.352, "grad_norm": 52.85391616821289, "learning_rate": 1.148e-05, "loss": 2.5658, "step": 294 }, { "epoch": 2.36, "grad_norm": 30.031009674072266, "learning_rate": 1.152e-05, "loss": 2.3892, "step": 295 }, { "epoch": 2.368, "grad_norm": 95.13689422607422, "learning_rate": 1.156e-05, "loss": 2.2757, "step": 296 }, { "epoch": 2.376, "grad_norm": 150.7166290283203, "learning_rate": 1.16e-05, "loss": 2.222, "step": 297 }, { "epoch": 2.384, "grad_norm": 96.91800689697266, "learning_rate": 1.164e-05, "loss": 2.385, "step": 298 }, { "epoch": 2.392, "grad_norm": 50.40971374511719, "learning_rate": 1.168e-05, "loss": 2.1415, "step": 299 }, { "epoch": 2.4, "grad_norm": 138.08953857421875, "learning_rate": 1.172e-05, "loss": 2.2024, "step": 300 }, { "epoch": 2.408, "grad_norm": 45.51215362548828, "learning_rate": 1.1760000000000001e-05, "loss": 2.1601, "step": 301 }, { "epoch": 2.416, "grad_norm": 59.66596603393555, "learning_rate": 1.18e-05, "loss": 1.8275, "step": 302 }, { "epoch": 2.424, "grad_norm": 34.29298400878906, "learning_rate": 1.1840000000000002e-05, "loss": 2.7403, "step": 303 }, { "epoch": 2.432, "grad_norm": 74.86711120605469, "learning_rate": 1.1880000000000001e-05, "loss": 1.9664, "step": 304 }, { "epoch": 2.44, "grad_norm": 
43.11443328857422, "learning_rate": 1.1920000000000001e-05, "loss": 2.4439, "step": 305 }, { "epoch": 2.448, "grad_norm": 110.46451568603516, "learning_rate": 1.196e-05, "loss": 1.724, "step": 306 }, { "epoch": 2.456, "grad_norm": 29.48759651184082, "learning_rate": 1.2e-05, "loss": 2.1309, "step": 307 }, { "epoch": 2.464, "grad_norm": 45.893367767333984, "learning_rate": 1.204e-05, "loss": 2.2854, "step": 308 }, { "epoch": 2.472, "grad_norm": 57.40233612060547, "learning_rate": 1.2080000000000001e-05, "loss": 1.9963, "step": 309 }, { "epoch": 2.48, "grad_norm": 72.47592163085938, "learning_rate": 1.2120000000000001e-05, "loss": 1.7987, "step": 310 }, { "epoch": 2.488, "grad_norm": 42.913536071777344, "learning_rate": 1.216e-05, "loss": 1.852, "step": 311 }, { "epoch": 2.496, "grad_norm": 32.600589752197266, "learning_rate": 1.22e-05, "loss": 1.8963, "step": 312 }, { "epoch": 2.504, "grad_norm": 151.99925231933594, "learning_rate": 1.224e-05, "loss": 2.0845, "step": 313 }, { "epoch": 2.512, "grad_norm": 48.653011322021484, "learning_rate": 1.2280000000000001e-05, "loss": 2.8127, "step": 314 }, { "epoch": 2.52, "grad_norm": 30.87468910217285, "learning_rate": 1.232e-05, "loss": 2.8549, "step": 315 }, { "epoch": 2.528, "grad_norm": 52.589630126953125, "learning_rate": 1.236e-05, "loss": 2.2878, "step": 316 }, { "epoch": 2.536, "grad_norm": 71.09173583984375, "learning_rate": 1.24e-05, "loss": 2.0326, "step": 317 }, { "epoch": 2.544, "grad_norm": 64.04904174804688, "learning_rate": 1.244e-05, "loss": 2.0018, "step": 318 }, { "epoch": 2.552, "grad_norm": 50.424530029296875, "learning_rate": 1.248e-05, "loss": 2.1064, "step": 319 }, { "epoch": 2.56, "grad_norm": 247.2557373046875, "learning_rate": 1.252e-05, "loss": 2.4301, "step": 320 }, { "epoch": 2.568, "grad_norm": 84.4631118774414, "learning_rate": 1.256e-05, "loss": 1.9769, "step": 321 }, { "epoch": 2.576, "grad_norm": 292.1867980957031, "learning_rate": 1.2600000000000001e-05, "loss": 1.5092, "step": 322 }, { 
"epoch": 2.584, "grad_norm": 56.897438049316406, "learning_rate": 1.2640000000000003e-05, "loss": 2.0396, "step": 323 }, { "epoch": 2.592, "grad_norm": 86.41392517089844, "learning_rate": 1.268e-05, "loss": 2.7279, "step": 324 }, { "epoch": 2.6, "grad_norm": 31.030855178833008, "learning_rate": 1.2720000000000002e-05, "loss": 2.2786, "step": 325 }, { "epoch": 2.608, "grad_norm": 59.19820785522461, "learning_rate": 1.276e-05, "loss": 1.7783, "step": 326 }, { "epoch": 2.616, "grad_norm": 54.35472106933594, "learning_rate": 1.2800000000000001e-05, "loss": 1.9867, "step": 327 }, { "epoch": 2.624, "grad_norm": 55.79251480102539, "learning_rate": 1.2839999999999999e-05, "loss": 1.8881, "step": 328 }, { "epoch": 2.632, "grad_norm": 53.597251892089844, "learning_rate": 1.288e-05, "loss": 1.8012, "step": 329 }, { "epoch": 2.64, "grad_norm": 55.52839660644531, "learning_rate": 1.2920000000000002e-05, "loss": 2.0027, "step": 330 }, { "epoch": 2.648, "grad_norm": 63.209571838378906, "learning_rate": 1.296e-05, "loss": 1.6098, "step": 331 }, { "epoch": 2.656, "grad_norm": 27.037527084350586, "learning_rate": 1.3000000000000001e-05, "loss": 1.7439, "step": 332 }, { "epoch": 2.664, "grad_norm": 71.8198471069336, "learning_rate": 1.3039999999999999e-05, "loss": 2.0587, "step": 333 }, { "epoch": 2.672, "grad_norm": 46.474090576171875, "learning_rate": 1.308e-05, "loss": 1.8791, "step": 334 }, { "epoch": 2.68, "grad_norm": 57.82027053833008, "learning_rate": 1.3120000000000001e-05, "loss": 2.4165, "step": 335 }, { "epoch": 2.6879999999999997, "grad_norm": 59.35544204711914, "learning_rate": 1.316e-05, "loss": 1.8466, "step": 336 }, { "epoch": 2.6959999999999997, "grad_norm": 43.2574577331543, "learning_rate": 1.32e-05, "loss": 2.1831, "step": 337 }, { "epoch": 2.7039999999999997, "grad_norm": 48.62727737426758, "learning_rate": 1.324e-05, "loss": 1.9553, "step": 338 }, { "epoch": 2.7119999999999997, "grad_norm": 56.28993225097656, "learning_rate": 1.3280000000000002e-05, "loss": 
1.8955, "step": 339 }, { "epoch": 2.7199999999999998, "grad_norm": 81.49232482910156, "learning_rate": 1.3320000000000001e-05, "loss": 1.9789, "step": 340 }, { "epoch": 2.7279999999999998, "grad_norm": 162.9506378173828, "learning_rate": 1.336e-05, "loss": 2.5699, "step": 341 }, { "epoch": 2.7359999999999998, "grad_norm": 61.69477844238281, "learning_rate": 1.3400000000000002e-05, "loss": 1.5233, "step": 342 }, { "epoch": 2.7439999999999998, "grad_norm": 36.2951774597168, "learning_rate": 1.344e-05, "loss": 2.153, "step": 343 }, { "epoch": 2.752, "grad_norm": 39.258731842041016, "learning_rate": 1.3480000000000001e-05, "loss": 1.799, "step": 344 }, { "epoch": 2.76, "grad_norm": 114.69364166259766, "learning_rate": 1.352e-05, "loss": 2.162, "step": 345 }, { "epoch": 2.768, "grad_norm": 66.65507507324219, "learning_rate": 1.356e-05, "loss": 2.3778, "step": 346 }, { "epoch": 2.776, "grad_norm": 36.43579864501953, "learning_rate": 1.3600000000000002e-05, "loss": 3.0387, "step": 347 }, { "epoch": 2.784, "grad_norm": 93.34954833984375, "learning_rate": 1.364e-05, "loss": 2.524, "step": 348 }, { "epoch": 2.792, "grad_norm": 125.66114044189453, "learning_rate": 1.3680000000000001e-05, "loss": 2.0317, "step": 349 }, { "epoch": 2.8, "grad_norm": 140.08042907714844, "learning_rate": 1.3719999999999999e-05, "loss": 2.3824, "step": 350 }, { "epoch": 2.808, "grad_norm": 40.36646270751953, "learning_rate": 1.376e-05, "loss": 2.0749, "step": 351 }, { "epoch": 2.816, "grad_norm": 206.07095336914062, "learning_rate": 1.3800000000000002e-05, "loss": 2.2653, "step": 352 }, { "epoch": 2.824, "grad_norm": 43.985809326171875, "learning_rate": 1.384e-05, "loss": 1.9578, "step": 353 }, { "epoch": 2.832, "grad_norm": 73.71430206298828, "learning_rate": 1.3880000000000001e-05, "loss": 2.0711, "step": 354 }, { "epoch": 2.84, "grad_norm": 36.876102447509766, "learning_rate": 1.3919999999999999e-05, "loss": 2.0957, "step": 355 }, { "epoch": 2.848, "grad_norm": 117.355224609375, "learning_rate": 
1.396e-05, "loss": 2.117, "step": 356 }, { "epoch": 2.856, "grad_norm": 48.71852493286133, "learning_rate": 1.4000000000000001e-05, "loss": 2.8482, "step": 357 }, { "epoch": 2.864, "grad_norm": 118.73247528076172, "learning_rate": 1.4040000000000001e-05, "loss": 1.8168, "step": 358 }, { "epoch": 2.872, "grad_norm": 34.8645133972168, "learning_rate": 1.408e-05, "loss": 1.6391, "step": 359 }, { "epoch": 2.88, "grad_norm": 125.28209686279297, "learning_rate": 1.412e-05, "loss": 2.1219, "step": 360 }, { "epoch": 2.888, "grad_norm": 31.454875946044922, "learning_rate": 1.4160000000000002e-05, "loss": 1.8796, "step": 361 }, { "epoch": 2.896, "grad_norm": 26.860172271728516, "learning_rate": 1.42e-05, "loss": 2.139, "step": 362 }, { "epoch": 2.904, "grad_norm": 144.8955535888672, "learning_rate": 1.4240000000000001e-05, "loss": 1.8899, "step": 363 }, { "epoch": 2.912, "grad_norm": 33.64907455444336, "learning_rate": 1.4280000000000002e-05, "loss": 2.3266, "step": 364 }, { "epoch": 2.92, "grad_norm": 27.8485107421875, "learning_rate": 1.432e-05, "loss": 1.9192, "step": 365 }, { "epoch": 2.928, "grad_norm": 378.2554626464844, "learning_rate": 1.4360000000000001e-05, "loss": 2.2981, "step": 366 }, { "epoch": 2.936, "grad_norm": 40.27680206298828, "learning_rate": 1.44e-05, "loss": 3.2389, "step": 367 }, { "epoch": 2.944, "grad_norm": 101.9461669921875, "learning_rate": 1.444e-05, "loss": 2.6813, "step": 368 }, { "epoch": 2.952, "grad_norm": 51.557373046875, "learning_rate": 1.4480000000000002e-05, "loss": 1.9848, "step": 369 }, { "epoch": 2.96, "grad_norm": 82.65685272216797, "learning_rate": 1.452e-05, "loss": 1.8172, "step": 370 }, { "epoch": 2.968, "grad_norm": 78.33104705810547, "learning_rate": 1.4560000000000001e-05, "loss": 1.9507, "step": 371 }, { "epoch": 2.976, "grad_norm": 72.49238586425781, "learning_rate": 1.4599999999999999e-05, "loss": 2.7772, "step": 372 }, { "epoch": 2.984, "grad_norm": 28.128610610961914, "learning_rate": 1.464e-05, "loss": 1.5382, "step": 
373 }, { "epoch": 2.992, "grad_norm": 101.78814697265625, "learning_rate": 1.4680000000000002e-05, "loss": 2.166, "step": 374 }, { "epoch": 3.0, "grad_norm": 33.306236267089844, "learning_rate": 1.472e-05, "loss": 2.0813, "step": 375 }, { "epoch": 3.0, "eval_loss": 2.3122060298919678, "eval_map": 0.0112, "eval_map_50": 0.0249, "eval_map_75": 0.0074, "eval_map_Coverall": 0.0332, "eval_map_Face_Shield": 0.0, "eval_map_Gloves": 0.007, "eval_map_Goggles": 0.0, "eval_map_Mask": 0.0156, "eval_map_large": 0.0176, "eval_map_medium": 0.0054, "eval_map_small": -1.0, "eval_mar_1": 0.0205, "eval_mar_10": 0.1201, "eval_mar_100": 0.1953, "eval_mar_100_Coverall": 0.5267, "eval_mar_100_Face_Shield": 0.0, "eval_mar_100_Gloves": 0.1557, "eval_mar_100_Goggles": 0.0, "eval_mar_100_Mask": 0.2942, "eval_mar_large": 0.2333, "eval_mar_medium": 0.072, "eval_mar_small": -1.0, "eval_runtime": 3.8646, "eval_samples_per_second": 7.504, "eval_steps_per_second": 0.518, "step": 375 }, { "epoch": 3.008, "grad_norm": 52.80413055419922, "learning_rate": 1.4760000000000001e-05, "loss": 2.2911, "step": 376 }, { "epoch": 3.016, "grad_norm": 88.40685272216797, "learning_rate": 1.48e-05, "loss": 2.0524, "step": 377 }, { "epoch": 3.024, "grad_norm": 32.43350601196289, "learning_rate": 1.4840000000000002e-05, "loss": 1.9958, "step": 378 }, { "epoch": 3.032, "grad_norm": 74.32752990722656, "learning_rate": 1.488e-05, "loss": 2.1392, "step": 379 }, { "epoch": 3.04, "grad_norm": 66.74808502197266, "learning_rate": 1.4920000000000001e-05, "loss": 2.2912, "step": 380 }, { "epoch": 3.048, "grad_norm": 51.44910430908203, "learning_rate": 1.4960000000000002e-05, "loss": 2.1773, "step": 381 }, { "epoch": 3.056, "grad_norm": 73.20143127441406, "learning_rate": 1.5e-05, "loss": 1.4737, "step": 382 }, { "epoch": 3.064, "grad_norm": 466.8901672363281, "learning_rate": 1.5040000000000002e-05, "loss": 3.6267, "step": 383 }, { "epoch": 3.072, "grad_norm": 248.07421875, "learning_rate": 1.508e-05, "loss": 2.3962, "step": 
384 }, { "epoch": 3.08, "grad_norm": 65.55439758300781, "learning_rate": 1.5120000000000001e-05, "loss": 2.0188, "step": 385 }, { "epoch": 3.088, "grad_norm": 68.95693969726562, "learning_rate": 1.5160000000000002e-05, "loss": 2.1386, "step": 386 }, { "epoch": 3.096, "grad_norm": 61.28804016113281, "learning_rate": 1.52e-05, "loss": 1.942, "step": 387 }, { "epoch": 3.104, "grad_norm": 272.7109680175781, "learning_rate": 1.5240000000000001e-05, "loss": 2.3847, "step": 388 }, { "epoch": 3.112, "grad_norm": 38.75571823120117, "learning_rate": 1.528e-05, "loss": 2.0857, "step": 389 }, { "epoch": 3.12, "grad_norm": 43.487388610839844, "learning_rate": 1.5320000000000002e-05, "loss": 1.9707, "step": 390 }, { "epoch": 3.128, "grad_norm": 115.66447448730469, "learning_rate": 1.536e-05, "loss": 2.1673, "step": 391 }, { "epoch": 3.136, "grad_norm": 37.15654754638672, "learning_rate": 1.54e-05, "loss": 2.3778, "step": 392 }, { "epoch": 3.144, "grad_norm": 92.24887084960938, "learning_rate": 1.544e-05, "loss": 2.0552, "step": 393 }, { "epoch": 3.152, "grad_norm": 107.00526428222656, "learning_rate": 1.548e-05, "loss": 1.8879, "step": 394 }, { "epoch": 3.16, "grad_norm": 49.70380783081055, "learning_rate": 1.552e-05, "loss": 2.1704, "step": 395 }, { "epoch": 3.168, "grad_norm": 24.562654495239258, "learning_rate": 1.556e-05, "loss": 2.4545, "step": 396 }, { "epoch": 3.176, "grad_norm": 112.97836303710938, "learning_rate": 1.56e-05, "loss": 2.0826, "step": 397 }, { "epoch": 3.184, "grad_norm": 210.188720703125, "learning_rate": 1.5640000000000003e-05, "loss": 1.8722, "step": 398 }, { "epoch": 3.192, "grad_norm": 35.67578125, "learning_rate": 1.568e-05, "loss": 2.8365, "step": 399 }, { "epoch": 3.2, "grad_norm": 71.40760040283203, "learning_rate": 1.5720000000000002e-05, "loss": 1.9652, "step": 400 }, { "epoch": 3.208, "grad_norm": 37.027008056640625, "learning_rate": 1.5759999999999998e-05, "loss": 2.0436, "step": 401 }, { "epoch": 3.216, "grad_norm": 239.24008178710938, 
"learning_rate": 1.58e-05, "loss": 1.6139, "step": 402 }, { "epoch": 3.224, "grad_norm": 44.519351959228516, "learning_rate": 1.584e-05, "loss": 1.678, "step": 403 }, { "epoch": 3.232, "grad_norm": 58.42421340942383, "learning_rate": 1.588e-05, "loss": 2.1748, "step": 404 }, { "epoch": 3.24, "grad_norm": 118.97122192382812, "learning_rate": 1.592e-05, "loss": 1.9105, "step": 405 }, { "epoch": 3.248, "grad_norm": 37.9080924987793, "learning_rate": 1.596e-05, "loss": 1.589, "step": 406 }, { "epoch": 3.2560000000000002, "grad_norm": 46.215946197509766, "learning_rate": 1.6000000000000003e-05, "loss": 2.251, "step": 407 }, { "epoch": 3.2640000000000002, "grad_norm": 33.926395416259766, "learning_rate": 1.604e-05, "loss": 1.6076, "step": 408 }, { "epoch": 3.2720000000000002, "grad_norm": 42.20576477050781, "learning_rate": 1.6080000000000002e-05, "loss": 1.8139, "step": 409 }, { "epoch": 3.2800000000000002, "grad_norm": 31.049842834472656, "learning_rate": 1.612e-05, "loss": 1.8921, "step": 410 }, { "epoch": 3.288, "grad_norm": 111.8499984741211, "learning_rate": 1.616e-05, "loss": 2.4626, "step": 411 }, { "epoch": 3.296, "grad_norm": 62.97211837768555, "learning_rate": 1.62e-05, "loss": 2.2227, "step": 412 }, { "epoch": 3.304, "grad_norm": 27.85821533203125, "learning_rate": 1.624e-05, "loss": 1.951, "step": 413 }, { "epoch": 3.312, "grad_norm": 110.91194152832031, "learning_rate": 1.628e-05, "loss": 1.9355, "step": 414 }, { "epoch": 3.32, "grad_norm": 445.6070861816406, "learning_rate": 1.6320000000000003e-05, "loss": 2.0597, "step": 415 }, { "epoch": 3.328, "grad_norm": 96.45201110839844, "learning_rate": 1.636e-05, "loss": 1.9671, "step": 416 }, { "epoch": 3.336, "grad_norm": 70.24889373779297, "learning_rate": 1.6400000000000002e-05, "loss": 1.9062, "step": 417 }, { "epoch": 3.344, "grad_norm": 38.19294738769531, "learning_rate": 1.644e-05, "loss": 1.8195, "step": 418 }, { "epoch": 3.352, "grad_norm": 34.101234436035156, "learning_rate": 1.648e-05, "loss": 1.9693, 
"step": 419 }, { "epoch": 3.36, "grad_norm": 61.45088195800781, "learning_rate": 1.652e-05, "loss": 2.1167, "step": 420 }, { "epoch": 3.368, "grad_norm": 39.59849166870117, "learning_rate": 1.656e-05, "loss": 2.2704, "step": 421 }, { "epoch": 3.376, "grad_norm": 79.68204498291016, "learning_rate": 1.66e-05, "loss": 1.9875, "step": 422 }, { "epoch": 3.384, "grad_norm": 35.363670349121094, "learning_rate": 1.664e-05, "loss": 2.0907, "step": 423 }, { "epoch": 3.392, "grad_norm": 181.94827270507812, "learning_rate": 1.668e-05, "loss": 3.2711, "step": 424 }, { "epoch": 3.4, "grad_norm": 132.45245361328125, "learning_rate": 1.672e-05, "loss": 1.5414, "step": 425 }, { "epoch": 3.408, "grad_norm": 119.83882141113281, "learning_rate": 1.6760000000000002e-05, "loss": 1.789, "step": 426 }, { "epoch": 3.416, "grad_norm": 122.90579986572266, "learning_rate": 1.6800000000000002e-05, "loss": 2.1137, "step": 427 }, { "epoch": 3.424, "grad_norm": 34.288063049316406, "learning_rate": 1.684e-05, "loss": 1.8294, "step": 428 }, { "epoch": 3.432, "grad_norm": 33.30628204345703, "learning_rate": 1.688e-05, "loss": 2.0441, "step": 429 }, { "epoch": 3.44, "grad_norm": 27.0107421875, "learning_rate": 1.692e-05, "loss": 2.105, "step": 430 }, { "epoch": 3.448, "grad_norm": 34.81801986694336, "learning_rate": 1.696e-05, "loss": 2.2047, "step": 431 }, { "epoch": 3.456, "grad_norm": 63.026283264160156, "learning_rate": 1.7000000000000003e-05, "loss": 2.391, "step": 432 }, { "epoch": 3.464, "grad_norm": 69.45915222167969, "learning_rate": 1.704e-05, "loss": 1.9138, "step": 433 }, { "epoch": 3.472, "grad_norm": 34.77034378051758, "learning_rate": 1.7080000000000002e-05, "loss": 1.8953, "step": 434 }, { "epoch": 3.48, "grad_norm": 48.59021759033203, "learning_rate": 1.712e-05, "loss": 1.9903, "step": 435 }, { "epoch": 3.488, "grad_norm": 70.6480484008789, "learning_rate": 1.7160000000000002e-05, "loss": 1.8928, "step": 436 }, { "epoch": 3.496, "grad_norm": 67.3900146484375, "learning_rate": 
1.7199999999999998e-05, "loss": 1.9622, "step": 437 }, { "epoch": 3.504, "grad_norm": 28.18996810913086, "learning_rate": 1.724e-05, "loss": 2.4716, "step": 438 }, { "epoch": 3.512, "grad_norm": 35.31182098388672, "learning_rate": 1.728e-05, "loss": 2.7351, "step": 439 }, { "epoch": 3.52, "grad_norm": 84.55098724365234, "learning_rate": 1.732e-05, "loss": 2.0066, "step": 440 }, { "epoch": 3.528, "grad_norm": 45.07231903076172, "learning_rate": 1.736e-05, "loss": 1.5244, "step": 441 }, { "epoch": 3.536, "grad_norm": 64.9188003540039, "learning_rate": 1.74e-05, "loss": 1.5072, "step": 442 }, { "epoch": 3.544, "grad_norm": 55.2289924621582, "learning_rate": 1.7440000000000002e-05, "loss": 2.007, "step": 443 }, { "epoch": 3.552, "grad_norm": 85.31075286865234, "learning_rate": 1.7480000000000002e-05, "loss": 2.3092, "step": 444 }, { "epoch": 3.56, "grad_norm": 122.44104766845703, "learning_rate": 1.752e-05, "loss": 1.7834, "step": 445 }, { "epoch": 3.568, "grad_norm": 39.47575759887695, "learning_rate": 1.756e-05, "loss": 1.6332, "step": 446 }, { "epoch": 3.576, "grad_norm": 48.50057601928711, "learning_rate": 1.76e-05, "loss": 2.1607, "step": 447 }, { "epoch": 3.584, "grad_norm": 50.25776672363281, "learning_rate": 1.764e-05, "loss": 1.6387, "step": 448 }, { "epoch": 3.592, "grad_norm": 104.55358123779297, "learning_rate": 1.7680000000000004e-05, "loss": 1.9352, "step": 449 }, { "epoch": 3.6, "grad_norm": 64.26606750488281, "learning_rate": 1.772e-05, "loss": 1.9418, "step": 450 }, { "epoch": 3.608, "grad_norm": 176.99349975585938, "learning_rate": 1.7760000000000003e-05, "loss": 1.7859, "step": 451 }, { "epoch": 3.616, "grad_norm": 40.675716400146484, "learning_rate": 1.78e-05, "loss": 2.5081, "step": 452 }, { "epoch": 3.624, "grad_norm": 42.773468017578125, "learning_rate": 1.7840000000000002e-05, "loss": 1.5694, "step": 453 }, { "epoch": 3.632, "grad_norm": 43.35564041137695, "learning_rate": 1.7879999999999998e-05, "loss": 1.4251, "step": 454 }, { "epoch": 3.64, 
"grad_norm": 58.18408966064453, "learning_rate": 1.792e-05, "loss": 1.9614, "step": 455 }, { "epoch": 3.648, "grad_norm": 101.57181549072266, "learning_rate": 1.796e-05, "loss": 2.1699, "step": 456 }, { "epoch": 3.656, "grad_norm": 39.314117431640625, "learning_rate": 1.8e-05, "loss": 1.9693, "step": 457 }, { "epoch": 3.664, "grad_norm": 36.13252258300781, "learning_rate": 1.804e-05, "loss": 2.1346, "step": 458 }, { "epoch": 3.672, "grad_norm": 229.06832885742188, "learning_rate": 1.808e-05, "loss": 2.1151, "step": 459 }, { "epoch": 3.68, "grad_norm": 76.81083679199219, "learning_rate": 1.812e-05, "loss": 2.0737, "step": 460 }, { "epoch": 3.6879999999999997, "grad_norm": 31.367904663085938, "learning_rate": 1.8160000000000002e-05, "loss": 2.5988, "step": 461 }, { "epoch": 3.6959999999999997, "grad_norm": 40.239830017089844, "learning_rate": 1.8200000000000002e-05, "loss": 2.2701, "step": 462 }, { "epoch": 3.7039999999999997, "grad_norm": 24.977514266967773, "learning_rate": 1.824e-05, "loss": 2.2707, "step": 463 }, { "epoch": 3.7119999999999997, "grad_norm": 37.11481857299805, "learning_rate": 1.828e-05, "loss": 2.1916, "step": 464 }, { "epoch": 3.7199999999999998, "grad_norm": 32.68657302856445, "learning_rate": 1.832e-05, "loss": 2.6613, "step": 465 }, { "epoch": 3.7279999999999998, "grad_norm": 86.15445709228516, "learning_rate": 1.8360000000000004e-05, "loss": 1.6418, "step": 466 }, { "epoch": 3.7359999999999998, "grad_norm": 61.08357238769531, "learning_rate": 1.84e-05, "loss": 2.0083, "step": 467 }, { "epoch": 3.7439999999999998, "grad_norm": 28.14809226989746, "learning_rate": 1.8440000000000003e-05, "loss": 2.7326, "step": 468 }, { "epoch": 3.752, "grad_norm": 128.6154022216797, "learning_rate": 1.848e-05, "loss": 1.9052, "step": 469 }, { "epoch": 3.76, "grad_norm": 42.86823272705078, "learning_rate": 1.8520000000000002e-05, "loss": 1.8922, "step": 470 }, { "epoch": 3.768, "grad_norm": 39.10599899291992, "learning_rate": 1.856e-05, "loss": 2.1006, "step": 
471 }, { "epoch": 3.776, "grad_norm": 41.55687713623047, "learning_rate": 1.86e-05, "loss": 2.272, "step": 472 }, { "epoch": 3.784, "grad_norm": 139.12828063964844, "learning_rate": 1.864e-05, "loss": 1.7843, "step": 473 }, { "epoch": 3.792, "grad_norm": 65.83170318603516, "learning_rate": 1.868e-05, "loss": 1.8048, "step": 474 }, { "epoch": 3.8, "grad_norm": 95.87605285644531, "learning_rate": 1.872e-05, "loss": 1.83, "step": 475 }, { "epoch": 3.808, "grad_norm": 58.01918411254883, "learning_rate": 1.876e-05, "loss": 1.7956, "step": 476 }, { "epoch": 3.816, "grad_norm": 41.494930267333984, "learning_rate": 1.88e-05, "loss": 2.378, "step": 477 }, { "epoch": 3.824, "grad_norm": 68.37093353271484, "learning_rate": 1.8840000000000003e-05, "loss": 1.8718, "step": 478 }, { "epoch": 3.832, "grad_norm": 45.310211181640625, "learning_rate": 1.888e-05, "loss": 1.9396, "step": 479 }, { "epoch": 3.84, "grad_norm": 55.037696838378906, "learning_rate": 1.8920000000000002e-05, "loss": 1.9462, "step": 480 }, { "epoch": 3.848, "grad_norm": 71.48654174804688, "learning_rate": 1.896e-05, "loss": 2.305, "step": 481 }, { "epoch": 3.856, "grad_norm": 72.05767059326172, "learning_rate": 1.9e-05, "loss": 2.1199, "step": 482 }, { "epoch": 3.864, "grad_norm": 38.36381530761719, "learning_rate": 1.904e-05, "loss": 2.1868, "step": 483 }, { "epoch": 3.872, "grad_norm": 93.61650085449219, "learning_rate": 1.908e-05, "loss": 2.185, "step": 484 }, { "epoch": 3.88, "grad_norm": 170.9607391357422, "learning_rate": 1.9120000000000003e-05, "loss": 1.6729, "step": 485 }, { "epoch": 3.888, "grad_norm": 85.98609924316406, "learning_rate": 1.916e-05, "loss": 1.8832, "step": 486 }, { "epoch": 3.896, "grad_norm": 74.00772857666016, "learning_rate": 1.9200000000000003e-05, "loss": 1.898, "step": 487 }, { "epoch": 3.904, "grad_norm": 131.0572967529297, "learning_rate": 1.924e-05, "loss": 1.8703, "step": 488 }, { "epoch": 3.912, "grad_norm": 55.103153228759766, "learning_rate": 1.9280000000000002e-05, 
"loss": 1.9462, "step": 489 }, { "epoch": 3.92, "grad_norm": 51.951717376708984, "learning_rate": 1.932e-05, "loss": 1.7467, "step": 490 }, { "epoch": 3.928, "grad_norm": 41.3943977355957, "learning_rate": 1.936e-05, "loss": 2.0752, "step": 491 }, { "epoch": 3.936, "grad_norm": 39.20026779174805, "learning_rate": 1.94e-05, "loss": 2.9686, "step": 492 }, { "epoch": 3.944, "grad_norm": 48.24716567993164, "learning_rate": 1.944e-05, "loss": 2.0835, "step": 493 }, { "epoch": 3.952, "grad_norm": 142.83473205566406, "learning_rate": 1.948e-05, "loss": 1.9451, "step": 494 }, { "epoch": 3.96, "grad_norm": 151.28143310546875, "learning_rate": 1.9520000000000003e-05, "loss": 1.7078, "step": 495 }, { "epoch": 3.968, "grad_norm": 66.08721923828125, "learning_rate": 1.956e-05, "loss": 2.1238, "step": 496 }, { "epoch": 3.976, "grad_norm": 79.62031555175781, "learning_rate": 1.9600000000000002e-05, "loss": 2.6447, "step": 497 }, { "epoch": 3.984, "grad_norm": 107.43721008300781, "learning_rate": 1.9640000000000002e-05, "loss": 1.6716, "step": 498 }, { "epoch": 3.992, "grad_norm": 34.98879623413086, "learning_rate": 1.968e-05, "loss": 2.3847, "step": 499 }, { "epoch": 4.0, "grad_norm": 690.2536010742188, "learning_rate": 1.972e-05, "loss": 1.5189, "step": 500 }, { "epoch": 4.0, "eval_loss": 2.065403938293457, "eval_map": 0.0434, "eval_map_50": 0.0916, "eval_map_75": 0.0369, "eval_map_Coverall": 0.1702, "eval_map_Face_Shield": 0.0, "eval_map_Gloves": 0.0175, "eval_map_Goggles": 0.0, "eval_map_Mask": 0.0292, "eval_map_large": 0.0481, "eval_map_medium": 0.0122, "eval_map_small": -1.0, "eval_mar_1": 0.0733, "eval_mar_10": 0.1834, "eval_mar_100": 0.2482, "eval_mar_100_Coverall": 0.6, "eval_mar_100_Face_Shield": 0.0, "eval_mar_100_Gloves": 0.2738, "eval_mar_100_Goggles": 0.0, "eval_mar_100_Mask": 0.3673, "eval_mar_large": 0.2756, "eval_mar_medium": 0.1312, "eval_mar_small": -1.0, "eval_runtime": 4.056, "eval_samples_per_second": 7.15, "eval_steps_per_second": 0.493, "step": 500 }, { 
"epoch": 4.008, "grad_norm": 213.56011962890625, "learning_rate": 1.976e-05, "loss": 2.3688, "step": 501 }, { "epoch": 4.016, "grad_norm": 36.80381774902344, "learning_rate": 1.9800000000000004e-05, "loss": 1.5491, "step": 502 }, { "epoch": 4.024, "grad_norm": 37.24774932861328, "learning_rate": 1.984e-05, "loss": 1.6311, "step": 503 }, { "epoch": 4.032, "grad_norm": 43.581146240234375, "learning_rate": 1.9880000000000003e-05, "loss": 2.0199, "step": 504 }, { "epoch": 4.04, "grad_norm": 37.4881591796875, "learning_rate": 1.992e-05, "loss": 1.8715, "step": 505 }, { "epoch": 4.048, "grad_norm": 41.86263656616211, "learning_rate": 1.9960000000000002e-05, "loss": 2.0841, "step": 506 }, { "epoch": 4.056, "grad_norm": 30.05976676940918, "learning_rate": 2e-05, "loss": 2.3186, "step": 507 }, { "epoch": 4.064, "grad_norm": 74.99699401855469, "learning_rate": 2.004e-05, "loss": 2.2129, "step": 508 }, { "epoch": 4.072, "grad_norm": 30.653533935546875, "learning_rate": 2.008e-05, "loss": 1.9534, "step": 509 }, { "epoch": 4.08, "grad_norm": 51.2486457824707, "learning_rate": 2.012e-05, "loss": 1.8672, "step": 510 }, { "epoch": 4.088, "grad_norm": 37.590797424316406, "learning_rate": 2.016e-05, "loss": 1.7624, "step": 511 }, { "epoch": 4.096, "grad_norm": 255.04005432128906, "learning_rate": 2.0200000000000003e-05, "loss": 1.9707, "step": 512 }, { "epoch": 4.104, "grad_norm": 37.34677505493164, "learning_rate": 2.024e-05, "loss": 1.5684, "step": 513 }, { "epoch": 4.112, "grad_norm": 27.298480987548828, "learning_rate": 2.0280000000000002e-05, "loss": 2.2293, "step": 514 }, { "epoch": 4.12, "grad_norm": 26.77246856689453, "learning_rate": 2.032e-05, "loss": 2.0494, "step": 515 }, { "epoch": 4.128, "grad_norm": 63.78914260864258, "learning_rate": 2.036e-05, "loss": 3.1964, "step": 516 }, { "epoch": 4.136, "grad_norm": 43.779605865478516, "learning_rate": 2.04e-05, "loss": 2.1317, "step": 517 }, { "epoch": 4.144, "grad_norm": 28.60671615600586, "learning_rate": 2.044e-05, "loss": 
2.1227, "step": 518 }, { "epoch": 4.152, "grad_norm": 55.6387939453125, "learning_rate": 2.048e-05, "loss": 2.1883, "step": 519 }, { "epoch": 4.16, "grad_norm": 88.91744995117188, "learning_rate": 2.052e-05, "loss": 2.1507, "step": 520 }, { "epoch": 4.168, "grad_norm": 27.12938690185547, "learning_rate": 2.0560000000000003e-05, "loss": 1.7588, "step": 521 }, { "epoch": 4.176, "grad_norm": 38.345794677734375, "learning_rate": 2.06e-05, "loss": 2.15, "step": 522 }, { "epoch": 4.184, "grad_norm": 33.177040100097656, "learning_rate": 2.0640000000000002e-05, "loss": 2.1614, "step": 523 }, { "epoch": 4.192, "grad_norm": 119.05635070800781, "learning_rate": 2.0680000000000002e-05, "loss": 2.4004, "step": 524 }, { "epoch": 4.2, "grad_norm": 64.1428451538086, "learning_rate": 2.072e-05, "loss": 1.8897, "step": 525 }, { "epoch": 4.208, "grad_norm": 34.35500717163086, "learning_rate": 2.076e-05, "loss": 1.8187, "step": 526 }, { "epoch": 4.216, "grad_norm": 34.7771110534668, "learning_rate": 2.08e-05, "loss": 2.2632, "step": 527 }, { "epoch": 4.224, "grad_norm": 58.736820220947266, "learning_rate": 2.084e-05, "loss": 1.7782, "step": 528 }, { "epoch": 4.232, "grad_norm": 42.68964767456055, "learning_rate": 2.0880000000000003e-05, "loss": 2.0164, "step": 529 }, { "epoch": 4.24, "grad_norm": 91.17841339111328, "learning_rate": 2.092e-05, "loss": 2.0212, "step": 530 }, { "epoch": 4.248, "grad_norm": 40.67975997924805, "learning_rate": 2.0960000000000003e-05, "loss": 1.832, "step": 531 }, { "epoch": 4.256, "grad_norm": 37.67129898071289, "learning_rate": 2.1e-05, "loss": 1.6608, "step": 532 }, { "epoch": 4.264, "grad_norm": 59.465415954589844, "learning_rate": 2.1040000000000002e-05, "loss": 1.6949, "step": 533 }, { "epoch": 4.272, "grad_norm": 39.28069305419922, "learning_rate": 2.1079999999999998e-05, "loss": 1.8612, "step": 534 }, { "epoch": 4.28, "grad_norm": 49.91886901855469, "learning_rate": 2.112e-05, "loss": 2.2597, "step": 535 }, { "epoch": 4.288, "grad_norm": 
51.30653762817383, "learning_rate": 2.116e-05, "loss": 1.9628, "step": 536 }, { "epoch": 4.296, "grad_norm": 34.23979568481445, "learning_rate": 2.12e-05, "loss": 1.6682, "step": 537 }, { "epoch": 4.304, "grad_norm": 129.12928771972656, "learning_rate": 2.124e-05, "loss": 1.7062, "step": 538 }, { "epoch": 4.312, "grad_norm": 63.03652572631836, "learning_rate": 2.128e-05, "loss": 2.2172, "step": 539 }, { "epoch": 4.32, "grad_norm": 31.76857566833496, "learning_rate": 2.1320000000000003e-05, "loss": 1.7776, "step": 540 }, { "epoch": 4.328, "grad_norm": 129.7205810546875, "learning_rate": 2.1360000000000002e-05, "loss": 1.731, "step": 541 }, { "epoch": 4.336, "grad_norm": 40.54275131225586, "learning_rate": 2.1400000000000002e-05, "loss": 1.6681, "step": 542 }, { "epoch": 4.344, "grad_norm": 48.4668083190918, "learning_rate": 2.144e-05, "loss": 2.4678, "step": 543 }, { "epoch": 4.352, "grad_norm": 85.39000701904297, "learning_rate": 2.148e-05, "loss": 2.344, "step": 544 }, { "epoch": 4.36, "grad_norm": 48.075538635253906, "learning_rate": 2.152e-05, "loss": 2.0467, "step": 545 }, { "epoch": 4.368, "grad_norm": 50.60781478881836, "learning_rate": 2.1560000000000004e-05, "loss": 1.7894, "step": 546 }, { "epoch": 4.376, "grad_norm": 51.76850891113281, "learning_rate": 2.16e-05, "loss": 1.9974, "step": 547 }, { "epoch": 4.384, "grad_norm": 51.28594207763672, "learning_rate": 2.1640000000000003e-05, "loss": 1.7943, "step": 548 }, { "epoch": 4.392, "grad_norm": 58.66964340209961, "learning_rate": 2.168e-05, "loss": 1.7789, "step": 549 }, { "epoch": 4.4, "grad_norm": 50.7975959777832, "learning_rate": 2.1720000000000002e-05, "loss": 2.1568, "step": 550 }, { "epoch": 4.408, "grad_norm": 44.10758590698242, "learning_rate": 2.176e-05, "loss": 2.1729, "step": 551 }, { "epoch": 4.416, "grad_norm": 38.370880126953125, "learning_rate": 2.18e-05, "loss": 2.3436, "step": 552 }, { "epoch": 4.424, "grad_norm": 37.50664138793945, "learning_rate": 2.184e-05, "loss": 1.8594, "step": 553 
}, { "epoch": 4.432, "grad_norm": 382.5447692871094, "learning_rate": 2.188e-05, "loss": 2.0742, "step": 554 }, { "epoch": 4.44, "grad_norm": 71.58792877197266, "learning_rate": 2.192e-05, "loss": 1.6765, "step": 555 }, { "epoch": 4.448, "grad_norm": 36.87540054321289, "learning_rate": 2.196e-05, "loss": 1.9544, "step": 556 }, { "epoch": 4.456, "grad_norm": 107.00870513916016, "learning_rate": 2.2000000000000003e-05, "loss": 1.8905, "step": 557 }, { "epoch": 4.464, "grad_norm": 78.26593017578125, "learning_rate": 2.2040000000000002e-05, "loss": 1.9652, "step": 558 }, { "epoch": 4.4719999999999995, "grad_norm": 164.8754119873047, "learning_rate": 2.2080000000000002e-05, "loss": 2.1457, "step": 559 }, { "epoch": 4.48, "grad_norm": 79.11858367919922, "learning_rate": 2.212e-05, "loss": 2.2029, "step": 560 }, { "epoch": 4.4879999999999995, "grad_norm": 37.65850067138672, "learning_rate": 2.216e-05, "loss": 3.6684, "step": 561 }, { "epoch": 4.496, "grad_norm": 43.14323806762695, "learning_rate": 2.22e-05, "loss": 1.5636, "step": 562 }, { "epoch": 4.504, "grad_norm": 50.906822204589844, "learning_rate": 2.224e-05, "loss": 1.6341, "step": 563 }, { "epoch": 4.5120000000000005, "grad_norm": 97.14201354980469, "learning_rate": 2.228e-05, "loss": 2.6054, "step": 564 }, { "epoch": 4.52, "grad_norm": 34.852386474609375, "learning_rate": 2.2320000000000003e-05, "loss": 1.9201, "step": 565 }, { "epoch": 4.5280000000000005, "grad_norm": 32.71520233154297, "learning_rate": 2.236e-05, "loss": 1.8689, "step": 566 }, { "epoch": 4.536, "grad_norm": 73.92832946777344, "learning_rate": 2.2400000000000002e-05, "loss": 2.4786, "step": 567 }, { "epoch": 4.5440000000000005, "grad_norm": 61.985679626464844, "learning_rate": 2.244e-05, "loss": 2.2993, "step": 568 }, { "epoch": 4.552, "grad_norm": 54.1404914855957, "learning_rate": 2.248e-05, "loss": 1.7424, "step": 569 }, { "epoch": 4.5600000000000005, "grad_norm": 29.013261795043945, "learning_rate": 2.252e-05, "loss": 1.9061, "step": 570 }, 
{ "epoch": 4.568, "grad_norm": 42.91585922241211, "learning_rate": 2.256e-05, "loss": 1.8837, "step": 571 }, { "epoch": 4.576, "grad_norm": 39.932865142822266, "learning_rate": 2.26e-05, "loss": 1.6096, "step": 572 }, { "epoch": 4.584, "grad_norm": 68.98956298828125, "learning_rate": 2.264e-05, "loss": 1.8298, "step": 573 }, { "epoch": 4.592, "grad_norm": 37.7781982421875, "learning_rate": 2.268e-05, "loss": 1.8432, "step": 574 }, { "epoch": 4.6, "grad_norm": 54.63825607299805, "learning_rate": 2.2720000000000003e-05, "loss": 1.37, "step": 575 }, { "epoch": 4.608, "grad_norm": 55.95085906982422, "learning_rate": 2.2760000000000002e-05, "loss": 2.0393, "step": 576 }, { "epoch": 4.616, "grad_norm": 38.6992073059082, "learning_rate": 2.2800000000000002e-05, "loss": 1.7509, "step": 577 }, { "epoch": 4.624, "grad_norm": 32.34169387817383, "learning_rate": 2.284e-05, "loss": 1.885, "step": 578 }, { "epoch": 4.632, "grad_norm": 27.00955581665039, "learning_rate": 2.288e-05, "loss": 1.7132, "step": 579 }, { "epoch": 4.64, "grad_norm": 38.197200775146484, "learning_rate": 2.292e-05, "loss": 2.1345, "step": 580 }, { "epoch": 4.648, "grad_norm": 157.8726043701172, "learning_rate": 2.296e-05, "loss": 1.7146, "step": 581 }, { "epoch": 4.656, "grad_norm": 41.13805389404297, "learning_rate": 2.3000000000000003e-05, "loss": 2.2573, "step": 582 }, { "epoch": 4.664, "grad_norm": 35.48239517211914, "learning_rate": 2.304e-05, "loss": 1.7565, "step": 583 }, { "epoch": 4.672, "grad_norm": 60.06611251831055, "learning_rate": 2.3080000000000003e-05, "loss": 1.7163, "step": 584 }, { "epoch": 4.68, "grad_norm": 56.61114501953125, "learning_rate": 2.312e-05, "loss": 2.0072, "step": 585 }, { "epoch": 4.688, "grad_norm": 25.396968841552734, "learning_rate": 2.3160000000000002e-05, "loss": 1.4322, "step": 586 }, { "epoch": 4.696, "grad_norm": 32.381736755371094, "learning_rate": 2.32e-05, "loss": 1.9005, "step": 587 }, { "epoch": 4.704, "grad_norm": 49.218563079833984, "learning_rate": 
2.324e-05, "loss": 1.9479, "step": 588 }, { "epoch": 4.712, "grad_norm": 36.69306945800781, "learning_rate": 2.328e-05, "loss": 1.7467, "step": 589 }, { "epoch": 4.72, "grad_norm": 54.948768615722656, "learning_rate": 2.332e-05, "loss": 2.4478, "step": 590 }, { "epoch": 4.728, "grad_norm": 38.16517639160156, "learning_rate": 2.336e-05, "loss": 1.6513, "step": 591 }, { "epoch": 4.736, "grad_norm": 74.72283172607422, "learning_rate": 2.3400000000000003e-05, "loss": 1.7245, "step": 592 }, { "epoch": 4.744, "grad_norm": 97.70283508300781, "learning_rate": 2.344e-05, "loss": 1.4542, "step": 593 }, { "epoch": 4.752, "grad_norm": 28.652185440063477, "learning_rate": 2.3480000000000002e-05, "loss": 1.8668, "step": 594 }, { "epoch": 4.76, "grad_norm": 54.77566909790039, "learning_rate": 2.3520000000000002e-05, "loss": 2.5354, "step": 595 }, { "epoch": 4.768, "grad_norm": 25.975568771362305, "learning_rate": 2.356e-05, "loss": 2.0953, "step": 596 }, { "epoch": 4.776, "grad_norm": 42.309654235839844, "learning_rate": 2.36e-05, "loss": 1.6143, "step": 597 }, { "epoch": 4.784, "grad_norm": 46.84841537475586, "learning_rate": 2.364e-05, "loss": 1.5529, "step": 598 }, { "epoch": 4.792, "grad_norm": 40.326011657714844, "learning_rate": 2.3680000000000004e-05, "loss": 1.5366, "step": 599 }, { "epoch": 4.8, "grad_norm": 36.484031677246094, "learning_rate": 2.372e-05, "loss": 1.8699, "step": 600 }, { "epoch": 4.808, "grad_norm": 57.96602249145508, "learning_rate": 2.3760000000000003e-05, "loss": 2.4817, "step": 601 }, { "epoch": 4.816, "grad_norm": 66.86128234863281, "learning_rate": 2.38e-05, "loss": 1.6227, "step": 602 }, { "epoch": 4.824, "grad_norm": 40.419864654541016, "learning_rate": 2.3840000000000002e-05, "loss": 1.734, "step": 603 }, { "epoch": 4.832, "grad_norm": 46.498348236083984, "learning_rate": 2.3880000000000002e-05, "loss": 1.8443, "step": 604 }, { "epoch": 4.84, "grad_norm": 46.31009292602539, "learning_rate": 2.392e-05, "loss": 1.6813, "step": 605 }, { "epoch": 
4.848, "grad_norm": 52.2984504699707, "learning_rate": 2.396e-05, "loss": 1.9944, "step": 606 }, { "epoch": 4.856, "grad_norm": 34.20144271850586, "learning_rate": 2.4e-05, "loss": 1.9264, "step": 607 }, { "epoch": 4.864, "grad_norm": 69.26210021972656, "learning_rate": 2.404e-05, "loss": 1.6991, "step": 608 }, { "epoch": 4.872, "grad_norm": 37.09944534301758, "learning_rate": 2.408e-05, "loss": 2.209, "step": 609 }, { "epoch": 4.88, "grad_norm": 74.6870346069336, "learning_rate": 2.412e-05, "loss": 2.049, "step": 610 }, { "epoch": 4.888, "grad_norm": 58.14792251586914, "learning_rate": 2.4160000000000002e-05, "loss": 1.6161, "step": 611 }, { "epoch": 4.896, "grad_norm": 64.94216918945312, "learning_rate": 2.4200000000000002e-05, "loss": 1.9952, "step": 612 }, { "epoch": 4.904, "grad_norm": 19.742971420288086, "learning_rate": 2.4240000000000002e-05, "loss": 2.1557, "step": 613 }, { "epoch": 4.912, "grad_norm": 70.8616714477539, "learning_rate": 2.428e-05, "loss": 1.8321, "step": 614 }, { "epoch": 4.92, "grad_norm": 38.12445831298828, "learning_rate": 2.432e-05, "loss": 1.3007, "step": 615 }, { "epoch": 4.928, "grad_norm": 69.1763687133789, "learning_rate": 2.4360000000000004e-05, "loss": 1.8271, "step": 616 }, { "epoch": 4.936, "grad_norm": 59.37450408935547, "learning_rate": 2.44e-05, "loss": 1.4851, "step": 617 }, { "epoch": 4.944, "grad_norm": 64.68832397460938, "learning_rate": 2.4440000000000003e-05, "loss": 1.6293, "step": 618 }, { "epoch": 4.952, "grad_norm": 68.6485824584961, "learning_rate": 2.448e-05, "loss": 2.7474, "step": 619 }, { "epoch": 4.96, "grad_norm": 39.674442291259766, "learning_rate": 2.4520000000000002e-05, "loss": 2.1046, "step": 620 }, { "epoch": 4.968, "grad_norm": 68.67539978027344, "learning_rate": 2.4560000000000002e-05, "loss": 1.8729, "step": 621 }, { "epoch": 4.976, "grad_norm": 46.620365142822266, "learning_rate": 2.46e-05, "loss": 2.4367, "step": 622 }, { "epoch": 4.984, "grad_norm": 38.2707633972168, "learning_rate": 2.464e-05, 
"loss": 2.05, "step": 623 }, { "epoch": 4.992, "grad_norm": 54.82331848144531, "learning_rate": 2.468e-05, "loss": 2.7197, "step": 624 }, { "epoch": 5.0, "grad_norm": 48.1609992980957, "learning_rate": 2.472e-05, "loss": 1.8018, "step": 625 }, { "epoch": 5.0, "eval_loss": 2.1594154834747314, "eval_map": 0.0773, "eval_map_50": 0.1717, "eval_map_75": 0.0574, "eval_map_Coverall": 0.2719, "eval_map_Face_Shield": 0.0, "eval_map_Gloves": 0.0304, "eval_map_Goggles": 0.0, "eval_map_Mask": 0.084, "eval_map_large": 0.0917, "eval_map_medium": 0.0242, "eval_map_small": -1.0, "eval_mar_1": 0.1095, "eval_mar_10": 0.2158, "eval_mar_100": 0.2355, "eval_mar_100_Coverall": 0.6533, "eval_mar_100_Face_Shield": 0.0, "eval_mar_100_Gloves": 0.182, "eval_mar_100_Goggles": 0.0, "eval_mar_100_Mask": 0.3423, "eval_mar_large": 0.2646, "eval_mar_medium": 0.1001, "eval_mar_small": -1.0, "eval_runtime": 5.612, "eval_samples_per_second": 5.167, "eval_steps_per_second": 0.356, "step": 625 }, { "epoch": 5.008, "grad_norm": 95.09942626953125, "learning_rate": 2.476e-05, "loss": 1.407, "step": 626 }, { "epoch": 5.016, "grad_norm": 52.8339729309082, "learning_rate": 2.48e-05, "loss": 1.6835, "step": 627 }, { "epoch": 5.024, "grad_norm": 40.936866760253906, "learning_rate": 2.4840000000000003e-05, "loss": 1.9363, "step": 628 }, { "epoch": 5.032, "grad_norm": 40.12213897705078, "learning_rate": 2.488e-05, "loss": 1.9367, "step": 629 }, { "epoch": 5.04, "grad_norm": 223.8928680419922, "learning_rate": 2.4920000000000002e-05, "loss": 1.7851, "step": 630 }, { "epoch": 5.048, "grad_norm": 748.873291015625, "learning_rate": 2.496e-05, "loss": 1.3691, "step": 631 }, { "epoch": 5.056, "grad_norm": 100.40495300292969, "learning_rate": 2.5e-05, "loss": 1.6533, "step": 632 }, { "epoch": 5.064, "grad_norm": 324.6820373535156, "learning_rate": 2.504e-05, "loss": 1.9519, "step": 633 }, { "epoch": 5.072, "grad_norm": 1842.655029296875, "learning_rate": 2.5080000000000004e-05, "loss": 1.9051, "step": 634 }, { "epoch": 
5.08, "grad_norm": 35.031715393066406, "learning_rate": 2.512e-05, "loss": 2.1447, "step": 635 }, { "epoch": 5.088, "grad_norm": 40.729217529296875, "learning_rate": 2.516e-05, "loss": 1.723, "step": 636 }, { "epoch": 5.096, "grad_norm": 44.89295196533203, "learning_rate": 2.5200000000000003e-05, "loss": 2.4509, "step": 637 }, { "epoch": 5.104, "grad_norm": 147.69224548339844, "learning_rate": 2.5240000000000002e-05, "loss": 1.672, "step": 638 }, { "epoch": 5.112, "grad_norm": 53.78084182739258, "learning_rate": 2.5280000000000005e-05, "loss": 1.744, "step": 639 }, { "epoch": 5.12, "grad_norm": 50.27755355834961, "learning_rate": 2.5319999999999998e-05, "loss": 1.7573, "step": 640 }, { "epoch": 5.128, "grad_norm": 52.8342399597168, "learning_rate": 2.536e-05, "loss": 1.758, "step": 641 }, { "epoch": 5.136, "grad_norm": 76.53311920166016, "learning_rate": 2.54e-05, "loss": 2.2458, "step": 642 }, { "epoch": 5.144, "grad_norm": 43.57012939453125, "learning_rate": 2.5440000000000004e-05, "loss": 2.0112, "step": 643 }, { "epoch": 5.152, "grad_norm": 229.47958374023438, "learning_rate": 2.5480000000000003e-05, "loss": 1.9302, "step": 644 }, { "epoch": 5.16, "grad_norm": 37.86599349975586, "learning_rate": 2.552e-05, "loss": 1.9479, "step": 645 }, { "epoch": 5.168, "grad_norm": 38.26551055908203, "learning_rate": 2.556e-05, "loss": 2.0349, "step": 646 }, { "epoch": 5.176, "grad_norm": 34.309173583984375, "learning_rate": 2.5600000000000002e-05, "loss": 1.9101, "step": 647 }, { "epoch": 5.184, "grad_norm": 52.87360382080078, "learning_rate": 2.5640000000000002e-05, "loss": 2.2921, "step": 648 }, { "epoch": 5.192, "grad_norm": 35.30905532836914, "learning_rate": 2.5679999999999998e-05, "loss": 2.0562, "step": 649 }, { "epoch": 5.2, "grad_norm": 35.69474411010742, "learning_rate": 2.572e-05, "loss": 2.1916, "step": 650 }, { "epoch": 5.208, "grad_norm": 54.64889144897461, "learning_rate": 2.576e-05, "loss": 1.8878, "step": 651 }, { "epoch": 5.216, "grad_norm": 
27.56044578552246, "learning_rate": 2.58e-05, "loss": 2.3803, "step": 652 }, { "epoch": 5.224, "grad_norm": 44.14836120605469, "learning_rate": 2.5840000000000003e-05, "loss": 1.8241, "step": 653 }, { "epoch": 5.232, "grad_norm": 67.99580383300781, "learning_rate": 2.588e-05, "loss": 1.703, "step": 654 }, { "epoch": 5.24, "grad_norm": 112.09241485595703, "learning_rate": 2.592e-05, "loss": 2.0857, "step": 655 }, { "epoch": 5.248, "grad_norm": 71.30339050292969, "learning_rate": 2.5960000000000002e-05, "loss": 2.016, "step": 656 }, { "epoch": 5.256, "grad_norm": 71.0361099243164, "learning_rate": 2.6000000000000002e-05, "loss": 1.5857, "step": 657 }, { "epoch": 5.264, "grad_norm": 131.64195251464844, "learning_rate": 2.6040000000000005e-05, "loss": 2.9429, "step": 658 }, { "epoch": 5.272, "grad_norm": 220.017822265625, "learning_rate": 2.6079999999999998e-05, "loss": 1.6079, "step": 659 }, { "epoch": 5.28, "grad_norm": 59.14936828613281, "learning_rate": 2.612e-05, "loss": 1.4021, "step": 660 }, { "epoch": 5.288, "grad_norm": 36.16281509399414, "learning_rate": 2.616e-05, "loss": 1.7894, "step": 661 }, { "epoch": 5.296, "grad_norm": 36.58060836791992, "learning_rate": 2.6200000000000003e-05, "loss": 2.564, "step": 662 }, { "epoch": 5.304, "grad_norm": 859.4911499023438, "learning_rate": 2.6240000000000003e-05, "loss": 1.6873, "step": 663 }, { "epoch": 5.312, "grad_norm": 36.83635711669922, "learning_rate": 2.628e-05, "loss": 1.6534, "step": 664 }, { "epoch": 5.32, "grad_norm": 47.701751708984375, "learning_rate": 2.632e-05, "loss": 2.0851, "step": 665 }, { "epoch": 5.328, "grad_norm": 50.402862548828125, "learning_rate": 2.6360000000000002e-05, "loss": 2.0151, "step": 666 }, { "epoch": 5.336, "grad_norm": 50.213958740234375, "learning_rate": 2.64e-05, "loss": 1.596, "step": 667 }, { "epoch": 5.344, "grad_norm": 34.16267776489258, "learning_rate": 2.6440000000000004e-05, "loss": 2.2429, "step": 668 }, { "epoch": 5.352, "grad_norm": 38.9686164855957, "learning_rate": 
2.648e-05, "loss": 2.0964, "step": 669 }, { "epoch": 5.36, "grad_norm": 169.48973083496094, "learning_rate": 2.652e-05, "loss": 2.0351, "step": 670 }, { "epoch": 5.368, "grad_norm": null, "learning_rate": 2.652e-05, "loss": 1.7699, "step": 671 }, { "epoch": 5.376, "grad_norm": 56.51689529418945, "learning_rate": 2.6560000000000003e-05, "loss": 1.9291, "step": 672 }, { "epoch": 5.384, "grad_norm": 69.02115631103516, "learning_rate": 2.6600000000000003e-05, "loss": 2.3173, "step": 673 }, { "epoch": 5.392, "grad_norm": 379.314697265625, "learning_rate": 2.6640000000000002e-05, "loss": 1.8387, "step": 674 }, { "epoch": 5.4, "grad_norm": 33.449283599853516, "learning_rate": 2.668e-05, "loss": 2.4318, "step": 675 }, { "epoch": 5.408, "grad_norm": 54.52973937988281, "learning_rate": 2.672e-05, "loss": 2.2506, "step": 676 }, { "epoch": 5.416, "grad_norm": 231.23403930664062, "learning_rate": 2.676e-05, "loss": 1.8228, "step": 677 }, { "epoch": 5.424, "grad_norm": 54.915740966796875, "learning_rate": 2.6800000000000004e-05, "loss": 2.1002, "step": 678 }, { "epoch": 5.432, "grad_norm": 52.09461212158203, "learning_rate": 2.6840000000000004e-05, "loss": 1.8627, "step": 679 }, { "epoch": 5.44, "grad_norm": 39.45806121826172, "learning_rate": 2.688e-05, "loss": 2.2386, "step": 680 }, { "epoch": 5.448, "grad_norm": 45.451385498046875, "learning_rate": 2.692e-05, "loss": 2.0632, "step": 681 }, { "epoch": 5.456, "grad_norm": 107.24671173095703, "learning_rate": 2.6960000000000003e-05, "loss": 2.1001, "step": 682 }, { "epoch": 5.464, "grad_norm": 68.86515808105469, "learning_rate": 2.7000000000000002e-05, "loss": 1.9795, "step": 683 }, { "epoch": 5.4719999999999995, "grad_norm": 61.899295806884766, "learning_rate": 2.704e-05, "loss": 1.5976, "step": 684 }, { "epoch": 5.48, "grad_norm": 32.78185272216797, "learning_rate": 2.7079999999999998e-05, "loss": 1.9206, "step": 685 }, { "epoch": 5.4879999999999995, "grad_norm": 42.747745513916016, "learning_rate": 2.712e-05, "loss": 
1.8835, "step": 686 }, { "epoch": 5.496, "grad_norm": 464.728515625, "learning_rate": 2.716e-05, "loss": 1.7183, "step": 687 }, { "epoch": 5.504, "grad_norm": 57.542640686035156, "learning_rate": 2.7200000000000004e-05, "loss": 1.5283, "step": 688 }, { "epoch": 5.5120000000000005, "grad_norm": 123.94038391113281, "learning_rate": 2.724e-05, "loss": 2.1891, "step": 689 }, { "epoch": 5.52, "grad_norm": 24.80342674255371, "learning_rate": 2.728e-05, "loss": 1.4168, "step": 690 }, { "epoch": 5.5280000000000005, "grad_norm": 43.04991912841797, "learning_rate": 2.7320000000000003e-05, "loss": 1.9657, "step": 691 }, { "epoch": 5.536, "grad_norm": 39.218360900878906, "learning_rate": 2.7360000000000002e-05, "loss": 1.6258, "step": 692 }, { "epoch": 5.5440000000000005, "grad_norm": 72.5625991821289, "learning_rate": 2.7400000000000002e-05, "loss": 2.6483, "step": 693 }, { "epoch": 5.552, "grad_norm": 42.804386138916016, "learning_rate": 2.7439999999999998e-05, "loss": 1.4099, "step": 694 }, { "epoch": 5.5600000000000005, "grad_norm": 67.83485412597656, "learning_rate": 2.748e-05, "loss": 2.2104, "step": 695 }, { "epoch": 5.568, "grad_norm": 47.27434158325195, "learning_rate": 2.752e-05, "loss": 1.9744, "step": 696 }, { "epoch": 5.576, "grad_norm": 29.079395294189453, "learning_rate": 2.7560000000000004e-05, "loss": 2.1265, "step": 697 }, { "epoch": 5.584, "grad_norm": 101.6113510131836, "learning_rate": 2.7600000000000003e-05, "loss": 2.1563, "step": 698 }, { "epoch": 5.592, "grad_norm": 100.66471862792969, "learning_rate": 2.764e-05, "loss": 1.8331, "step": 699 }, { "epoch": 5.6, "grad_norm": 54.200984954833984, "learning_rate": 2.768e-05, "loss": 1.5849, "step": 700 }, { "epoch": 5.608, "grad_norm": 32.185752868652344, "learning_rate": 2.7720000000000002e-05, "loss": 1.7104, "step": 701 }, { "epoch": 5.616, "grad_norm": 48.01735305786133, "learning_rate": 2.7760000000000002e-05, "loss": 2.2856, "step": 702 }, { "epoch": 5.624, "grad_norm": 68.22238159179688, 
"learning_rate": 2.7800000000000005e-05, "loss": 1.6029, "step": 703 }, { "epoch": 5.632, "grad_norm": 100.78208923339844, "learning_rate": 2.7839999999999998e-05, "loss": 1.9535, "step": 704 }, { "epoch": 5.64, "grad_norm": 32.64128494262695, "learning_rate": 2.788e-05, "loss": 1.608, "step": 705 }, { "epoch": 5.648, "grad_norm": 40.184661865234375, "learning_rate": 2.792e-05, "loss": 2.2738, "step": 706 }, { "epoch": 5.656, "grad_norm": 38.5816535949707, "learning_rate": 2.7960000000000003e-05, "loss": 1.9938, "step": 707 }, { "epoch": 5.664, "grad_norm": 50.9681282043457, "learning_rate": 2.8000000000000003e-05, "loss": 1.6765, "step": 708 }, { "epoch": 5.672, "grad_norm": 57.82577896118164, "learning_rate": 2.804e-05, "loss": 2.5217, "step": 709 }, { "epoch": 5.68, "grad_norm": 38.00436019897461, "learning_rate": 2.8080000000000002e-05, "loss": 1.8884, "step": 710 }, { "epoch": 5.688, "grad_norm": 197.0482635498047, "learning_rate": 2.8120000000000002e-05, "loss": 2.8818, "step": 711 }, { "epoch": 5.696, "grad_norm": 79.74591827392578, "learning_rate": 2.816e-05, "loss": 2.2074, "step": 712 }, { "epoch": 5.704, "grad_norm": 62.878597259521484, "learning_rate": 2.8199999999999998e-05, "loss": 2.3645, "step": 713 }, { "epoch": 5.712, "grad_norm": 115.38941955566406, "learning_rate": 2.824e-05, "loss": 1.7957, "step": 714 }, { "epoch": 5.72, "grad_norm": 105.66313934326172, "learning_rate": 2.828e-05, "loss": 1.7554, "step": 715 }, { "epoch": 5.728, "grad_norm": 73.88673400878906, "learning_rate": 2.8320000000000003e-05, "loss": 1.5641, "step": 716 }, { "epoch": 5.736, "grad_norm": 73.68846893310547, "learning_rate": 2.8360000000000003e-05, "loss": 1.8796, "step": 717 }, { "epoch": 5.744, "grad_norm": 73.1540756225586, "learning_rate": 2.84e-05, "loss": 2.1558, "step": 718 }, { "epoch": 5.752, "grad_norm": 39.108062744140625, "learning_rate": 2.844e-05, "loss": 2.0874, "step": 719 }, { "epoch": 5.76, "grad_norm": 117.43362426757812, "learning_rate": 
2.8480000000000002e-05, "loss": 2.0461, "step": 720 }, { "epoch": 5.768, "grad_norm": 59.849388122558594, "learning_rate": 2.852e-05, "loss": 2.2024, "step": 721 }, { "epoch": 5.776, "grad_norm": 77.19963836669922, "learning_rate": 2.8560000000000004e-05, "loss": 1.7907, "step": 722 }, { "epoch": 5.784, "grad_norm": 188.14620971679688, "learning_rate": 2.86e-05, "loss": 2.3019, "step": 723 }, { "epoch": 5.792, "grad_norm": 754.0563354492188, "learning_rate": 2.864e-05, "loss": 1.7807, "step": 724 }, { "epoch": 5.8, "grad_norm": 93.97193145751953, "learning_rate": 2.868e-05, "loss": 1.6058, "step": 725 }, { "epoch": 5.808, "grad_norm": 142.83718872070312, "learning_rate": 2.8720000000000003e-05, "loss": 3.5328, "step": 726 }, { "epoch": 5.816, "grad_norm": 76.73177337646484, "learning_rate": 2.8760000000000002e-05, "loss": 1.5749, "step": 727 }, { "epoch": 5.824, "grad_norm": 42.20380401611328, "learning_rate": 2.88e-05, "loss": 1.4294, "step": 728 }, { "epoch": 5.832, "grad_norm": 378.66986083984375, "learning_rate": 2.8840000000000002e-05, "loss": 1.8315, "step": 729 }, { "epoch": 5.84, "grad_norm": 54.609134674072266, "learning_rate": 2.888e-05, "loss": 1.5127, "step": 730 }, { "epoch": 5.848, "grad_norm": 31.97462272644043, "learning_rate": 2.8920000000000004e-05, "loss": 1.9056, "step": 731 }, { "epoch": 5.856, "grad_norm": 51.00986099243164, "learning_rate": 2.8960000000000004e-05, "loss": 2.6377, "step": 732 }, { "epoch": 5.864, "grad_norm": 403.56744384765625, "learning_rate": 2.9e-05, "loss": 1.6403, "step": 733 }, { "epoch": 5.872, "grad_norm": 44.03303146362305, "learning_rate": 2.904e-05, "loss": 1.8309, "step": 734 }, { "epoch": 5.88, "grad_norm": 55.06475830078125, "learning_rate": 2.9080000000000003e-05, "loss": 1.7614, "step": 735 }, { "epoch": 5.888, "grad_norm": 117.85295867919922, "learning_rate": 2.9120000000000002e-05, "loss": 2.3027, "step": 736 }, { "epoch": 5.896, "grad_norm": 218.1259002685547, "learning_rate": 2.9160000000000005e-05, 
"loss": 2.0939, "step": 737 }, { "epoch": 5.904, "grad_norm": 49.662864685058594, "learning_rate": 2.9199999999999998e-05, "loss": 1.6723, "step": 738 }, { "epoch": 5.912, "grad_norm": 275.537109375, "learning_rate": 2.924e-05, "loss": 2.0607, "step": 739 }, { "epoch": 5.92, "grad_norm": 49.812801361083984, "learning_rate": 2.928e-05, "loss": 2.0895, "step": 740 }, { "epoch": 5.928, "grad_norm": 268.5863952636719, "learning_rate": 2.9320000000000004e-05, "loss": 2.5821, "step": 741 }, { "epoch": 5.936, "grad_norm": 187.7859649658203, "learning_rate": 2.9360000000000003e-05, "loss": 2.0054, "step": 742 }, { "epoch": 5.944, "grad_norm": 47.41761016845703, "learning_rate": 2.94e-05, "loss": 2.2874, "step": 743 }, { "epoch": 5.952, "grad_norm": 83.04234313964844, "learning_rate": 2.944e-05, "loss": 2.0462, "step": 744 }, { "epoch": 5.96, "grad_norm": 68.64269256591797, "learning_rate": 2.9480000000000002e-05, "loss": 1.6442, "step": 745 }, { "epoch": 5.968, "grad_norm": 182.9205780029297, "learning_rate": 2.9520000000000002e-05, "loss": 2.6215, "step": 746 }, { "epoch": 5.976, "grad_norm": 94.86433410644531, "learning_rate": 2.9559999999999998e-05, "loss": 1.975, "step": 747 }, { "epoch": 5.984, "grad_norm": 173.90562438964844, "learning_rate": 2.96e-05, "loss": 1.8538, "step": 748 }, { "epoch": 5.992, "grad_norm": 70.42318725585938, "learning_rate": 2.964e-05, "loss": 1.886, "step": 749 }, { "epoch": 6.0, "grad_norm": 48.52217483520508, "learning_rate": 2.9680000000000004e-05, "loss": 1.7393, "step": 750 }, { "epoch": 6.0, "eval_loss": 1.892723798751831, "eval_map": 0.0896, "eval_map_50": 0.1925, "eval_map_75": 0.0683, "eval_map_Coverall": 0.2534, "eval_map_Face_Shield": 0.0, "eval_map_Gloves": 0.0567, "eval_map_Goggles": 0.0, "eval_map_Mask": 0.1379, "eval_map_large": 0.122, "eval_map_medium": 0.0337, "eval_map_small": -1.0, "eval_mar_1": 0.1012, "eval_mar_10": 0.2369, "eval_mar_100": 0.2802, "eval_mar_100_Coverall": 0.7111, "eval_mar_100_Face_Shield": 0.0, 
"eval_mar_100_Gloves": 0.282, "eval_mar_100_Goggles": 0.0, "eval_mar_100_Mask": 0.4077, "eval_mar_large": 0.3156, "eval_mar_medium": 0.1347, "eval_mar_small": -1.0, "eval_runtime": 3.8119, "eval_samples_per_second": 7.608, "eval_steps_per_second": 0.525, "step": 750 }, { "epoch": 6.008, "grad_norm": 36.288063049316406, "learning_rate": 2.9720000000000003e-05, "loss": 1.6187, "step": 751 }, { "epoch": 6.016, "grad_norm": 39.109947204589844, "learning_rate": 2.976e-05, "loss": 1.5834, "step": 752 }, { "epoch": 6.024, "grad_norm": 146.5767364501953, "learning_rate": 2.98e-05, "loss": 1.9622, "step": 753 }, { "epoch": 6.032, "grad_norm": 52.06295394897461, "learning_rate": 2.9840000000000002e-05, "loss": 2.0431, "step": 754 }, { "epoch": 6.04, "grad_norm": 63.41004943847656, "learning_rate": 2.9880000000000002e-05, "loss": 1.6864, "step": 755 }, { "epoch": 6.048, "grad_norm": 91.52657318115234, "learning_rate": 2.9920000000000005e-05, "loss": 2.1403, "step": 756 }, { "epoch": 6.056, "grad_norm": 46.82609558105469, "learning_rate": 2.9959999999999998e-05, "loss": 1.5573, "step": 757 }, { "epoch": 6.064, "grad_norm": 209.34072875976562, "learning_rate": 3e-05, "loss": 2.4318, "step": 758 }, { "epoch": 6.072, "grad_norm": 54.6248893737793, "learning_rate": 3.004e-05, "loss": 1.6932, "step": 759 }, { "epoch": 6.08, "grad_norm": 101.04603576660156, "learning_rate": 3.0080000000000003e-05, "loss": 1.8649, "step": 760 }, { "epoch": 6.088, "grad_norm": 126.71275329589844, "learning_rate": 3.0120000000000003e-05, "loss": 2.0857, "step": 761 }, { "epoch": 6.096, "grad_norm": 73.93370819091797, "learning_rate": 3.016e-05, "loss": 1.9195, "step": 762 }, { "epoch": 6.104, "grad_norm": 33.98806381225586, "learning_rate": 3.02e-05, "loss": 2.0004, "step": 763 }, { "epoch": 6.112, "grad_norm": 48.305789947509766, "learning_rate": 3.0240000000000002e-05, "loss": 1.6071, "step": 764 }, { "epoch": 6.12, "grad_norm": 46.73958969116211, "learning_rate": 3.028e-05, "loss": 2.0526, "step": 
765 }, { "epoch": 6.128, "grad_norm": 63.837738037109375, "learning_rate": 3.0320000000000004e-05, "loss": 1.4757, "step": 766 }, { "epoch": 6.136, "grad_norm": 83.43203735351562, "learning_rate": 3.036e-05, "loss": 1.4671, "step": 767 }, { "epoch": 6.144, "grad_norm": 49.89427947998047, "learning_rate": 3.04e-05, "loss": 1.6423, "step": 768 }, { "epoch": 6.152, "grad_norm": 41.069252014160156, "learning_rate": 3.0440000000000003e-05, "loss": 1.4455, "step": 769 }, { "epoch": 6.16, "grad_norm": 64.02425384521484, "learning_rate": 3.0480000000000003e-05, "loss": 1.9539, "step": 770 }, { "epoch": 6.168, "grad_norm": 44.166343688964844, "learning_rate": 3.0520000000000006e-05, "loss": 2.2294, "step": 771 }, { "epoch": 6.176, "grad_norm": 81.62739562988281, "learning_rate": 3.056e-05, "loss": 2.2351, "step": 772 }, { "epoch": 6.184, "grad_norm": 70.05719757080078, "learning_rate": 3.06e-05, "loss": 1.7254, "step": 773 }, { "epoch": 6.192, "grad_norm": 80.47914123535156, "learning_rate": 3.0640000000000005e-05, "loss": 1.9151, "step": 774 }, { "epoch": 6.2, "grad_norm": 50.06690216064453, "learning_rate": 3.0680000000000004e-05, "loss": 1.8497, "step": 775 }, { "epoch": 6.208, "grad_norm": 68.76950073242188, "learning_rate": 3.072e-05, "loss": 1.5221, "step": 776 }, { "epoch": 6.216, "grad_norm": 96.3576889038086, "learning_rate": 3.076e-05, "loss": 1.6291, "step": 777 }, { "epoch": 6.224, "grad_norm": 40.66973114013672, "learning_rate": 3.08e-05, "loss": 1.6483, "step": 778 }, { "epoch": 6.232, "grad_norm": 61.8912467956543, "learning_rate": 3.084e-05, "loss": 1.6789, "step": 779 }, { "epoch": 6.24, "grad_norm": 38.66373062133789, "learning_rate": 3.088e-05, "loss": 1.9507, "step": 780 }, { "epoch": 6.248, "grad_norm": 39.2679443359375, "learning_rate": 3.092e-05, "loss": 2.1699, "step": 781 }, { "epoch": 6.256, "grad_norm": 37.553672790527344, "learning_rate": 3.096e-05, "loss": 1.5834, "step": 782 }, { "epoch": 6.264, "grad_norm": 111.61589050292969, "learning_rate": 
3.1e-05, "loss": 1.8162, "step": 783 }, { "epoch": 6.272, "grad_norm": 41.927120208740234, "learning_rate": 3.104e-05, "loss": 2.2827, "step": 784 }, { "epoch": 6.28, "grad_norm": 342.44781494140625, "learning_rate": 3.108e-05, "loss": 1.4735, "step": 785 }, { "epoch": 6.288, "grad_norm": 80.82258605957031, "learning_rate": 3.112e-05, "loss": 2.0136, "step": 786 }, { "epoch": 6.296, "grad_norm": 108.03018188476562, "learning_rate": 3.116e-05, "loss": 1.6675, "step": 787 }, { "epoch": 6.304, "grad_norm": 61.716426849365234, "learning_rate": 3.12e-05, "loss": 1.7574, "step": 788 }, { "epoch": 6.312, "grad_norm": 43.284793853759766, "learning_rate": 3.1240000000000006e-05, "loss": 1.7754, "step": 789 }, { "epoch": 6.32, "grad_norm": 28.397775650024414, "learning_rate": 3.1280000000000005e-05, "loss": 1.9532, "step": 790 }, { "epoch": 6.328, "grad_norm": 41.500518798828125, "learning_rate": 3.132e-05, "loss": 1.696, "step": 791 }, { "epoch": 6.336, "grad_norm": 24.285240173339844, "learning_rate": 3.136e-05, "loss": 1.911, "step": 792 }, { "epoch": 6.344, "grad_norm": 38.26093292236328, "learning_rate": 3.1400000000000004e-05, "loss": 2.0075, "step": 793 }, { "epoch": 6.352, "grad_norm": 35.401458740234375, "learning_rate": 3.1440000000000004e-05, "loss": 1.35, "step": 794 }, { "epoch": 6.36, "grad_norm": 25.01190185546875, "learning_rate": 3.1480000000000004e-05, "loss": 1.9639, "step": 795 }, { "epoch": 6.368, "grad_norm": 28.09592056274414, "learning_rate": 3.1519999999999996e-05, "loss": 2.0247, "step": 796 }, { "epoch": 6.376, "grad_norm": 27.725692749023438, "learning_rate": 3.156e-05, "loss": 1.4062, "step": 797 }, { "epoch": 6.384, "grad_norm": 40.1574821472168, "learning_rate": 3.16e-05, "loss": 1.2067, "step": 798 }, { "epoch": 6.392, "grad_norm": 44.77125549316406, "learning_rate": 3.164e-05, "loss": 1.8265, "step": 799 }, { "epoch": 6.4, "grad_norm": 41.25481033325195, "learning_rate": 3.168e-05, "loss": 1.6134, "step": 800 }, { "epoch": 6.408, "grad_norm": 
57.87846374511719, "learning_rate": 3.172e-05, "loss": 2.1131, "step": 801 }, { "epoch": 6.416, "grad_norm": 35.25403594970703, "learning_rate": 3.176e-05, "loss": 1.7165, "step": 802 }, { "epoch": 6.424, "grad_norm": 23.69053077697754, "learning_rate": 3.18e-05, "loss": 1.7339, "step": 803 }, { "epoch": 6.432, "grad_norm": 50.450618743896484, "learning_rate": 3.184e-05, "loss": 2.0994, "step": 804 }, { "epoch": 6.44, "grad_norm": 24.78369140625, "learning_rate": 3.188e-05, "loss": 1.71, "step": 805 }, { "epoch": 6.448, "grad_norm": 46.273738861083984, "learning_rate": 3.192e-05, "loss": 1.3666, "step": 806 }, { "epoch": 6.456, "grad_norm": 53.620338439941406, "learning_rate": 3.196e-05, "loss": 1.6924, "step": 807 }, { "epoch": 6.464, "grad_norm": 34.80326843261719, "learning_rate": 3.2000000000000005e-05, "loss": 1.5091, "step": 808 }, { "epoch": 6.4719999999999995, "grad_norm": 40.26123809814453, "learning_rate": 3.2040000000000005e-05, "loss": 2.1132, "step": 809 }, { "epoch": 6.48, "grad_norm": 60.93488311767578, "learning_rate": 3.208e-05, "loss": 2.1601, "step": 810 }, { "epoch": 6.4879999999999995, "grad_norm": 86.97779083251953, "learning_rate": 3.212e-05, "loss": 1.9726, "step": 811 }, { "epoch": 6.496, "grad_norm": 65.03072357177734, "learning_rate": 3.2160000000000004e-05, "loss": 1.4941, "step": 812 }, { "epoch": 6.504, "grad_norm": 57.65848922729492, "learning_rate": 3.2200000000000003e-05, "loss": 1.5259, "step": 813 }, { "epoch": 6.5120000000000005, "grad_norm": 39.3818473815918, "learning_rate": 3.224e-05, "loss": 1.8171, "step": 814 }, { "epoch": 6.52, "grad_norm": 64.32617950439453, "learning_rate": 3.2279999999999996e-05, "loss": 1.8432, "step": 815 }, { "epoch": 6.5280000000000005, "grad_norm": 62.300880432128906, "learning_rate": 3.232e-05, "loss": 2.7079, "step": 816 }, { "epoch": 6.536, "grad_norm": 77.65179443359375, "learning_rate": 3.236e-05, "loss": 1.9681, "step": 817 }, { "epoch": 6.5440000000000005, "grad_norm": 37.7744140625, 
"learning_rate": 3.24e-05, "loss": 1.5022, "step": 818 }, { "epoch": 6.552, "grad_norm": 31.328384399414062, "learning_rate": 3.244e-05, "loss": 1.3827, "step": 819 }, { "epoch": 6.5600000000000005, "grad_norm": 89.73069763183594, "learning_rate": 3.248e-05, "loss": 1.9438, "step": 820 }, { "epoch": 6.568, "grad_norm": 19.123144149780273, "learning_rate": 3.252e-05, "loss": 1.1759, "step": 821 }, { "epoch": 6.576, "grad_norm": 52.087799072265625, "learning_rate": 3.256e-05, "loss": 1.3932, "step": 822 }, { "epoch": 6.584, "grad_norm": 46.079620361328125, "learning_rate": 3.26e-05, "loss": 1.654, "step": 823 }, { "epoch": 6.592, "grad_norm": 147.39454650878906, "learning_rate": 3.2640000000000006e-05, "loss": 1.6826, "step": 824 }, { "epoch": 6.6, "grad_norm": 78.81685638427734, "learning_rate": 3.268e-05, "loss": 2.0671, "step": 825 }, { "epoch": 6.608, "grad_norm": 46.345680236816406, "learning_rate": 3.272e-05, "loss": 2.1784, "step": 826 }, { "epoch": 6.616, "grad_norm": 41.73802185058594, "learning_rate": 3.2760000000000005e-05, "loss": 1.2149, "step": 827 }, { "epoch": 6.624, "grad_norm": 50.908714294433594, "learning_rate": 3.2800000000000004e-05, "loss": 1.8891, "step": 828 }, { "epoch": 6.632, "grad_norm": 53.989566802978516, "learning_rate": 3.2840000000000004e-05, "loss": 1.687, "step": 829 }, { "epoch": 6.64, "grad_norm": 37.97653579711914, "learning_rate": 3.288e-05, "loss": 1.7698, "step": 830 }, { "epoch": 6.648, "grad_norm": 105.75123596191406, "learning_rate": 3.292e-05, "loss": 1.9693, "step": 831 }, { "epoch": 6.656, "grad_norm": 61.80316162109375, "learning_rate": 3.296e-05, "loss": 1.5365, "step": 832 }, { "epoch": 6.664, "grad_norm": 27.801485061645508, "learning_rate": 3.3e-05, "loss": 1.2958, "step": 833 }, { "epoch": 6.672, "grad_norm": 19.399944305419922, "learning_rate": 3.304e-05, "loss": 1.6093, "step": 834 }, { "epoch": 6.68, "grad_norm": 66.26407623291016, "learning_rate": 3.308e-05, "loss": 1.6424, "step": 835 }, { "epoch": 6.688, 
"grad_norm": 31.393800735473633, "learning_rate": 3.312e-05, "loss": 1.5822, "step": 836 }, { "epoch": 6.696, "grad_norm": 40.430023193359375, "learning_rate": 3.316e-05, "loss": 1.5707, "step": 837 }, { "epoch": 6.704, "grad_norm": 109.60406494140625, "learning_rate": 3.32e-05, "loss": 1.3296, "step": 838 }, { "epoch": 6.712, "grad_norm": 49.48638916015625, "learning_rate": 3.324e-05, "loss": 1.4531, "step": 839 }, { "epoch": 6.72, "grad_norm": 77.32699584960938, "learning_rate": 3.328e-05, "loss": 1.5792, "step": 840 }, { "epoch": 6.728, "grad_norm": 591.3939819335938, "learning_rate": 3.332e-05, "loss": 1.7034, "step": 841 }, { "epoch": 6.736, "grad_norm": 64.49906158447266, "learning_rate": 3.336e-05, "loss": 1.5517, "step": 842 }, { "epoch": 6.744, "grad_norm": 40.588436126708984, "learning_rate": 3.3400000000000005e-05, "loss": 1.9602, "step": 843 }, { "epoch": 6.752, "grad_norm": 36.09979248046875, "learning_rate": 3.344e-05, "loss": 1.4576, "step": 844 }, { "epoch": 6.76, "grad_norm": 26.901275634765625, "learning_rate": 3.348e-05, "loss": 1.3912, "step": 845 }, { "epoch": 6.768, "grad_norm": 65.56688690185547, "learning_rate": 3.3520000000000004e-05, "loss": 2.1255, "step": 846 }, { "epoch": 6.776, "grad_norm": 42.962039947509766, "learning_rate": 3.3560000000000004e-05, "loss": 2.3312, "step": 847 }, { "epoch": 6.784, "grad_norm": 57.568729400634766, "learning_rate": 3.3600000000000004e-05, "loss": 1.6575, "step": 848 }, { "epoch": 6.792, "grad_norm": 16.43877601623535, "learning_rate": 3.3639999999999996e-05, "loss": 2.6443, "step": 849 }, { "epoch": 6.8, "grad_norm": 44.48012924194336, "learning_rate": 3.368e-05, "loss": 1.8302, "step": 850 }, { "epoch": 6.808, "grad_norm": 54.38038635253906, "learning_rate": 3.372e-05, "loss": 1.4362, "step": 851 }, { "epoch": 6.816, "grad_norm": 35.525421142578125, "learning_rate": 3.376e-05, "loss": 1.4769, "step": 852 }, { "epoch": 6.824, "grad_norm": 38.458290100097656, "learning_rate": 3.38e-05, "loss": 1.5442, 
"step": 853 }, { "epoch": 6.832, "grad_norm": 38.852699279785156, "learning_rate": 3.384e-05, "loss": 2.253, "step": 854 }, { "epoch": 6.84, "grad_norm": 29.801671981811523, "learning_rate": 3.388e-05, "loss": 1.8469, "step": 855 }, { "epoch": 6.848, "grad_norm": 38.59683609008789, "learning_rate": 3.392e-05, "loss": 2.0068, "step": 856 }, { "epoch": 6.856, "grad_norm": 57.996089935302734, "learning_rate": 3.396e-05, "loss": 1.6776, "step": 857 }, { "epoch": 6.864, "grad_norm": 43.388343811035156, "learning_rate": 3.4000000000000007e-05, "loss": 1.2522, "step": 858 }, { "epoch": 6.872, "grad_norm": 29.790287017822266, "learning_rate": 3.404e-05, "loss": 2.2431, "step": 859 }, { "epoch": 6.88, "grad_norm": 1670.096435546875, "learning_rate": 3.408e-05, "loss": 1.6271, "step": 860 }, { "epoch": 6.888, "grad_norm": 31.733957290649414, "learning_rate": 3.412e-05, "loss": 1.8073, "step": 861 }, { "epoch": 6.896, "grad_norm": 34.71796417236328, "learning_rate": 3.4160000000000005e-05, "loss": 1.9358, "step": 862 }, { "epoch": 6.904, "grad_norm": 58.20541000366211, "learning_rate": 3.4200000000000005e-05, "loss": 2.0281, "step": 863 }, { "epoch": 6.912, "grad_norm": 28.445371627807617, "learning_rate": 3.424e-05, "loss": 3.0979, "step": 864 }, { "epoch": 6.92, "grad_norm": 78.70452117919922, "learning_rate": 3.4280000000000004e-05, "loss": 1.7806, "step": 865 }, { "epoch": 6.928, "grad_norm": 30.18097496032715, "learning_rate": 3.4320000000000003e-05, "loss": 1.6944, "step": 866 }, { "epoch": 6.936, "grad_norm": 56.56443405151367, "learning_rate": 3.436e-05, "loss": 2.3669, "step": 867 }, { "epoch": 6.944, "grad_norm": 65.27404022216797, "learning_rate": 3.4399999999999996e-05, "loss": 1.914, "step": 868 }, { "epoch": 6.952, "grad_norm": 44.35979080200195, "learning_rate": 3.444e-05, "loss": 1.8296, "step": 869 }, { "epoch": 6.96, "grad_norm": 31.611217498779297, "learning_rate": 3.448e-05, "loss": 1.452, "step": 870 }, { "epoch": 6.968, "grad_norm": 32.21579360961914, 
"learning_rate": 3.452e-05, "loss": 3.2222, "step": 871 }, { "epoch": 6.976, "grad_norm": 30.862838745117188, "learning_rate": 3.456e-05, "loss": 1.7047, "step": 872 }, { "epoch": 6.984, "grad_norm": 69.37967681884766, "learning_rate": 3.46e-05, "loss": 1.7251, "step": 873 }, { "epoch": 6.992, "grad_norm": 117.07239532470703, "learning_rate": 3.464e-05, "loss": 1.6212, "step": 874 }, { "epoch": 7.0, "grad_norm": 24.411697387695312, "learning_rate": 3.468e-05, "loss": 1.2856, "step": 875 }, { "epoch": 7.0, "eval_loss": 1.743046760559082, "eval_map": 0.1281, "eval_map_50": 0.251, "eval_map_75": 0.1115, "eval_map_Coverall": 0.3266, "eval_map_Face_Shield": 0.0, "eval_map_Gloves": 0.0995, "eval_map_Goggles": 0.0, "eval_map_Mask": 0.2144, "eval_map_large": 0.1365, "eval_map_medium": 0.0757, "eval_map_small": -1.0, "eval_mar_1": 0.126, "eval_mar_10": 0.2515, "eval_mar_100": 0.2736, "eval_mar_100_Coverall": 0.6467, "eval_mar_100_Face_Shield": 0.0, "eval_mar_100_Gloves": 0.2885, "eval_mar_100_Goggles": 0.0, "eval_mar_100_Mask": 0.4327, "eval_mar_large": 0.2812, "eval_mar_medium": 0.1722, "eval_mar_small": -1.0, "eval_runtime": 4.1071, "eval_samples_per_second": 7.061, "eval_steps_per_second": 0.487, "step": 875 }, { "epoch": 7.008, "grad_norm": 29.237077713012695, "learning_rate": 3.472e-05, "loss": 1.7706, "step": 876 }, { "epoch": 7.016, "grad_norm": 26.02581787109375, "learning_rate": 3.4760000000000006e-05, "loss": 1.6916, "step": 877 }, { "epoch": 7.024, "grad_norm": 47.70826721191406, "learning_rate": 3.48e-05, "loss": 1.5874, "step": 878 }, { "epoch": 7.032, "grad_norm": 65.81073760986328, "learning_rate": 3.484e-05, "loss": 1.3771, "step": 879 }, { "epoch": 7.04, "grad_norm": 20.568031311035156, "learning_rate": 3.4880000000000005e-05, "loss": 1.77, "step": 880 }, { "epoch": 7.048, "grad_norm": 29.5637149810791, "learning_rate": 3.4920000000000004e-05, "loss": 1.7037, "step": 881 }, { "epoch": 7.056, "grad_norm": 25.235280990600586, "learning_rate": 
3.4960000000000004e-05, "loss": 1.6385, "step": 882 }, { "epoch": 7.064, "grad_norm": 83.94258117675781, "learning_rate": 3.5e-05, "loss": 1.4955, "step": 883 }, { "epoch": 7.072, "grad_norm": 59.49884033203125, "learning_rate": 3.504e-05, "loss": 2.1166, "step": 884 }, { "epoch": 7.08, "grad_norm": 49.34141540527344, "learning_rate": 3.508e-05, "loss": 1.7293, "step": 885 }, { "epoch": 7.088, "grad_norm": 32.78325653076172, "learning_rate": 3.512e-05, "loss": 1.4707, "step": 886 }, { "epoch": 7.096, "grad_norm": 64.7719497680664, "learning_rate": 3.516e-05, "loss": 1.5312, "step": 887 }, { "epoch": 7.104, "grad_norm": 59.04649353027344, "learning_rate": 3.52e-05, "loss": 1.9254, "step": 888 }, { "epoch": 7.112, "grad_norm": 39.2728385925293, "learning_rate": 3.524e-05, "loss": 1.4111, "step": 889 }, { "epoch": 7.12, "grad_norm": 67.5690689086914, "learning_rate": 3.528e-05, "loss": 1.4613, "step": 890 }, { "epoch": 7.128, "grad_norm": 41.239803314208984, "learning_rate": 3.532e-05, "loss": 1.4791, "step": 891 }, { "epoch": 7.136, "grad_norm": 27.427261352539062, "learning_rate": 3.536000000000001e-05, "loss": 1.4413, "step": 892 }, { "epoch": 7.144, "grad_norm": 21.67972755432129, "learning_rate": 3.54e-05, "loss": 1.6597, "step": 893 }, { "epoch": 7.152, "grad_norm": 22.83672523498535, "learning_rate": 3.544e-05, "loss": 1.1873, "step": 894 }, { "epoch": 7.16, "grad_norm": 27.35342788696289, "learning_rate": 3.548e-05, "loss": 1.4794, "step": 895 }, { "epoch": 7.168, "grad_norm": 36.28207778930664, "learning_rate": 3.5520000000000006e-05, "loss": 1.9726, "step": 896 }, { "epoch": 7.176, "grad_norm": 30.660799026489258, "learning_rate": 3.5560000000000005e-05, "loss": 1.7291, "step": 897 }, { "epoch": 7.184, "grad_norm": 23.93620491027832, "learning_rate": 3.56e-05, "loss": 1.5374, "step": 898 }, { "epoch": 7.192, "grad_norm": 37.9943962097168, "learning_rate": 3.5640000000000004e-05, "loss": 1.4934, "step": 899 }, { "epoch": 7.2, "grad_norm": 72.4600830078125, 
"learning_rate": 3.5680000000000004e-05, "loss": 1.5932, "step": 900 }, { "epoch": 7.208, "grad_norm": 39.38890838623047, "learning_rate": 3.5720000000000004e-05, "loss": 1.5481, "step": 901 }, { "epoch": 7.216, "grad_norm": 33.00720977783203, "learning_rate": 3.5759999999999996e-05, "loss": 1.9039, "step": 902 }, { "epoch": 7.224, "grad_norm": 63.22310256958008, "learning_rate": 3.58e-05, "loss": 2.0117, "step": 903 }, { "epoch": 7.232, "grad_norm": 27.527082443237305, "learning_rate": 3.584e-05, "loss": 1.7098, "step": 904 }, { "epoch": 7.24, "grad_norm": 121.68600463867188, "learning_rate": 3.588e-05, "loss": 1.4417, "step": 905 }, { "epoch": 7.248, "grad_norm": 153.53900146484375, "learning_rate": 3.592e-05, "loss": 1.8148, "step": 906 }, { "epoch": 7.256, "grad_norm": 51.15332794189453, "learning_rate": 3.596e-05, "loss": 2.0989, "step": 907 }, { "epoch": 7.264, "grad_norm": 36.8256950378418, "learning_rate": 3.6e-05, "loss": 1.6345, "step": 908 }, { "epoch": 7.272, "grad_norm": 37.83415603637695, "learning_rate": 3.604e-05, "loss": 2.4836, "step": 909 }, { "epoch": 7.28, "grad_norm": 185.8329620361328, "learning_rate": 3.608e-05, "loss": 1.5101, "step": 910 }, { "epoch": 7.288, "grad_norm": 103.86714172363281, "learning_rate": 3.6120000000000007e-05, "loss": 1.3484, "step": 911 }, { "epoch": 7.296, "grad_norm": 51.8475456237793, "learning_rate": 3.616e-05, "loss": 2.3384, "step": 912 }, { "epoch": 7.304, "grad_norm": 61.230167388916016, "learning_rate": 3.62e-05, "loss": 2.0588, "step": 913 }, { "epoch": 7.312, "grad_norm": 60.49231719970703, "learning_rate": 3.624e-05, "loss": 1.5657, "step": 914 }, { "epoch": 7.32, "grad_norm": 46.788856506347656, "learning_rate": 3.6280000000000005e-05, "loss": 1.3891, "step": 915 }, { "epoch": 7.328, "grad_norm": 61.767948150634766, "learning_rate": 3.6320000000000005e-05, "loss": 1.7917, "step": 916 }, { "epoch": 7.336, "grad_norm": 46.87237548828125, "learning_rate": 3.636e-05, "loss": 1.8899, "step": 917 }, { "epoch": 
7.344, "grad_norm": 24.98194122314453, "learning_rate": 3.6400000000000004e-05, "loss": 1.3486, "step": 918 }, { "epoch": 7.352, "grad_norm": 28.518583297729492, "learning_rate": 3.6440000000000003e-05, "loss": 1.4534, "step": 919 }, { "epoch": 7.36, "grad_norm": 196.3174591064453, "learning_rate": 3.648e-05, "loss": 1.7714, "step": 920 }, { "epoch": 7.368, "grad_norm": 26.588294982910156, "learning_rate": 3.652e-05, "loss": 1.4319, "step": 921 }, { "epoch": 7.376, "grad_norm": 92.36872100830078, "learning_rate": 3.656e-05, "loss": 1.5421, "step": 922 }, { "epoch": 7.384, "grad_norm": 64.01502227783203, "learning_rate": 3.66e-05, "loss": 1.4334, "step": 923 }, { "epoch": 7.392, "grad_norm": 38.13044738769531, "learning_rate": 3.664e-05, "loss": 1.7888, "step": 924 }, { "epoch": 7.4, "grad_norm": 41.167423248291016, "learning_rate": 3.668e-05, "loss": 1.5757, "step": 925 }, { "epoch": 7.408, "grad_norm": 41.09132385253906, "learning_rate": 3.672000000000001e-05, "loss": 1.4019, "step": 926 }, { "epoch": 7.416, "grad_norm": 83.70414733886719, "learning_rate": 3.676e-05, "loss": 2.0367, "step": 927 }, { "epoch": 7.424, "grad_norm": 34.31705093383789, "learning_rate": 3.68e-05, "loss": 1.5519, "step": 928 }, { "epoch": 7.432, "grad_norm": 62.462669372558594, "learning_rate": 3.684e-05, "loss": 1.3122, "step": 929 }, { "epoch": 7.44, "grad_norm": 30.35878562927246, "learning_rate": 3.6880000000000006e-05, "loss": 1.4686, "step": 930 }, { "epoch": 7.448, "grad_norm": 35.03276062011719, "learning_rate": 3.692e-05, "loss": 1.7141, "step": 931 }, { "epoch": 7.456, "grad_norm": 28.640705108642578, "learning_rate": 3.696e-05, "loss": 1.8855, "step": 932 }, { "epoch": 7.464, "grad_norm": 55.1007080078125, "learning_rate": 3.7e-05, "loss": 2.3157, "step": 933 }, { "epoch": 7.4719999999999995, "grad_norm": 34.68144607543945, "learning_rate": 3.7040000000000005e-05, "loss": 1.7741, "step": 934 }, { "epoch": 7.48, "grad_norm": 33.71240997314453, "learning_rate": 
3.7080000000000004e-05, "loss": 1.9995, "step": 935 }, { "epoch": 7.4879999999999995, "grad_norm": 37.396141052246094, "learning_rate": 3.712e-05, "loss": 2.1571, "step": 936 }, { "epoch": 7.496, "grad_norm": 39.06034851074219, "learning_rate": 3.716e-05, "loss": 1.8494, "step": 937 }, { "epoch": 7.504, "grad_norm": 33.423057556152344, "learning_rate": 3.72e-05, "loss": 1.4854, "step": 938 }, { "epoch": 7.5120000000000005, "grad_norm": 52.30845260620117, "learning_rate": 3.724e-05, "loss": 1.6727, "step": 939 }, { "epoch": 7.52, "grad_norm": 66.44094848632812, "learning_rate": 3.728e-05, "loss": 1.3486, "step": 940 }, { "epoch": 7.5280000000000005, "grad_norm": 26.791662216186523, "learning_rate": 3.732e-05, "loss": 2.2979, "step": 941 }, { "epoch": 7.536, "grad_norm": 57.77303695678711, "learning_rate": 3.736e-05, "loss": 1.9935, "step": 942 }, { "epoch": 7.5440000000000005, "grad_norm": 22.733219146728516, "learning_rate": 3.74e-05, "loss": 1.2339, "step": 943 }, { "epoch": 7.552, "grad_norm": 45.030521392822266, "learning_rate": 3.744e-05, "loss": 3.5208, "step": 944 }, { "epoch": 7.5600000000000005, "grad_norm": 62.99386978149414, "learning_rate": 3.748000000000001e-05, "loss": 1.5395, "step": 945 }, { "epoch": 7.568, "grad_norm": 62.45362091064453, "learning_rate": 3.752e-05, "loss": 1.6948, "step": 946 }, { "epoch": 7.576, "grad_norm": 130.82594299316406, "learning_rate": 3.756e-05, "loss": 2.1938, "step": 947 }, { "epoch": 7.584, "grad_norm": 40.462135314941406, "learning_rate": 3.76e-05, "loss": 1.7734, "step": 948 }, { "epoch": 7.592, "grad_norm": 397.6959228515625, "learning_rate": 3.7640000000000006e-05, "loss": 1.6563, "step": 949 }, { "epoch": 7.6, "grad_norm": 66.17484283447266, "learning_rate": 3.7680000000000005e-05, "loss": 1.7259, "step": 950 }, { "epoch": 7.608, "grad_norm": 43.8519401550293, "learning_rate": 3.772e-05, "loss": 2.1641, "step": 951 }, { "epoch": 7.616, "grad_norm": 42.76572799682617, "learning_rate": 3.776e-05, "loss": 1.5023, 
"step": 952 }, { "epoch": 7.624, "grad_norm": 31.608631134033203, "learning_rate": 3.7800000000000004e-05, "loss": 2.4664, "step": 953 }, { "epoch": 7.632, "grad_norm": 41.443389892578125, "learning_rate": 3.7840000000000004e-05, "loss": 1.4048, "step": 954 }, { "epoch": 7.64, "grad_norm": 26.35574722290039, "learning_rate": 3.788e-05, "loss": 3.0967, "step": 955 }, { "epoch": 7.648, "grad_norm": 49.83362579345703, "learning_rate": 3.792e-05, "loss": 1.6092, "step": 956 }, { "epoch": 7.656, "grad_norm": 47.420684814453125, "learning_rate": 3.796e-05, "loss": 1.6343, "step": 957 }, { "epoch": 7.664, "grad_norm": 40.61265563964844, "learning_rate": 3.8e-05, "loss": 1.4702, "step": 958 }, { "epoch": 7.672, "grad_norm": 34.79324722290039, "learning_rate": 3.804e-05, "loss": 1.8803, "step": 959 }, { "epoch": 7.68, "grad_norm": 48.82750701904297, "learning_rate": 3.808e-05, "loss": 2.2988, "step": 960 }, { "epoch": 7.688, "grad_norm": 39.71497344970703, "learning_rate": 3.812e-05, "loss": 1.7393, "step": 961 }, { "epoch": 7.696, "grad_norm": 221.5572052001953, "learning_rate": 3.816e-05, "loss": 1.9524, "step": 962 }, { "epoch": 7.704, "grad_norm": 50.4652099609375, "learning_rate": 3.82e-05, "loss": 1.8829, "step": 963 }, { "epoch": 7.712, "grad_norm": 35.702857971191406, "learning_rate": 3.8240000000000007e-05, "loss": 1.3716, "step": 964 }, { "epoch": 7.72, "grad_norm": 50.34186935424805, "learning_rate": 3.828e-05, "loss": 1.6263, "step": 965 }, { "epoch": 7.728, "grad_norm": 42.98079299926758, "learning_rate": 3.832e-05, "loss": 1.6349, "step": 966 }, { "epoch": 7.736, "grad_norm": 62.84578323364258, "learning_rate": 3.836e-05, "loss": 2.3481, "step": 967 }, { "epoch": 7.744, "grad_norm": 47.67018508911133, "learning_rate": 3.8400000000000005e-05, "loss": 1.111, "step": 968 }, { "epoch": 7.752, "grad_norm": 264.1081848144531, "learning_rate": 3.8440000000000005e-05, "loss": 2.1089, "step": 969 }, { "epoch": 7.76, "grad_norm": 45.704898834228516, "learning_rate": 
3.848e-05, "loss": 1.7596, "step": 970 }, { "epoch": 7.768, "grad_norm": 45.10026931762695, "learning_rate": 3.8520000000000004e-05, "loss": 1.4119, "step": 971 }, { "epoch": 7.776, "grad_norm": 36.60276794433594, "learning_rate": 3.8560000000000004e-05, "loss": 1.4999, "step": 972 }, { "epoch": 7.784, "grad_norm": 143.51817321777344, "learning_rate": 3.86e-05, "loss": 1.4655, "step": 973 }, { "epoch": 7.792, "grad_norm": 50.664703369140625, "learning_rate": 3.864e-05, "loss": 1.7798, "step": 974 }, { "epoch": 7.8, "grad_norm": 49.93855285644531, "learning_rate": 3.868e-05, "loss": 2.3963, "step": 975 }, { "epoch": 7.808, "grad_norm": 49.94970703125, "learning_rate": 3.872e-05, "loss": 2.0943, "step": 976 }, { "epoch": 7.816, "grad_norm": 59.85983657836914, "learning_rate": 3.876e-05, "loss": 1.6371, "step": 977 }, { "epoch": 7.824, "grad_norm": 49.94376754760742, "learning_rate": 3.88e-05, "loss": 1.5845, "step": 978 }, { "epoch": 7.832, "grad_norm": 34.747406005859375, "learning_rate": 3.884e-05, "loss": 1.4891, "step": 979 }, { "epoch": 7.84, "grad_norm": 45.58784484863281, "learning_rate": 3.888e-05, "loss": 1.5439, "step": 980 }, { "epoch": 7.848, "grad_norm": 21.36298942565918, "learning_rate": 3.892e-05, "loss": 1.4131, "step": 981 }, { "epoch": 7.856, "grad_norm": 67.09605407714844, "learning_rate": 3.896e-05, "loss": 1.96, "step": 982 }, { "epoch": 7.864, "grad_norm": 79.9212646484375, "learning_rate": 3.9000000000000006e-05, "loss": 1.8166, "step": 983 }, { "epoch": 7.872, "grad_norm": 41.58958053588867, "learning_rate": 3.9040000000000006e-05, "loss": 1.7697, "step": 984 }, { "epoch": 7.88, "grad_norm": 62.00831985473633, "learning_rate": 3.908e-05, "loss": 1.7695, "step": 985 }, { "epoch": 7.888, "grad_norm": 30.187389373779297, "learning_rate": 3.912e-05, "loss": 2.117, "step": 986 }, { "epoch": 7.896, "grad_norm": 55.76369094848633, "learning_rate": 3.9160000000000005e-05, "loss": 1.867, "step": 987 }, { "epoch": 7.904, "grad_norm": 144.4292449951172, 
"learning_rate": 3.9200000000000004e-05, "loss": 1.8012, "step": 988 }, { "epoch": 7.912, "grad_norm": 45.232357025146484, "learning_rate": 3.9240000000000004e-05, "loss": 1.6627, "step": 989 }, { "epoch": 7.92, "grad_norm": 44.90407180786133, "learning_rate": 3.9280000000000003e-05, "loss": 1.7545, "step": 990 }, { "epoch": 7.928, "grad_norm": 121.72639465332031, "learning_rate": 3.932e-05, "loss": 2.3281, "step": 991 }, { "epoch": 7.936, "grad_norm": 33.911808013916016, "learning_rate": 3.936e-05, "loss": 1.3027, "step": 992 }, { "epoch": 7.944, "grad_norm": 105.22753143310547, "learning_rate": 3.94e-05, "loss": 2.4354, "step": 993 }, { "epoch": 7.952, "grad_norm": 658.6481323242188, "learning_rate": 3.944e-05, "loss": 1.5474, "step": 994 }, { "epoch": 7.96, "grad_norm": 22.020870208740234, "learning_rate": 3.948e-05, "loss": 1.6183, "step": 995 }, { "epoch": 7.968, "grad_norm": 37.19474411010742, "learning_rate": 3.952e-05, "loss": 1.3981, "step": 996 }, { "epoch": 7.976, "grad_norm": 55.30864715576172, "learning_rate": 3.956e-05, "loss": 2.2576, "step": 997 }, { "epoch": 7.984, "grad_norm": 29.523712158203125, "learning_rate": 3.960000000000001e-05, "loss": 1.6365, "step": 998 }, { "epoch": 7.992, "grad_norm": 47.40150833129883, "learning_rate": 3.964e-05, "loss": 1.52, "step": 999 }, { "epoch": 8.0, "grad_norm": 68.46942901611328, "learning_rate": 3.968e-05, "loss": 1.611, "step": 1000 }, { "epoch": 8.0, "eval_loss": 1.7991023063659668, "eval_map": 0.1578, "eval_map_50": 0.3319, "eval_map_75": 0.1114, "eval_map_Coverall": 0.351, "eval_map_Face_Shield": 0.0761, "eval_map_Gloves": 0.1035, "eval_map_Goggles": 0.0, "eval_map_Mask": 0.2582, "eval_map_large": 0.1852, "eval_map_medium": 0.0677, "eval_map_small": -1.0, "eval_mar_1": 0.1505, "eval_mar_10": 0.2704, "eval_mar_100": 0.2884, "eval_mar_100_Coverall": 0.5933, "eval_mar_100_Face_Shield": 0.1294, "eval_mar_100_Gloves": 0.323, "eval_mar_100_Goggles": 0.0, "eval_mar_100_Mask": 0.3962, "eval_mar_large": 0.3313, 
"eval_mar_medium": 0.1377, "eval_mar_small": -1.0, "eval_runtime": 4.5042, "eval_samples_per_second": 6.438, "eval_steps_per_second": 0.444, "step": 1000 }, { "epoch": 8.008, "grad_norm": 57.48580551147461, "learning_rate": 3.972e-05, "loss": 1.8399, "step": 1001 }, { "epoch": 8.016, "grad_norm": 62.9305534362793, "learning_rate": 3.9760000000000006e-05, "loss": 2.0488, "step": 1002 }, { "epoch": 8.024, "grad_norm": 56.34527587890625, "learning_rate": 3.9800000000000005e-05, "loss": 1.6091, "step": 1003 }, { "epoch": 8.032, "grad_norm": 45.525108337402344, "learning_rate": 3.984e-05, "loss": 2.4008, "step": 1004 }, { "epoch": 8.04, "grad_norm": 51.26131820678711, "learning_rate": 3.988e-05, "loss": 1.9271, "step": 1005 }, { "epoch": 8.048, "grad_norm": 39.405982971191406, "learning_rate": 3.9920000000000004e-05, "loss": 1.7391, "step": 1006 }, { "epoch": 8.056, "grad_norm": 32.52248764038086, "learning_rate": 3.9960000000000004e-05, "loss": 1.558, "step": 1007 }, { "epoch": 8.064, "grad_norm": 101.31131744384766, "learning_rate": 4e-05, "loss": 1.8997, "step": 1008 }, { "epoch": 8.072, "grad_norm": 42.6553955078125, "learning_rate": 4.004e-05, "loss": 1.8067, "step": 1009 }, { "epoch": 8.08, "grad_norm": 38.097660064697266, "learning_rate": 4.008e-05, "loss": 1.7697, "step": 1010 }, { "epoch": 8.088, "grad_norm": 72.8248291015625, "learning_rate": 4.012e-05, "loss": 1.6035, "step": 1011 }, { "epoch": 8.096, "grad_norm": 40.50526809692383, "learning_rate": 4.016e-05, "loss": 1.5173, "step": 1012 }, { "epoch": 8.104, "grad_norm": 44.967559814453125, "learning_rate": 4.02e-05, "loss": 1.7443, "step": 1013 }, { "epoch": 8.112, "grad_norm": 49.76681137084961, "learning_rate": 4.024e-05, "loss": 1.8183, "step": 1014 }, { "epoch": 8.12, "grad_norm": 115.75785064697266, "learning_rate": 4.028e-05, "loss": 1.6309, "step": 1015 }, { "epoch": 8.128, "grad_norm": 76.01524353027344, "learning_rate": 4.032e-05, "loss": 2.0343, "step": 1016 }, { "epoch": 8.136, "grad_norm": 
32.0152473449707, "learning_rate": 4.0360000000000007e-05, "loss": 2.1357, "step": 1017 }, { "epoch": 8.144, "grad_norm": 72.71578216552734, "learning_rate": 4.0400000000000006e-05, "loss": 1.9281, "step": 1018 }, { "epoch": 8.152, "grad_norm": 23.867515563964844, "learning_rate": 4.044e-05, "loss": 1.8279, "step": 1019 }, { "epoch": 8.16, "grad_norm": 35.344852447509766, "learning_rate": 4.048e-05, "loss": 1.6606, "step": 1020 }, { "epoch": 8.168, "grad_norm": 55.491180419921875, "learning_rate": 4.0520000000000005e-05, "loss": 1.6175, "step": 1021 }, { "epoch": 8.176, "grad_norm": 188.6601104736328, "learning_rate": 4.0560000000000005e-05, "loss": 1.9427, "step": 1022 }, { "epoch": 8.184, "grad_norm": 53.44888687133789, "learning_rate": 4.0600000000000004e-05, "loss": 1.425, "step": 1023 }, { "epoch": 8.192, "grad_norm": 67.0040512084961, "learning_rate": 4.064e-05, "loss": 2.1425, "step": 1024 }, { "epoch": 8.2, "grad_norm": 24.83396339416504, "learning_rate": 4.0680000000000004e-05, "loss": 1.5947, "step": 1025 }, { "epoch": 8.208, "grad_norm": 32.91032028198242, "learning_rate": 4.072e-05, "loss": 1.4672, "step": 1026 }, { "epoch": 8.216, "grad_norm": 57.55417251586914, "learning_rate": 4.076e-05, "loss": 1.3241, "step": 1027 }, { "epoch": 8.224, "grad_norm": 18.943954467773438, "learning_rate": 4.08e-05, "loss": 1.528, "step": 1028 }, { "epoch": 8.232, "grad_norm": 40.499759674072266, "learning_rate": 4.084e-05, "loss": 1.6169, "step": 1029 }, { "epoch": 8.24, "grad_norm": 69.40813446044922, "learning_rate": 4.088e-05, "loss": 2.2329, "step": 1030 }, { "epoch": 8.248, "grad_norm": 48.32653045654297, "learning_rate": 4.092e-05, "loss": 1.7621, "step": 1031 }, { "epoch": 8.256, "grad_norm": 49.332916259765625, "learning_rate": 4.096e-05, "loss": 1.827, "step": 1032 }, { "epoch": 8.264, "grad_norm": 40.08510208129883, "learning_rate": 4.1e-05, "loss": 1.4733, "step": 1033 }, { "epoch": 8.272, "grad_norm": 45.85405349731445, "learning_rate": 4.104e-05, "loss": 
1.7686, "step": 1034 }, { "epoch": 8.28, "grad_norm": 42.45307159423828, "learning_rate": 4.108e-05, "loss": 1.5218, "step": 1035 }, { "epoch": 8.288, "grad_norm": 20.828763961791992, "learning_rate": 4.1120000000000006e-05, "loss": 1.8762, "step": 1036 }, { "epoch": 8.296, "grad_norm": 36.65519714355469, "learning_rate": 4.1160000000000006e-05, "loss": 2.1501, "step": 1037 }, { "epoch": 8.304, "grad_norm": 42.23794174194336, "learning_rate": 4.12e-05, "loss": 1.7034, "step": 1038 }, { "epoch": 8.312, "grad_norm": 36.237422943115234, "learning_rate": 4.124e-05, "loss": 1.7659, "step": 1039 }, { "epoch": 8.32, "grad_norm": 173.26412963867188, "learning_rate": 4.1280000000000005e-05, "loss": 1.6217, "step": 1040 }, { "epoch": 8.328, "grad_norm": 31.29490089416504, "learning_rate": 4.1320000000000004e-05, "loss": 1.7731, "step": 1041 }, { "epoch": 8.336, "grad_norm": 36.79054641723633, "learning_rate": 4.1360000000000004e-05, "loss": 2.0846, "step": 1042 }, { "epoch": 8.344, "grad_norm": 179.9676513671875, "learning_rate": 4.14e-05, "loss": 1.7608, "step": 1043 }, { "epoch": 8.352, "grad_norm": 167.3611602783203, "learning_rate": 4.144e-05, "loss": 1.6031, "step": 1044 }, { "epoch": 8.36, "grad_norm": 74.68074035644531, "learning_rate": 4.148e-05, "loss": 1.6311, "step": 1045 }, { "epoch": 8.368, "grad_norm": 53.6862907409668, "learning_rate": 4.152e-05, "loss": 1.3773, "step": 1046 }, { "epoch": 8.376, "grad_norm": 51.12096405029297, "learning_rate": 4.156e-05, "loss": 2.055, "step": 1047 }, { "epoch": 8.384, "grad_norm": 36.24363327026367, "learning_rate": 4.16e-05, "loss": 1.6339, "step": 1048 }, { "epoch": 8.392, "grad_norm": 25.604211807250977, "learning_rate": 4.164e-05, "loss": 1.3624, "step": 1049 }, { "epoch": 8.4, "grad_norm": 28.11526107788086, "learning_rate": 4.168e-05, "loss": 1.5191, "step": 1050 }, { "epoch": 8.408, "grad_norm": 26.024486541748047, "learning_rate": 4.172e-05, "loss": 1.6749, "step": 1051 }, { "epoch": 8.416, "grad_norm": 
27.851905822753906, "learning_rate": 4.176000000000001e-05, "loss": 1.5898, "step": 1052 }, { "epoch": 8.424, "grad_norm": 30.63368034362793, "learning_rate": 4.18e-05, "loss": 1.5245, "step": 1053 }, { "epoch": 8.432, "grad_norm": 30.987964630126953, "learning_rate": 4.184e-05, "loss": 1.9162, "step": 1054 }, { "epoch": 8.44, "grad_norm": 66.06137084960938, "learning_rate": 4.1880000000000006e-05, "loss": 2.0404, "step": 1055 }, { "epoch": 8.448, "grad_norm": 43.669559478759766, "learning_rate": 4.1920000000000005e-05, "loss": 2.1172, "step": 1056 }, { "epoch": 8.456, "grad_norm": 32.956626892089844, "learning_rate": 4.196e-05, "loss": 1.5642, "step": 1057 }, { "epoch": 8.464, "grad_norm": 49.43326187133789, "learning_rate": 4.2e-05, "loss": 1.7689, "step": 1058 }, { "epoch": 8.472, "grad_norm": 63.85902404785156, "learning_rate": 4.2040000000000004e-05, "loss": 1.4325, "step": 1059 }, { "epoch": 8.48, "grad_norm": 40.584381103515625, "learning_rate": 4.2080000000000004e-05, "loss": 1.8303, "step": 1060 }, { "epoch": 8.488, "grad_norm": 58.36891555786133, "learning_rate": 4.212e-05, "loss": 1.2401, "step": 1061 }, { "epoch": 8.496, "grad_norm": 179.8626708984375, "learning_rate": 4.2159999999999996e-05, "loss": 1.6187, "step": 1062 }, { "epoch": 8.504, "grad_norm": 20.45338249206543, "learning_rate": 4.22e-05, "loss": 1.6452, "step": 1063 }, { "epoch": 8.512, "grad_norm": 44.0299072265625, "learning_rate": 4.224e-05, "loss": 1.3889, "step": 1064 }, { "epoch": 8.52, "grad_norm": 34.58027648925781, "learning_rate": 4.228e-05, "loss": 2.3509, "step": 1065 }, { "epoch": 8.528, "grad_norm": 230.09124755859375, "learning_rate": 4.232e-05, "loss": 1.365, "step": 1066 }, { "epoch": 8.536, "grad_norm": 46.689361572265625, "learning_rate": 4.236e-05, "loss": 1.7337, "step": 1067 }, { "epoch": 8.544, "grad_norm": 24.479175567626953, "learning_rate": 4.24e-05, "loss": 1.3967, "step": 1068 }, { "epoch": 8.552, "grad_norm": 34.27241897583008, "learning_rate": 4.244e-05, "loss": 
1.6582, "step": 1069 }, { "epoch": 8.56, "grad_norm": 76.92857360839844, "learning_rate": 4.248e-05, "loss": 1.4535, "step": 1070 }, { "epoch": 8.568, "grad_norm": 50.729217529296875, "learning_rate": 4.2520000000000006e-05, "loss": 1.5152, "step": 1071 }, { "epoch": 8.576, "grad_norm": 64.37968444824219, "learning_rate": 4.256e-05, "loss": 1.9412, "step": 1072 }, { "epoch": 8.584, "grad_norm": 40.97882843017578, "learning_rate": 4.26e-05, "loss": 1.4787, "step": 1073 }, { "epoch": 8.592, "grad_norm": 42.098567962646484, "learning_rate": 4.2640000000000005e-05, "loss": 1.5122, "step": 1074 }, { "epoch": 8.6, "grad_norm": 146.32589721679688, "learning_rate": 4.2680000000000005e-05, "loss": 2.1811, "step": 1075 }, { "epoch": 8.608, "grad_norm": 393.81011962890625, "learning_rate": 4.2720000000000004e-05, "loss": 2.2481, "step": 1076 }, { "epoch": 8.616, "grad_norm": 46.59668731689453, "learning_rate": 4.276e-05, "loss": 1.6594, "step": 1077 }, { "epoch": 8.624, "grad_norm": 52.15157699584961, "learning_rate": 4.2800000000000004e-05, "loss": 1.4766, "step": 1078 }, { "epoch": 8.632, "grad_norm": 295.7452697753906, "learning_rate": 4.284e-05, "loss": 1.951, "step": 1079 }, { "epoch": 8.64, "grad_norm": 93.49295806884766, "learning_rate": 4.288e-05, "loss": 1.6792, "step": 1080 }, { "epoch": 8.648, "grad_norm": 127.58560180664062, "learning_rate": 4.292e-05, "loss": 1.9326, "step": 1081 }, { "epoch": 8.656, "grad_norm": 62.9177131652832, "learning_rate": 4.296e-05, "loss": 1.8073, "step": 1082 }, { "epoch": 8.664, "grad_norm": 56.21320724487305, "learning_rate": 4.3e-05, "loss": 1.6273, "step": 1083 }, { "epoch": 8.672, "grad_norm": 45.622093200683594, "learning_rate": 4.304e-05, "loss": 2.1301, "step": 1084 }, { "epoch": 8.68, "grad_norm": 89.34099578857422, "learning_rate": 4.308e-05, "loss": 2.0655, "step": 1085 }, { "epoch": 8.688, "grad_norm": 44.30347442626953, "learning_rate": 4.312000000000001e-05, "loss": 2.1873, "step": 1086 }, { "epoch": 8.696, "grad_norm": 
94.17753601074219, "learning_rate": 4.316e-05, "loss": 1.6337, "step": 1087 }, { "epoch": 8.704, "grad_norm": 72.4117202758789, "learning_rate": 4.32e-05, "loss": 2.161, "step": 1088 }, { "epoch": 8.712, "grad_norm": 25.83119010925293, "learning_rate": 4.324e-05, "loss": 1.7512, "step": 1089 }, { "epoch": 8.72, "grad_norm": 40.540470123291016, "learning_rate": 4.3280000000000006e-05, "loss": 1.5306, "step": 1090 }, { "epoch": 8.728, "grad_norm": 46.5672492980957, "learning_rate": 4.332e-05, "loss": 1.8315, "step": 1091 }, { "epoch": 8.736, "grad_norm": 544.8280639648438, "learning_rate": 4.336e-05, "loss": 1.8182, "step": 1092 }, { "epoch": 8.744, "grad_norm": 50.20187759399414, "learning_rate": 4.3400000000000005e-05, "loss": 1.837, "step": 1093 }, { "epoch": 8.752, "grad_norm": 43.95297622680664, "learning_rate": 4.3440000000000004e-05, "loss": 1.7869, "step": 1094 }, { "epoch": 8.76, "grad_norm": 193.81625366210938, "learning_rate": 4.3480000000000004e-05, "loss": 1.8738, "step": 1095 }, { "epoch": 8.768, "grad_norm": 138.75045776367188, "learning_rate": 4.352e-05, "loss": 2.7224, "step": 1096 }, { "epoch": 8.776, "grad_norm": 30.466577529907227, "learning_rate": 4.356e-05, "loss": 2.1526, "step": 1097 }, { "epoch": 8.784, "grad_norm": 51.993141174316406, "learning_rate": 4.36e-05, "loss": 1.863, "step": 1098 }, { "epoch": 8.792, "grad_norm": 255.81507873535156, "learning_rate": 4.364e-05, "loss": 1.6801, "step": 1099 }, { "epoch": 8.8, "grad_norm": 65.06553649902344, "learning_rate": 4.368e-05, "loss": 1.7657, "step": 1100 }, { "epoch": 8.808, "grad_norm": 39.906982421875, "learning_rate": 4.372e-05, "loss": 2.0164, "step": 1101 }, { "epoch": 8.816, "grad_norm": 48.4552001953125, "learning_rate": 4.376e-05, "loss": 2.1643, "step": 1102 }, { "epoch": 8.824, "grad_norm": 49.121551513671875, "learning_rate": 4.38e-05, "loss": 2.2022, "step": 1103 }, { "epoch": 8.832, "grad_norm": 24.219932556152344, "learning_rate": 4.384e-05, "loss": 2.2211, "step": 1104 }, { 
"epoch": 8.84, "grad_norm": 173.47067260742188, "learning_rate": 4.388000000000001e-05, "loss": 1.579, "step": 1105 }, { "epoch": 8.848, "grad_norm": 30.374401092529297, "learning_rate": 4.392e-05, "loss": 3.4093, "step": 1106 }, { "epoch": 8.856, "grad_norm": 39.386741638183594, "learning_rate": 4.396e-05, "loss": 1.7054, "step": 1107 }, { "epoch": 8.864, "grad_norm": 35.97609329223633, "learning_rate": 4.4000000000000006e-05, "loss": 1.9028, "step": 1108 }, { "epoch": 8.872, "grad_norm": 46.171234130859375, "learning_rate": 4.4040000000000005e-05, "loss": 1.8504, "step": 1109 }, { "epoch": 8.88, "grad_norm": 40.387176513671875, "learning_rate": 4.4080000000000005e-05, "loss": 1.5469, "step": 1110 }, { "epoch": 8.888, "grad_norm": 44.64543914794922, "learning_rate": 4.412e-05, "loss": 1.8043, "step": 1111 }, { "epoch": 8.896, "grad_norm": 182.77273559570312, "learning_rate": 4.4160000000000004e-05, "loss": 1.6103, "step": 1112 }, { "epoch": 8.904, "grad_norm": 43.03998565673828, "learning_rate": 4.4200000000000004e-05, "loss": 3.212, "step": 1113 }, { "epoch": 8.912, "grad_norm": 28.021257400512695, "learning_rate": 4.424e-05, "loss": 2.0263, "step": 1114 }, { "epoch": 8.92, "grad_norm": 18.74997901916504, "learning_rate": 4.428e-05, "loss": 1.4696, "step": 1115 }, { "epoch": 8.928, "grad_norm": 21.50969886779785, "learning_rate": 4.432e-05, "loss": 1.9313, "step": 1116 }, { "epoch": 8.936, "grad_norm": 76.1159896850586, "learning_rate": 4.436e-05, "loss": 1.8199, "step": 1117 }, { "epoch": 8.943999999999999, "grad_norm": 29.63441276550293, "learning_rate": 4.44e-05, "loss": 1.776, "step": 1118 }, { "epoch": 8.952, "grad_norm": 40.85992431640625, "learning_rate": 4.444e-05, "loss": 1.7427, "step": 1119 }, { "epoch": 8.96, "grad_norm": 79.09730529785156, "learning_rate": 4.448e-05, "loss": 1.6786, "step": 1120 }, { "epoch": 8.968, "grad_norm": 46.53868103027344, "learning_rate": 4.452e-05, "loss": 2.0599, "step": 1121 }, { "epoch": 8.975999999999999, "grad_norm": 
39.9390983581543, "learning_rate": 4.456e-05, "loss": 2.4269, "step": 1122 }, { "epoch": 8.984, "grad_norm": 45.91835403442383, "learning_rate": 4.46e-05, "loss": 1.4041, "step": 1123 }, { "epoch": 8.992, "grad_norm": 84.60075378417969, "learning_rate": 4.4640000000000006e-05, "loss": 1.8115, "step": 1124 }, { "epoch": 9.0, "grad_norm": 45.26688003540039, "learning_rate": 4.468e-05, "loss": 1.205, "step": 1125 }, { "epoch": 9.0, "eval_loss": 2.0126149654388428, "eval_map": 0.0805, "eval_map_50": 0.1942, "eval_map_75": 0.0563, "eval_map_Coverall": 0.2414, "eval_map_Face_Shield": 0.0138, "eval_map_Gloves": 0.0434, "eval_map_Goggles": 0.0, "eval_map_Mask": 0.104, "eval_map_large": 0.1014, "eval_map_medium": 0.0229, "eval_map_small": -1.0, "eval_mar_1": 0.1056, "eval_mar_10": 0.1903, "eval_mar_100": 0.1919, "eval_mar_100_Coverall": 0.5156, "eval_mar_100_Face_Shield": 0.0471, "eval_mar_100_Gloves": 0.1951, "eval_mar_100_Goggles": 0.0, "eval_mar_100_Mask": 0.2019, "eval_mar_large": 0.2287, "eval_mar_medium": 0.0614, "eval_mar_small": -1.0, "eval_runtime": 3.7973, "eval_samples_per_second": 7.637, "eval_steps_per_second": 0.527, "step": 1125 }, { "epoch": 9.008, "grad_norm": 37.5550651550293, "learning_rate": 4.472e-05, "loss": 1.7919, "step": 1126 }, { "epoch": 9.016, "grad_norm": 48.8674430847168, "learning_rate": 4.4760000000000005e-05, "loss": 1.5319, "step": 1127 }, { "epoch": 9.024, "grad_norm": 41.42637252807617, "learning_rate": 4.4800000000000005e-05, "loss": 1.2275, "step": 1128 }, { "epoch": 9.032, "grad_norm": 34.008243560791016, "learning_rate": 4.4840000000000004e-05, "loss": 1.7834, "step": 1129 }, { "epoch": 9.04, "grad_norm": 36.02488327026367, "learning_rate": 4.488e-05, "loss": 2.2415, "step": 1130 }, { "epoch": 9.048, "grad_norm": 55.592994689941406, "learning_rate": 4.4920000000000004e-05, "loss": 1.6743, "step": 1131 }, { "epoch": 9.056, "grad_norm": 26.781211853027344, "learning_rate": 4.496e-05, "loss": 1.9712, "step": 1132 }, { "epoch": 9.064, 
"grad_norm": 44.30107116699219, "learning_rate": 4.5e-05, "loss": 1.7061, "step": 1133 }, { "epoch": 9.072, "grad_norm": 49.373046875, "learning_rate": 4.504e-05, "loss": 1.8941, "step": 1134 }, { "epoch": 9.08, "grad_norm": 48.76734924316406, "learning_rate": 4.508e-05, "loss": 2.2069, "step": 1135 }, { "epoch": 9.088, "grad_norm": 32.015201568603516, "learning_rate": 4.512e-05, "loss": 1.8085, "step": 1136 }, { "epoch": 9.096, "grad_norm": 57.1119384765625, "learning_rate": 4.516e-05, "loss": 1.8722, "step": 1137 }, { "epoch": 9.104, "grad_norm": 28.915603637695312, "learning_rate": 4.52e-05, "loss": 1.6116, "step": 1138 }, { "epoch": 9.112, "grad_norm": 38.762237548828125, "learning_rate": 4.524000000000001e-05, "loss": 1.8864, "step": 1139 }, { "epoch": 9.12, "grad_norm": 41.42697525024414, "learning_rate": 4.528e-05, "loss": 1.8456, "step": 1140 }, { "epoch": 9.128, "grad_norm": 84.95235443115234, "learning_rate": 4.532e-05, "loss": 2.1157, "step": 1141 }, { "epoch": 9.136, "grad_norm": 52.05299758911133, "learning_rate": 4.536e-05, "loss": 1.701, "step": 1142 }, { "epoch": 9.144, "grad_norm": 56.37342071533203, "learning_rate": 4.5400000000000006e-05, "loss": 2.0627, "step": 1143 }, { "epoch": 9.152, "grad_norm": 35.84242630004883, "learning_rate": 4.5440000000000005e-05, "loss": 1.7917, "step": 1144 }, { "epoch": 9.16, "grad_norm": 21.704355239868164, "learning_rate": 4.548e-05, "loss": 1.4495, "step": 1145 }, { "epoch": 9.168, "grad_norm": 43.37382888793945, "learning_rate": 4.5520000000000005e-05, "loss": 1.2661, "step": 1146 }, { "epoch": 9.176, "grad_norm": 37.97639083862305, "learning_rate": 4.5560000000000004e-05, "loss": 1.0322, "step": 1147 }, { "epoch": 9.184, "grad_norm": 23.179841995239258, "learning_rate": 4.5600000000000004e-05, "loss": 1.4763, "step": 1148 }, { "epoch": 9.192, "grad_norm": 34.32959747314453, "learning_rate": 4.564e-05, "loss": 1.8325, "step": 1149 }, { "epoch": 9.2, "grad_norm": 31.46529769897461, "learning_rate": 4.568e-05, 
"loss": 1.8163, "step": 1150 }, { "epoch": 9.208, "grad_norm": 35.988277435302734, "learning_rate": 4.572e-05, "loss": 1.4583, "step": 1151 }, { "epoch": 9.216, "grad_norm": 73.28640747070312, "learning_rate": 4.576e-05, "loss": 1.161, "step": 1152 }, { "epoch": 9.224, "grad_norm": 44.58521270751953, "learning_rate": 4.58e-05, "loss": 1.6256, "step": 1153 }, { "epoch": 9.232, "grad_norm": 15.930752754211426, "learning_rate": 4.584e-05, "loss": 1.3336, "step": 1154 }, { "epoch": 9.24, "grad_norm": 46.043357849121094, "learning_rate": 4.588e-05, "loss": 2.043, "step": 1155 }, { "epoch": 9.248, "grad_norm": 55.01474380493164, "learning_rate": 4.592e-05, "loss": 1.4238, "step": 1156 }, { "epoch": 9.256, "grad_norm": 29.920114517211914, "learning_rate": 4.596e-05, "loss": 1.3605, "step": 1157 }, { "epoch": 9.264, "grad_norm": 72.23231506347656, "learning_rate": 4.600000000000001e-05, "loss": 2.0162, "step": 1158 }, { "epoch": 9.272, "grad_norm": 39.75775146484375, "learning_rate": 4.604e-05, "loss": 1.423, "step": 1159 }, { "epoch": 9.28, "grad_norm": 69.63761901855469, "learning_rate": 4.608e-05, "loss": 1.2242, "step": 1160 }, { "epoch": 9.288, "grad_norm": 79.72176361083984, "learning_rate": 4.612e-05, "loss": 1.6178, "step": 1161 }, { "epoch": 9.296, "grad_norm": 30.783241271972656, "learning_rate": 4.6160000000000005e-05, "loss": 1.5757, "step": 1162 }, { "epoch": 9.304, "grad_norm": 28.16426658630371, "learning_rate": 4.6200000000000005e-05, "loss": 1.7954, "step": 1163 }, { "epoch": 9.312, "grad_norm": 34.136329650878906, "learning_rate": 4.624e-05, "loss": 1.7448, "step": 1164 }, { "epoch": 9.32, "grad_norm": 26.26499366760254, "learning_rate": 4.6280000000000004e-05, "loss": 1.5435, "step": 1165 }, { "epoch": 9.328, "grad_norm": 48.85750961303711, "learning_rate": 4.6320000000000004e-05, "loss": 1.7845, "step": 1166 }, { "epoch": 9.336, "grad_norm": 31.16029167175293, "learning_rate": 4.636e-05, "loss": 2.0354, "step": 1167 }, { "epoch": 9.344, "grad_norm": 
37.69902801513672, "learning_rate": 4.64e-05, "loss": 1.9808, "step": 1168 }, { "epoch": 9.352, "grad_norm": 40.336021423339844, "learning_rate": 4.644e-05, "loss": 1.1494, "step": 1169 }, { "epoch": 9.36, "grad_norm": 23.6491641998291, "learning_rate": 4.648e-05, "loss": 1.6838, "step": 1170 }, { "epoch": 9.368, "grad_norm": 33.65192413330078, "learning_rate": 4.652e-05, "loss": 2.1437, "step": 1171 }, { "epoch": 9.376, "grad_norm": 27.42282485961914, "learning_rate": 4.656e-05, "loss": 1.753, "step": 1172 }, { "epoch": 9.384, "grad_norm": 35.780059814453125, "learning_rate": 4.660000000000001e-05, "loss": 1.5811, "step": 1173 }, { "epoch": 9.392, "grad_norm": 70.2336196899414, "learning_rate": 4.664e-05, "loss": 1.6507, "step": 1174 }, { "epoch": 9.4, "grad_norm": 38.34742736816406, "learning_rate": 4.668e-05, "loss": 1.4929, "step": 1175 }, { "epoch": 9.408, "grad_norm": 48.362030029296875, "learning_rate": 4.672e-05, "loss": 1.4244, "step": 1176 }, { "epoch": 9.416, "grad_norm": 142.842529296875, "learning_rate": 4.6760000000000006e-05, "loss": 1.343, "step": 1177 }, { "epoch": 9.424, "grad_norm": 21.185544967651367, "learning_rate": 4.6800000000000006e-05, "loss": 2.155, "step": 1178 }, { "epoch": 9.432, "grad_norm": 45.58897018432617, "learning_rate": 4.684e-05, "loss": 1.4966, "step": 1179 }, { "epoch": 9.44, "grad_norm": 32.65544509887695, "learning_rate": 4.688e-05, "loss": 1.7702, "step": 1180 }, { "epoch": 9.448, "grad_norm": 122.5071029663086, "learning_rate": 4.6920000000000005e-05, "loss": 1.5294, "step": 1181 }, { "epoch": 9.456, "grad_norm": 42.891387939453125, "learning_rate": 4.6960000000000004e-05, "loss": 1.521, "step": 1182 }, { "epoch": 9.464, "grad_norm": 53.69174575805664, "learning_rate": 4.7e-05, "loss": 1.541, "step": 1183 }, { "epoch": 9.472, "grad_norm": 96.16242980957031, "learning_rate": 4.7040000000000004e-05, "loss": 1.8226, "step": 1184 }, { "epoch": 9.48, "grad_norm": 21.437862396240234, "learning_rate": 4.708e-05, "loss": 1.832, 
"step": 1185 }, { "epoch": 9.488, "grad_norm": 40.8399772644043, "learning_rate": 4.712e-05, "loss": 1.5405, "step": 1186 }, { "epoch": 9.496, "grad_norm": 27.879114151000977, "learning_rate": 4.716e-05, "loss": 1.8348, "step": 1187 }, { "epoch": 9.504, "grad_norm": 32.01569366455078, "learning_rate": 4.72e-05, "loss": 1.64, "step": 1188 }, { "epoch": 9.512, "grad_norm": 43.134700775146484, "learning_rate": 4.724e-05, "loss": 1.4915, "step": 1189 }, { "epoch": 9.52, "grad_norm": 33.96206283569336, "learning_rate": 4.728e-05, "loss": 1.5085, "step": 1190 }, { "epoch": 9.528, "grad_norm": 41.24824523925781, "learning_rate": 4.732e-05, "loss": 1.4996, "step": 1191 }, { "epoch": 9.536, "grad_norm": 56.9720344543457, "learning_rate": 4.736000000000001e-05, "loss": 2.0222, "step": 1192 }, { "epoch": 9.544, "grad_norm": 18.569921493530273, "learning_rate": 4.74e-05, "loss": 1.3489, "step": 1193 }, { "epoch": 9.552, "grad_norm": 43.127140045166016, "learning_rate": 4.744e-05, "loss": 1.6008, "step": 1194 }, { "epoch": 9.56, "grad_norm": 30.11831283569336, "learning_rate": 4.748e-05, "loss": 1.4034, "step": 1195 }, { "epoch": 9.568, "grad_norm": 69.2654800415039, "learning_rate": 4.7520000000000006e-05, "loss": 1.356, "step": 1196 }, { "epoch": 9.576, "grad_norm": 31.611732482910156, "learning_rate": 4.7560000000000005e-05, "loss": 1.5962, "step": 1197 }, { "epoch": 9.584, "grad_norm": 52.74797439575195, "learning_rate": 4.76e-05, "loss": 1.4137, "step": 1198 }, { "epoch": 9.592, "grad_norm": 39.05170822143555, "learning_rate": 4.7640000000000005e-05, "loss": 1.4454, "step": 1199 }, { "epoch": 9.6, "grad_norm": 44.48203659057617, "learning_rate": 4.7680000000000004e-05, "loss": 1.5449, "step": 1200 }, { "epoch": 9.608, "grad_norm": 29.699602127075195, "learning_rate": 4.7720000000000004e-05, "loss": 1.6057, "step": 1201 }, { "epoch": 9.616, "grad_norm": 56.64519119262695, "learning_rate": 4.7760000000000004e-05, "loss": 1.8139, "step": 1202 }, { "epoch": 9.624, "grad_norm": 
48.84329605102539, "learning_rate": 4.78e-05, "loss": 3.3081, "step": 1203 }, { "epoch": 9.632, "grad_norm": 106.97257995605469, "learning_rate": 4.784e-05, "loss": 1.9253, "step": 1204 }, { "epoch": 9.64, "grad_norm": 62.19359588623047, "learning_rate": 4.788e-05, "loss": 2.0177, "step": 1205 }, { "epoch": 9.648, "grad_norm": 40.46112823486328, "learning_rate": 4.792e-05, "loss": 1.5999, "step": 1206 }, { "epoch": 9.656, "grad_norm": 41.773712158203125, "learning_rate": 4.796e-05, "loss": 1.5013, "step": 1207 }, { "epoch": 9.664, "grad_norm": 54.263275146484375, "learning_rate": 4.8e-05, "loss": 1.3729, "step": 1208 }, { "epoch": 9.672, "grad_norm": 27.390213012695312, "learning_rate": 4.804e-05, "loss": 1.624, "step": 1209 }, { "epoch": 9.68, "grad_norm": 57.39705276489258, "learning_rate": 4.808e-05, "loss": 1.8266, "step": 1210 }, { "epoch": 9.688, "grad_norm": 24.7160587310791, "learning_rate": 4.812000000000001e-05, "loss": 1.4465, "step": 1211 }, { "epoch": 9.696, "grad_norm": 37.20499038696289, "learning_rate": 4.816e-05, "loss": 1.6568, "step": 1212 }, { "epoch": 9.704, "grad_norm": 51.86496353149414, "learning_rate": 4.82e-05, "loss": 1.6648, "step": 1213 }, { "epoch": 9.712, "grad_norm": 29.552122116088867, "learning_rate": 4.824e-05, "loss": 1.3685, "step": 1214 }, { "epoch": 9.72, "grad_norm": 32.58909606933594, "learning_rate": 4.8280000000000005e-05, "loss": 2.4447, "step": 1215 }, { "epoch": 9.728, "grad_norm": 41.26041030883789, "learning_rate": 4.8320000000000005e-05, "loss": 1.3118, "step": 1216 }, { "epoch": 9.736, "grad_norm": 32.50422668457031, "learning_rate": 4.836e-05, "loss": 1.3687, "step": 1217 }, { "epoch": 9.744, "grad_norm": 22.50342559814453, "learning_rate": 4.8400000000000004e-05, "loss": 1.5302, "step": 1218 }, { "epoch": 9.752, "grad_norm": 63.95438766479492, "learning_rate": 4.8440000000000004e-05, "loss": 1.5651, "step": 1219 }, { "epoch": 9.76, "grad_norm": 35.62430191040039, "learning_rate": 4.8480000000000003e-05, "loss": 
1.3556, "step": 1220 }, { "epoch": 9.768, "grad_norm": 40.942848205566406, "learning_rate": 4.852e-05, "loss": 1.6039, "step": 1221 }, { "epoch": 9.776, "grad_norm": 48.09309005737305, "learning_rate": 4.856e-05, "loss": 1.2994, "step": 1222 }, { "epoch": 9.784, "grad_norm": 29.940065383911133, "learning_rate": 4.86e-05, "loss": 1.5591, "step": 1223 }, { "epoch": 9.792, "grad_norm": 33.09096145629883, "learning_rate": 4.864e-05, "loss": 1.4519, "step": 1224 }, { "epoch": 9.8, "grad_norm": 43.94113540649414, "learning_rate": 4.868e-05, "loss": 1.9536, "step": 1225 }, { "epoch": 9.808, "grad_norm": 37.69444274902344, "learning_rate": 4.872000000000001e-05, "loss": 1.6117, "step": 1226 }, { "epoch": 9.816, "grad_norm": 123.5386962890625, "learning_rate": 4.876e-05, "loss": 1.9474, "step": 1227 }, { "epoch": 9.824, "grad_norm": 43.71195983886719, "learning_rate": 4.88e-05, "loss": 1.543, "step": 1228 }, { "epoch": 9.832, "grad_norm": 29.03174591064453, "learning_rate": 4.884e-05, "loss": 1.796, "step": 1229 }, { "epoch": 9.84, "grad_norm": 44.97372817993164, "learning_rate": 4.8880000000000006e-05, "loss": 2.1282, "step": 1230 }, { "epoch": 9.848, "grad_norm": 30.2236385345459, "learning_rate": 4.8920000000000006e-05, "loss": 1.1813, "step": 1231 }, { "epoch": 9.856, "grad_norm": 21.92142105102539, "learning_rate": 4.896e-05, "loss": 1.7537, "step": 1232 }, { "epoch": 9.864, "grad_norm": 27.57614517211914, "learning_rate": 4.9e-05, "loss": 1.2383, "step": 1233 }, { "epoch": 9.872, "grad_norm": 42.98395919799805, "learning_rate": 4.9040000000000005e-05, "loss": 3.5638, "step": 1234 }, { "epoch": 9.88, "grad_norm": 37.724124908447266, "learning_rate": 4.9080000000000004e-05, "loss": 1.3284, "step": 1235 }, { "epoch": 9.888, "grad_norm": 23.636199951171875, "learning_rate": 4.9120000000000004e-05, "loss": 1.5179, "step": 1236 }, { "epoch": 9.896, "grad_norm": 101.78038024902344, "learning_rate": 4.9160000000000004e-05, "loss": 1.2626, "step": 1237 }, { "epoch": 9.904, 
"grad_norm": 25.85527229309082, "learning_rate": 4.92e-05, "loss": 1.623, "step": 1238 }, { "epoch": 9.912, "grad_norm": 30.609296798706055, "learning_rate": 4.924e-05, "loss": 1.6481, "step": 1239 }, { "epoch": 9.92, "grad_norm": 28.288501739501953, "learning_rate": 4.928e-05, "loss": 1.6005, "step": 1240 }, { "epoch": 9.928, "grad_norm": 68.38937377929688, "learning_rate": 4.932e-05, "loss": 2.2851, "step": 1241 }, { "epoch": 9.936, "grad_norm": 32.27545166015625, "learning_rate": 4.936e-05, "loss": 1.6322, "step": 1242 }, { "epoch": 9.943999999999999, "grad_norm": 30.103002548217773, "learning_rate": 4.94e-05, "loss": 1.8002, "step": 1243 }, { "epoch": 9.952, "grad_norm": 30.314970016479492, "learning_rate": 4.944e-05, "loss": 1.2967, "step": 1244 }, { "epoch": 9.96, "grad_norm": 20.600303649902344, "learning_rate": 4.948000000000001e-05, "loss": 1.1822, "step": 1245 }, { "epoch": 9.968, "grad_norm": 141.90350341796875, "learning_rate": 4.952e-05, "loss": 1.5021, "step": 1246 }, { "epoch": 9.975999999999999, "grad_norm": 58.56770324707031, "learning_rate": 4.956e-05, "loss": 1.6398, "step": 1247 }, { "epoch": 9.984, "grad_norm": 67.4556884765625, "learning_rate": 4.96e-05, "loss": 2.0902, "step": 1248 }, { "epoch": 9.992, "grad_norm": 43.522796630859375, "learning_rate": 4.9640000000000006e-05, "loss": 1.454, "step": 1249 }, { "epoch": 10.0, "grad_norm": 24.418197631835938, "learning_rate": 4.9680000000000005e-05, "loss": 1.8264, "step": 1250 }, { "epoch": 10.0, "eval_loss": 1.7386655807495117, "eval_map": 0.1801, "eval_map_50": 0.3655, "eval_map_75": 0.125, "eval_map_Coverall": 0.4167, "eval_map_Face_Shield": 0.1148, "eval_map_Gloves": 0.0943, "eval_map_Goggles": 0.0048, "eval_map_Mask": 0.2701, "eval_map_large": 0.1964, "eval_map_medium": 0.137, "eval_map_small": -1.0, "eval_mar_1": 0.162, "eval_mar_10": 0.3153, "eval_mar_100": 0.3335, "eval_mar_100_Coverall": 0.5911, "eval_mar_100_Face_Shield": 0.3176, "eval_mar_100_Gloves": 0.3098, "eval_mar_100_Goggles": 
0.0125, "eval_mar_100_Mask": 0.4365, "eval_mar_large": 0.3438, "eval_mar_medium": 0.2324, "eval_mar_small": -1.0, "eval_runtime": 4.7545, "eval_samples_per_second": 6.099, "eval_steps_per_second": 0.421, "step": 1250 }, { "epoch": 10.008, "grad_norm": 33.580726623535156, "learning_rate": 4.972e-05, "loss": 1.1192, "step": 1251 }, { "epoch": 10.016, "grad_norm": 22.320112228393555, "learning_rate": 4.976e-05, "loss": 1.6365, "step": 1252 }, { "epoch": 10.024, "grad_norm": 20.382741928100586, "learning_rate": 4.9800000000000004e-05, "loss": 1.938, "step": 1253 }, { "epoch": 10.032, "grad_norm": 25.298784255981445, "learning_rate": 4.9840000000000004e-05, "loss": 1.5664, "step": 1254 }, { "epoch": 10.04, "grad_norm": 25.01670265197754, "learning_rate": 4.9880000000000004e-05, "loss": 1.9124, "step": 1255 }, { "epoch": 10.048, "grad_norm": 68.2153549194336, "learning_rate": 4.992e-05, "loss": 1.5522, "step": 1256 }, { "epoch": 10.056, "grad_norm": 44.231685638427734, "learning_rate": 4.996e-05, "loss": 1.5927, "step": 1257 }, { "epoch": 10.064, "grad_norm": 34.992618560791016, "learning_rate": 5e-05, "loss": 1.64, "step": 1258 }, { "epoch": 10.072, "grad_norm": 17.55783462524414, "learning_rate": 4.999555555555556e-05, "loss": 1.6821, "step": 1259 }, { "epoch": 10.08, "grad_norm": 23.284067153930664, "learning_rate": 4.999111111111111e-05, "loss": 1.5104, "step": 1260 }, { "epoch": 10.088, "grad_norm": 34.237022399902344, "learning_rate": 4.9986666666666674e-05, "loss": 1.6583, "step": 1261 }, { "epoch": 10.096, "grad_norm": 46.896915435791016, "learning_rate": 4.998222222222222e-05, "loss": 1.196, "step": 1262 }, { "epoch": 10.104, "grad_norm": 22.762893676757812, "learning_rate": 4.997777777777778e-05, "loss": 1.5992, "step": 1263 }, { "epoch": 10.112, "grad_norm": 48.10911178588867, "learning_rate": 4.997333333333333e-05, "loss": 1.7643, "step": 1264 }, { "epoch": 10.12, "grad_norm": 32.99292755126953, "learning_rate": 4.996888888888889e-05, "loss": 1.3, "step": 
1265 }, { "epoch": 10.128, "grad_norm": 47.33378982543945, "learning_rate": 4.996444444444445e-05, "loss": 1.494, "step": 1266 }, { "epoch": 10.136, "grad_norm": 19.524776458740234, "learning_rate": 4.996e-05, "loss": 1.4628, "step": 1267 }, { "epoch": 10.144, "grad_norm": 28.40445327758789, "learning_rate": 4.995555555555556e-05, "loss": 1.7227, "step": 1268 }, { "epoch": 10.152, "grad_norm": 29.854604721069336, "learning_rate": 4.995111111111111e-05, "loss": 1.6894, "step": 1269 }, { "epoch": 10.16, "grad_norm": 45.030982971191406, "learning_rate": 4.994666666666667e-05, "loss": 1.1723, "step": 1270 }, { "epoch": 10.168, "grad_norm": 23.70360565185547, "learning_rate": 4.994222222222222e-05, "loss": 1.491, "step": 1271 }, { "epoch": 10.176, "grad_norm": 26.239646911621094, "learning_rate": 4.993777777777778e-05, "loss": 1.9073, "step": 1272 }, { "epoch": 10.184, "grad_norm": 31.425357818603516, "learning_rate": 4.993333333333334e-05, "loss": 1.478, "step": 1273 }, { "epoch": 10.192, "grad_norm": 35.7530632019043, "learning_rate": 4.9928888888888893e-05, "loss": 1.5514, "step": 1274 }, { "epoch": 10.2, "grad_norm": 33.40226745605469, "learning_rate": 4.992444444444445e-05, "loss": 1.9511, "step": 1275 }, { "epoch": 10.208, "grad_norm": 100.28253936767578, "learning_rate": 4.992e-05, "loss": 1.2769, "step": 1276 }, { "epoch": 10.216, "grad_norm": 31.764633178710938, "learning_rate": 4.991555555555556e-05, "loss": 3.6267, "step": 1277 }, { "epoch": 10.224, "grad_norm": 22.390247344970703, "learning_rate": 4.991111111111111e-05, "loss": 1.4568, "step": 1278 }, { "epoch": 10.232, "grad_norm": 31.510295867919922, "learning_rate": 4.990666666666667e-05, "loss": 1.5487, "step": 1279 }, { "epoch": 10.24, "grad_norm": 47.11511993408203, "learning_rate": 4.990222222222222e-05, "loss": 1.4541, "step": 1280 }, { "epoch": 10.248, "grad_norm": 36.48651123046875, "learning_rate": 4.9897777777777784e-05, "loss": 2.8792, "step": 1281 }, { "epoch": 10.256, "grad_norm": 
53.26156234741211, "learning_rate": 4.989333333333334e-05, "loss": 1.5749, "step": 1282 }, { "epoch": 10.264, "grad_norm": 42.30354309082031, "learning_rate": 4.9888888888888894e-05, "loss": 1.2444, "step": 1283 }, { "epoch": 10.272, "grad_norm": 41.91629409790039, "learning_rate": 4.988444444444444e-05, "loss": 1.1633, "step": 1284 }, { "epoch": 10.28, "grad_norm": 38.94123840332031, "learning_rate": 4.9880000000000004e-05, "loss": 1.7064, "step": 1285 }, { "epoch": 10.288, "grad_norm": 50.14278793334961, "learning_rate": 4.987555555555556e-05, "loss": 1.5416, "step": 1286 }, { "epoch": 10.296, "grad_norm": 70.87529754638672, "learning_rate": 4.987111111111111e-05, "loss": 1.3855, "step": 1287 }, { "epoch": 10.304, "grad_norm": 39.26066207885742, "learning_rate": 4.986666666666667e-05, "loss": 1.4441, "step": 1288 }, { "epoch": 10.312, "grad_norm": 38.820125579833984, "learning_rate": 4.986222222222223e-05, "loss": 1.297, "step": 1289 }, { "epoch": 10.32, "grad_norm": 40.65828323364258, "learning_rate": 4.985777777777778e-05, "loss": 1.2051, "step": 1290 }, { "epoch": 10.328, "grad_norm": 29.173738479614258, "learning_rate": 4.985333333333333e-05, "loss": 1.1325, "step": 1291 }, { "epoch": 10.336, "grad_norm": 40.8956413269043, "learning_rate": 4.984888888888889e-05, "loss": 1.6638, "step": 1292 }, { "epoch": 10.344, "grad_norm": 32.254825592041016, "learning_rate": 4.984444444444445e-05, "loss": 1.348, "step": 1293 }, { "epoch": 10.352, "grad_norm": 41.33233642578125, "learning_rate": 4.9840000000000004e-05, "loss": 1.8394, "step": 1294 }, { "epoch": 10.36, "grad_norm": 36.09782028198242, "learning_rate": 4.983555555555556e-05, "loss": 1.5664, "step": 1295 }, { "epoch": 10.368, "grad_norm": 16.277772903442383, "learning_rate": 4.9831111111111114e-05, "loss": 1.5022, "step": 1296 }, { "epoch": 10.376, "grad_norm": 58.82730484008789, "learning_rate": 4.982666666666667e-05, "loss": 2.4237, "step": 1297 }, { "epoch": 10.384, "grad_norm": 53.92454528808594, 
"learning_rate": 4.982222222222222e-05, "loss": 2.3549, "step": 1298 }, { "epoch": 10.392, "grad_norm": 37.45758056640625, "learning_rate": 4.981777777777778e-05, "loss": 1.5444, "step": 1299 }, { "epoch": 10.4, "grad_norm": 27.09491729736328, "learning_rate": 4.981333333333333e-05, "loss": 1.5767, "step": 1300 }, { "epoch": 10.408, "grad_norm": 25.276947021484375, "learning_rate": 4.9808888888888895e-05, "loss": 1.1226, "step": 1301 }, { "epoch": 10.416, "grad_norm": 33.200225830078125, "learning_rate": 4.980444444444445e-05, "loss": 1.481, "step": 1302 }, { "epoch": 10.424, "grad_norm": 27.68879508972168, "learning_rate": 4.9800000000000004e-05, "loss": 1.6887, "step": 1303 }, { "epoch": 10.432, "grad_norm": 29.118749618530273, "learning_rate": 4.979555555555556e-05, "loss": 1.2439, "step": 1304 }, { "epoch": 10.44, "grad_norm": 111.11715698242188, "learning_rate": 4.9791111111111114e-05, "loss": 1.6615, "step": 1305 }, { "epoch": 10.448, "grad_norm": 23.174509048461914, "learning_rate": 4.978666666666667e-05, "loss": 1.4261, "step": 1306 }, { "epoch": 10.456, "grad_norm": 264.38134765625, "learning_rate": 4.9782222222222224e-05, "loss": 1.7197, "step": 1307 }, { "epoch": 10.464, "grad_norm": 127.03042602539062, "learning_rate": 4.977777777777778e-05, "loss": 1.6366, "step": 1308 }, { "epoch": 10.472, "grad_norm": 25.23630714416504, "learning_rate": 4.977333333333334e-05, "loss": 1.1366, "step": 1309 }, { "epoch": 10.48, "grad_norm": 39.13581085205078, "learning_rate": 4.9768888888888895e-05, "loss": 1.5173, "step": 1310 }, { "epoch": 10.488, "grad_norm": 62.89023208618164, "learning_rate": 4.976444444444445e-05, "loss": 1.5664, "step": 1311 }, { "epoch": 10.496, "grad_norm": 50.31435775756836, "learning_rate": 4.976e-05, "loss": 1.759, "step": 1312 }, { "epoch": 10.504, "grad_norm": 54.71525955200195, "learning_rate": 4.975555555555555e-05, "loss": 1.7848, "step": 1313 }, { "epoch": 10.512, "grad_norm": 31.72012710571289, "learning_rate": 4.9751111111111114e-05, 
"loss": 1.6763, "step": 1314 }, { "epoch": 10.52, "grad_norm": 40.486366271972656, "learning_rate": 4.974666666666667e-05, "loss": 1.3895, "step": 1315 }, { "epoch": 10.528, "grad_norm": 40.663570404052734, "learning_rate": 4.9742222222222224e-05, "loss": 1.5716, "step": 1316 }, { "epoch": 10.536, "grad_norm": 47.33025360107422, "learning_rate": 4.973777777777778e-05, "loss": 1.5441, "step": 1317 }, { "epoch": 10.544, "grad_norm": 25.02393341064453, "learning_rate": 4.973333333333334e-05, "loss": 0.9979, "step": 1318 }, { "epoch": 10.552, "grad_norm": 34.39772033691406, "learning_rate": 4.972888888888889e-05, "loss": 1.5732, "step": 1319 }, { "epoch": 10.56, "grad_norm": 25.77187728881836, "learning_rate": 4.9724444444444443e-05, "loss": 1.9677, "step": 1320 }, { "epoch": 10.568, "grad_norm": 49.58182907104492, "learning_rate": 4.972e-05, "loss": 1.6834, "step": 1321 }, { "epoch": 10.576, "grad_norm": 39.52227020263672, "learning_rate": 4.971555555555556e-05, "loss": 1.802, "step": 1322 }, { "epoch": 10.584, "grad_norm": 62.03678512573242, "learning_rate": 4.9711111111111115e-05, "loss": 1.493, "step": 1323 }, { "epoch": 10.592, "grad_norm": 44.59410858154297, "learning_rate": 4.970666666666667e-05, "loss": 1.6889, "step": 1324 }, { "epoch": 10.6, "grad_norm": 56.34920120239258, "learning_rate": 4.9702222222222224e-05, "loss": 1.3994, "step": 1325 }, { "epoch": 10.608, "grad_norm": 26.678407669067383, "learning_rate": 4.969777777777778e-05, "loss": 2.0719, "step": 1326 }, { "epoch": 10.616, "grad_norm": 49.11384201049805, "learning_rate": 4.9693333333333334e-05, "loss": 1.6064, "step": 1327 }, { "epoch": 10.624, "grad_norm": 18.755313873291016, "learning_rate": 4.968888888888889e-05, "loss": 1.5703, "step": 1328 }, { "epoch": 10.632, "grad_norm": 35.290016174316406, "learning_rate": 4.9684444444444444e-05, "loss": 1.2514, "step": 1329 }, { "epoch": 10.64, "grad_norm": 24.054235458374023, "learning_rate": 4.9680000000000005e-05, "loss": 1.8316, "step": 1330 }, { 
"epoch": 10.648, "grad_norm": 43.584373474121094, "learning_rate": 4.967555555555556e-05, "loss": 1.2972, "step": 1331 }, { "epoch": 10.656, "grad_norm": 58.89356231689453, "learning_rate": 4.9671111111111115e-05, "loss": 1.8816, "step": 1332 }, { "epoch": 10.664, "grad_norm": 39.62871170043945, "learning_rate": 4.966666666666667e-05, "loss": 1.5493, "step": 1333 }, { "epoch": 10.672, "grad_norm": 27.893404006958008, "learning_rate": 4.9662222222222225e-05, "loss": 1.387, "step": 1334 }, { "epoch": 10.68, "grad_norm": 72.05806732177734, "learning_rate": 4.965777777777778e-05, "loss": 1.3492, "step": 1335 }, { "epoch": 10.688, "grad_norm": 35.09833526611328, "learning_rate": 4.9653333333333335e-05, "loss": 1.4479, "step": 1336 }, { "epoch": 10.696, "grad_norm": 25.47539520263672, "learning_rate": 4.964888888888889e-05, "loss": 1.5976, "step": 1337 }, { "epoch": 10.704, "grad_norm": 46.543052673339844, "learning_rate": 4.964444444444445e-05, "loss": 1.6031, "step": 1338 }, { "epoch": 10.712, "grad_norm": 62.596885681152344, "learning_rate": 4.9640000000000006e-05, "loss": 1.7925, "step": 1339 }, { "epoch": 10.72, "grad_norm": 42.07998275756836, "learning_rate": 4.963555555555556e-05, "loss": 1.4382, "step": 1340 }, { "epoch": 10.728, "grad_norm": 28.468616485595703, "learning_rate": 4.963111111111111e-05, "loss": 1.7825, "step": 1341 }, { "epoch": 10.736, "grad_norm": 25.815275192260742, "learning_rate": 4.962666666666667e-05, "loss": 2.2731, "step": 1342 }, { "epoch": 10.744, "grad_norm": 72.79280090332031, "learning_rate": 4.9622222222222225e-05, "loss": 1.3883, "step": 1343 }, { "epoch": 10.752, "grad_norm": 42.36555480957031, "learning_rate": 4.961777777777778e-05, "loss": 1.8294, "step": 1344 }, { "epoch": 10.76, "grad_norm": 33.31477355957031, "learning_rate": 4.9613333333333335e-05, "loss": 1.7336, "step": 1345 }, { "epoch": 10.768, "grad_norm": 43.944915771484375, "learning_rate": 4.9608888888888897e-05, "loss": 1.9199, "step": 1346 }, { "epoch": 10.776, 
"grad_norm": 388.24969482421875, "learning_rate": 4.9604444444444445e-05, "loss": 2.0762, "step": 1347 }, { "epoch": 10.784, "grad_norm": 116.34178924560547, "learning_rate": 4.96e-05, "loss": 1.3968, "step": 1348 }, { "epoch": 10.792, "grad_norm": 34.07395553588867, "learning_rate": 4.9595555555555554e-05, "loss": 2.0318, "step": 1349 }, { "epoch": 10.8, "grad_norm": 34.082767486572266, "learning_rate": 4.9591111111111116e-05, "loss": 1.5977, "step": 1350 }, { "epoch": 10.808, "grad_norm": 27.3752384185791, "learning_rate": 4.958666666666667e-05, "loss": 1.402, "step": 1351 }, { "epoch": 10.816, "grad_norm": 31.310470581054688, "learning_rate": 4.9582222222222226e-05, "loss": 1.9825, "step": 1352 }, { "epoch": 10.824, "grad_norm": 91.09359741210938, "learning_rate": 4.957777777777778e-05, "loss": 2.0774, "step": 1353 }, { "epoch": 10.832, "grad_norm": 46.14306640625, "learning_rate": 4.9573333333333335e-05, "loss": 1.8874, "step": 1354 }, { "epoch": 10.84, "grad_norm": 50.06705093383789, "learning_rate": 4.956888888888889e-05, "loss": 1.416, "step": 1355 }, { "epoch": 10.848, "grad_norm": 50.9637336730957, "learning_rate": 4.9564444444444445e-05, "loss": 1.7722, "step": 1356 }, { "epoch": 10.856, "grad_norm": 45.548797607421875, "learning_rate": 4.956e-05, "loss": 1.9068, "step": 1357 }, { "epoch": 10.864, "grad_norm": 70.20760345458984, "learning_rate": 4.955555555555556e-05, "loss": 1.4303, "step": 1358 }, { "epoch": 10.872, "grad_norm": 31.40032386779785, "learning_rate": 4.9551111111111116e-05, "loss": 1.4605, "step": 1359 }, { "epoch": 10.88, "grad_norm": 54.770538330078125, "learning_rate": 4.954666666666667e-05, "loss": 1.5974, "step": 1360 }, { "epoch": 10.888, "grad_norm": 57.09111785888672, "learning_rate": 4.9542222222222226e-05, "loss": 1.4041, "step": 1361 }, { "epoch": 10.896, "grad_norm": 37.21052932739258, "learning_rate": 4.9537777777777774e-05, "loss": 1.2404, "step": 1362 }, { "epoch": 10.904, "grad_norm": 60.279541015625, "learning_rate": 
4.9533333333333336e-05, "loss": 1.3621, "step": 1363 }, { "epoch": 10.912, "grad_norm": 30.89423179626465, "learning_rate": 4.952888888888889e-05, "loss": 2.0414, "step": 1364 }, { "epoch": 10.92, "grad_norm": 26.670549392700195, "learning_rate": 4.9524444444444445e-05, "loss": 1.5158, "step": 1365 }, { "epoch": 10.928, "grad_norm": 37.81254959106445, "learning_rate": 4.952e-05, "loss": 1.3268, "step": 1366 }, { "epoch": 10.936, "grad_norm": 21.698352813720703, "learning_rate": 4.951555555555556e-05, "loss": 1.445, "step": 1367 }, { "epoch": 10.943999999999999, "grad_norm": 140.5408935546875, "learning_rate": 4.951111111111112e-05, "loss": 1.7808, "step": 1368 }, { "epoch": 10.952, "grad_norm": 26.415882110595703, "learning_rate": 4.9506666666666665e-05, "loss": 1.6571, "step": 1369 }, { "epoch": 10.96, "grad_norm": 38.417755126953125, "learning_rate": 4.950222222222222e-05, "loss": 1.6723, "step": 1370 }, { "epoch": 10.968, "grad_norm": 36.15163803100586, "learning_rate": 4.949777777777778e-05, "loss": 1.5185, "step": 1371 }, { "epoch": 10.975999999999999, "grad_norm": 19.320058822631836, "learning_rate": 4.9493333333333336e-05, "loss": 1.874, "step": 1372 }, { "epoch": 10.984, "grad_norm": 37.3697395324707, "learning_rate": 4.948888888888889e-05, "loss": 1.5049, "step": 1373 }, { "epoch": 10.992, "grad_norm": 24.88294219970703, "learning_rate": 4.9484444444444446e-05, "loss": 2.6776, "step": 1374 }, { "epoch": 11.0, "grad_norm": 59.65004348754883, "learning_rate": 4.948000000000001e-05, "loss": 1.5921, "step": 1375 }, { "epoch": 11.0, "eval_loss": 1.6929633617401123, "eval_map": 0.1873, "eval_map_50": 0.3889, "eval_map_75": 0.1441, "eval_map_Coverall": 0.4131, "eval_map_Face_Shield": 0.1646, "eval_map_Gloves": 0.1198, "eval_map_Goggles": 0.0132, "eval_map_Mask": 0.2256, "eval_map_large": 0.2179, "eval_map_medium": 0.125, "eval_map_small": -1.0, "eval_mar_1": 0.1827, "eval_mar_10": 0.3562, "eval_mar_100": 0.3717, "eval_mar_100_Coverall": 0.6556, 
"eval_mar_100_Face_Shield": 0.5294, "eval_mar_100_Gloves": 0.2656, "eval_mar_100_Goggles": 0.0656, "eval_mar_100_Mask": 0.3423, "eval_mar_large": 0.4254, "eval_mar_medium": 0.2492, "eval_mar_small": -1.0, "eval_runtime": 3.8336, "eval_samples_per_second": 7.565, "eval_steps_per_second": 0.522, "step": 1375 }, { "epoch": 11.008, "grad_norm": 26.689912796020508, "learning_rate": 4.9475555555555555e-05, "loss": 1.4134, "step": 1376 }, { "epoch": 11.016, "grad_norm": 26.279891967773438, "learning_rate": 4.947111111111111e-05, "loss": 1.6263, "step": 1377 }, { "epoch": 11.024, "grad_norm": 34.9584846496582, "learning_rate": 4.9466666666666665e-05, "loss": 1.8097, "step": 1378 }, { "epoch": 11.032, "grad_norm": 16.348711013793945, "learning_rate": 4.946222222222223e-05, "loss": 2.0179, "step": 1379 }, { "epoch": 11.04, "grad_norm": 25.55263328552246, "learning_rate": 4.945777777777778e-05, "loss": 1.5925, "step": 1380 }, { "epoch": 11.048, "grad_norm": 28.791641235351562, "learning_rate": 4.9453333333333336e-05, "loss": 2.7403, "step": 1381 }, { "epoch": 11.056, "grad_norm": 23.191917419433594, "learning_rate": 4.944888888888889e-05, "loss": 1.9412, "step": 1382 }, { "epoch": 11.064, "grad_norm": 19.427433013916016, "learning_rate": 4.9444444444444446e-05, "loss": 1.268, "step": 1383 }, { "epoch": 11.072, "grad_norm": 36.292545318603516, "learning_rate": 4.944e-05, "loss": 1.5338, "step": 1384 }, { "epoch": 11.08, "grad_norm": 38.26237869262695, "learning_rate": 4.9435555555555556e-05, "loss": 1.6762, "step": 1385 }, { "epoch": 11.088, "grad_norm": 57.90266799926758, "learning_rate": 4.943111111111111e-05, "loss": 1.7387, "step": 1386 }, { "epoch": 11.096, "grad_norm": 50.831825256347656, "learning_rate": 4.942666666666667e-05, "loss": 1.386, "step": 1387 }, { "epoch": 11.104, "grad_norm": 34.3310661315918, "learning_rate": 4.942222222222223e-05, "loss": 1.5689, "step": 1388 }, { "epoch": 11.112, "grad_norm": 59.58251953125, "learning_rate": 4.941777777777778e-05, 
"loss": 2.4074, "step": 1389 }, { "epoch": 11.12, "grad_norm": 49.289451599121094, "learning_rate": 4.941333333333334e-05, "loss": 1.3481, "step": 1390 }, { "epoch": 11.128, "grad_norm": 92.61341857910156, "learning_rate": 4.940888888888889e-05, "loss": 1.5867, "step": 1391 }, { "epoch": 11.136, "grad_norm": 74.56541442871094, "learning_rate": 4.9404444444444447e-05, "loss": 1.9002, "step": 1392 }, { "epoch": 11.144, "grad_norm": 31.562349319458008, "learning_rate": 4.94e-05, "loss": 2.0417, "step": 1393 }, { "epoch": 11.152, "grad_norm": 28.38924789428711, "learning_rate": 4.9395555555555556e-05, "loss": 1.4566, "step": 1394 }, { "epoch": 11.16, "grad_norm": 20.465435028076172, "learning_rate": 4.939111111111112e-05, "loss": 1.9853, "step": 1395 }, { "epoch": 11.168, "grad_norm": 57.24822998046875, "learning_rate": 4.938666666666667e-05, "loss": 1.675, "step": 1396 }, { "epoch": 11.176, "grad_norm": 64.10393524169922, "learning_rate": 4.938222222222223e-05, "loss": 1.9757, "step": 1397 }, { "epoch": 11.184, "grad_norm": 31.240148544311523, "learning_rate": 4.9377777777777776e-05, "loss": 1.7686, "step": 1398 }, { "epoch": 11.192, "grad_norm": 32.14802551269531, "learning_rate": 4.937333333333334e-05, "loss": 1.6102, "step": 1399 }, { "epoch": 11.2, "grad_norm": 39.54681396484375, "learning_rate": 4.936888888888889e-05, "loss": 1.5465, "step": 1400 }, { "epoch": 11.208, "grad_norm": 113.53701782226562, "learning_rate": 4.936444444444445e-05, "loss": 1.6051, "step": 1401 }, { "epoch": 11.216, "grad_norm": 21.681610107421875, "learning_rate": 4.936e-05, "loss": 1.631, "step": 1402 }, { "epoch": 11.224, "grad_norm": 109.84596252441406, "learning_rate": 4.935555555555556e-05, "loss": 1.7086, "step": 1403 }, { "epoch": 11.232, "grad_norm": 20.865507125854492, "learning_rate": 4.935111111111111e-05, "loss": 1.1422, "step": 1404 }, { "epoch": 11.24, "grad_norm": 29.93189811706543, "learning_rate": 4.9346666666666666e-05, "loss": 1.5511, "step": 1405 }, { "epoch": 11.248, 
"grad_norm": 38.60445022583008, "learning_rate": 4.934222222222222e-05, "loss": 1.2309, "step": 1406 }, { "epoch": 11.256, "grad_norm": 39.400062561035156, "learning_rate": 4.933777777777778e-05, "loss": 1.5637, "step": 1407 }, { "epoch": 11.264, "grad_norm": 22.66122817993164, "learning_rate": 4.933333333333334e-05, "loss": 1.005, "step": 1408 }, { "epoch": 11.272, "grad_norm": 39.64421081542969, "learning_rate": 4.932888888888889e-05, "loss": 1.3404, "step": 1409 }, { "epoch": 11.28, "grad_norm": 218.9565887451172, "learning_rate": 4.932444444444445e-05, "loss": 1.6771, "step": 1410 }, { "epoch": 11.288, "grad_norm": 71.77190399169922, "learning_rate": 4.932e-05, "loss": 1.5347, "step": 1411 }, { "epoch": 11.296, "grad_norm": 32.031925201416016, "learning_rate": 4.931555555555556e-05, "loss": 1.7614, "step": 1412 }, { "epoch": 11.304, "grad_norm": 30.500656127929688, "learning_rate": 4.931111111111111e-05, "loss": 1.29, "step": 1413 }, { "epoch": 11.312, "grad_norm": 33.24708557128906, "learning_rate": 4.930666666666667e-05, "loss": 1.4861, "step": 1414 }, { "epoch": 11.32, "grad_norm": 33.884315490722656, "learning_rate": 4.930222222222222e-05, "loss": 1.7301, "step": 1415 }, { "epoch": 11.328, "grad_norm": 27.668550491333008, "learning_rate": 4.929777777777778e-05, "loss": 3.4324, "step": 1416 }, { "epoch": 11.336, "grad_norm": 42.49685287475586, "learning_rate": 4.929333333333334e-05, "loss": 1.7888, "step": 1417 }, { "epoch": 11.344, "grad_norm": 29.719085693359375, "learning_rate": 4.928888888888889e-05, "loss": 1.6597, "step": 1418 }, { "epoch": 11.352, "grad_norm": 37.3734245300293, "learning_rate": 4.928444444444444e-05, "loss": 1.5023, "step": 1419 }, { "epoch": 11.36, "grad_norm": 793.0416259765625, "learning_rate": 4.928e-05, "loss": 1.688, "step": 1420 }, { "epoch": 11.368, "grad_norm": 34.83229446411133, "learning_rate": 4.927555555555556e-05, "loss": 1.3337, "step": 1421 }, { "epoch": 11.376, "grad_norm": 119.97488403320312, "learning_rate": 
4.927111111111111e-05, "loss": 1.3841, "step": 1422 }, { "epoch": 11.384, "grad_norm": 45.622432708740234, "learning_rate": 4.926666666666667e-05, "loss": 1.3304, "step": 1423 }, { "epoch": 11.392, "grad_norm": 64.20079040527344, "learning_rate": 4.926222222222223e-05, "loss": 1.709, "step": 1424 }, { "epoch": 11.4, "grad_norm": 19.690824508666992, "learning_rate": 4.9257777777777784e-05, "loss": 1.5356, "step": 1425 }, { "epoch": 11.408, "grad_norm": 30.513853073120117, "learning_rate": 4.925333333333333e-05, "loss": 2.295, "step": 1426 }, { "epoch": 11.416, "grad_norm": 45.38603973388672, "learning_rate": 4.9248888888888886e-05, "loss": 1.7194, "step": 1427 }, { "epoch": 11.424, "grad_norm": 61.4138069152832, "learning_rate": 4.924444444444445e-05, "loss": 1.7929, "step": 1428 }, { "epoch": 11.432, "grad_norm": 27.13850975036621, "learning_rate": 4.924e-05, "loss": 1.5793, "step": 1429 }, { "epoch": 11.44, "grad_norm": 34.27902603149414, "learning_rate": 4.923555555555556e-05, "loss": 1.6326, "step": 1430 }, { "epoch": 11.448, "grad_norm": 33.931434631347656, "learning_rate": 4.923111111111111e-05, "loss": 1.5849, "step": 1431 }, { "epoch": 11.456, "grad_norm": 74.11778259277344, "learning_rate": 4.9226666666666674e-05, "loss": 1.3141, "step": 1432 }, { "epoch": 11.464, "grad_norm": 42.865745544433594, "learning_rate": 4.922222222222222e-05, "loss": 1.7968, "step": 1433 }, { "epoch": 11.472, "grad_norm": 266.4754943847656, "learning_rate": 4.921777777777778e-05, "loss": 1.4819, "step": 1434 }, { "epoch": 11.48, "grad_norm": 27.437227249145508, "learning_rate": 4.921333333333333e-05, "loss": 2.9716, "step": 1435 }, { "epoch": 11.488, "grad_norm": 40.840911865234375, "learning_rate": 4.9208888888888894e-05, "loss": 1.9045, "step": 1436 }, { "epoch": 11.496, "grad_norm": 42.123600006103516, "learning_rate": 4.920444444444445e-05, "loss": 1.9458, "step": 1437 }, { "epoch": 11.504, "grad_norm": 31.689584732055664, "learning_rate": 4.92e-05, "loss": 1.789, "step": 1438 
}, { "epoch": 11.512, "grad_norm": 30.345455169677734, "learning_rate": 4.919555555555556e-05, "loss": 1.8535, "step": 1439 }, { "epoch": 11.52, "grad_norm": 37.21958541870117, "learning_rate": 4.919111111111111e-05, "loss": 1.5827, "step": 1440 }, { "epoch": 11.528, "grad_norm": 69.69595336914062, "learning_rate": 4.918666666666667e-05, "loss": 1.9772, "step": 1441 }, { "epoch": 11.536, "grad_norm": 51.394229888916016, "learning_rate": 4.918222222222222e-05, "loss": 1.7135, "step": 1442 }, { "epoch": 11.544, "grad_norm": 43.67301940917969, "learning_rate": 4.917777777777778e-05, "loss": 1.7452, "step": 1443 }, { "epoch": 11.552, "grad_norm": 48.24986267089844, "learning_rate": 4.917333333333334e-05, "loss": 1.5293, "step": 1444 }, { "epoch": 11.56, "grad_norm": 55.6241340637207, "learning_rate": 4.9168888888888894e-05, "loss": 2.1843, "step": 1445 }, { "epoch": 11.568, "grad_norm": 22.503673553466797, "learning_rate": 4.916444444444445e-05, "loss": 1.4181, "step": 1446 }, { "epoch": 11.576, "grad_norm": 30.86716651916504, "learning_rate": 4.9160000000000004e-05, "loss": 1.3348, "step": 1447 }, { "epoch": 11.584, "grad_norm": 26.965063095092773, "learning_rate": 4.915555555555556e-05, "loss": 1.4145, "step": 1448 }, { "epoch": 11.592, "grad_norm": 43.55568313598633, "learning_rate": 4.915111111111111e-05, "loss": 1.1554, "step": 1449 }, { "epoch": 11.6, "grad_norm": 23.724899291992188, "learning_rate": 4.914666666666667e-05, "loss": 1.8341, "step": 1450 }, { "epoch": 11.608, "grad_norm": 35.20038604736328, "learning_rate": 4.914222222222222e-05, "loss": 1.3221, "step": 1451 }, { "epoch": 11.616, "grad_norm": 38.47380828857422, "learning_rate": 4.9137777777777785e-05, "loss": 1.3046, "step": 1452 }, { "epoch": 11.624, "grad_norm": 22.81513214111328, "learning_rate": 4.913333333333334e-05, "loss": 1.7945, "step": 1453 }, { "epoch": 11.632, "grad_norm": 17.575056076049805, "learning_rate": 4.912888888888889e-05, "loss": 1.3125, "step": 1454 }, { "epoch": 11.64, 
"grad_norm": 256.7516174316406, "learning_rate": 4.912444444444444e-05, "loss": 1.8028, "step": 1455 }, { "epoch": 11.648, "grad_norm": 26.15013885498047, "learning_rate": 4.9120000000000004e-05, "loss": 1.6956, "step": 1456 }, { "epoch": 11.656, "grad_norm": 42.570396423339844, "learning_rate": 4.911555555555556e-05, "loss": 1.415, "step": 1457 }, { "epoch": 11.664, "grad_norm": 63.05826187133789, "learning_rate": 4.9111111111111114e-05, "loss": 1.5179, "step": 1458 }, { "epoch": 11.672, "grad_norm": 38.39253616333008, "learning_rate": 4.910666666666667e-05, "loss": 1.632, "step": 1459 }, { "epoch": 11.68, "grad_norm": 56.52573776245117, "learning_rate": 4.910222222222223e-05, "loss": 1.8277, "step": 1460 }, { "epoch": 11.688, "grad_norm": 49.95197296142578, "learning_rate": 4.909777777777778e-05, "loss": 1.7677, "step": 1461 }, { "epoch": 11.696, "grad_norm": 25.05290985107422, "learning_rate": 4.909333333333333e-05, "loss": 1.7937, "step": 1462 }, { "epoch": 11.704, "grad_norm": 47.572479248046875, "learning_rate": 4.908888888888889e-05, "loss": 1.7727, "step": 1463 }, { "epoch": 11.712, "grad_norm": 32.81813430786133, "learning_rate": 4.908444444444445e-05, "loss": 1.3513, "step": 1464 }, { "epoch": 11.72, "grad_norm": 33.31889343261719, "learning_rate": 4.9080000000000004e-05, "loss": 1.1036, "step": 1465 }, { "epoch": 11.728, "grad_norm": 32.639678955078125, "learning_rate": 4.907555555555556e-05, "loss": 1.3299, "step": 1466 }, { "epoch": 11.736, "grad_norm": 54.97154235839844, "learning_rate": 4.9071111111111114e-05, "loss": 1.2827, "step": 1467 }, { "epoch": 11.744, "grad_norm": 24.253093719482422, "learning_rate": 4.906666666666667e-05, "loss": 1.5583, "step": 1468 }, { "epoch": 11.752, "grad_norm": 24.852598190307617, "learning_rate": 4.9062222222222224e-05, "loss": 1.2227, "step": 1469 }, { "epoch": 11.76, "grad_norm": 73.27813720703125, "learning_rate": 4.905777777777778e-05, "loss": 1.3557, "step": 1470 }, { "epoch": 11.768, "grad_norm": 
36.742340087890625, "learning_rate": 4.9053333333333333e-05, "loss": 1.7652, "step": 1471 }, { "epoch": 11.776, "grad_norm": 34.99496078491211, "learning_rate": 4.904888888888889e-05, "loss": 1.8418, "step": 1472 }, { "epoch": 11.784, "grad_norm": 35.6380500793457, "learning_rate": 4.904444444444445e-05, "loss": 1.3192, "step": 1473 }, { "epoch": 11.792, "grad_norm": 115.22361755371094, "learning_rate": 4.9040000000000005e-05, "loss": 1.9151, "step": 1474 }, { "epoch": 11.8, "grad_norm": 23.813617706298828, "learning_rate": 4.903555555555556e-05, "loss": 1.5526, "step": 1475 }, { "epoch": 11.808, "grad_norm": 21.65581512451172, "learning_rate": 4.903111111111111e-05, "loss": 1.4206, "step": 1476 }, { "epoch": 11.816, "grad_norm": 52.93492126464844, "learning_rate": 4.902666666666667e-05, "loss": 1.7862, "step": 1477 }, { "epoch": 11.824, "grad_norm": 35.291229248046875, "learning_rate": 4.9022222222222224e-05, "loss": 1.7374, "step": 1478 }, { "epoch": 11.832, "grad_norm": 31.309160232543945, "learning_rate": 4.901777777777778e-05, "loss": 1.2444, "step": 1479 }, { "epoch": 11.84, "grad_norm": 31.329635620117188, "learning_rate": 4.9013333333333334e-05, "loss": 1.2937, "step": 1480 }, { "epoch": 11.848, "grad_norm": 32.3298225402832, "learning_rate": 4.9008888888888896e-05, "loss": 1.6246, "step": 1481 }, { "epoch": 11.856, "grad_norm": 72.86852264404297, "learning_rate": 4.900444444444445e-05, "loss": 1.7737, "step": 1482 }, { "epoch": 11.864, "grad_norm": 15.884003639221191, "learning_rate": 4.9e-05, "loss": 1.2093, "step": 1483 }, { "epoch": 11.872, "grad_norm": 29.712026596069336, "learning_rate": 4.899555555555555e-05, "loss": 1.7668, "step": 1484 }, { "epoch": 11.88, "grad_norm": 48.291526794433594, "learning_rate": 4.8991111111111115e-05, "loss": 1.6706, "step": 1485 }, { "epoch": 11.888, "grad_norm": 36.354339599609375, "learning_rate": 4.898666666666667e-05, "loss": 2.2415, "step": 1486 }, { "epoch": 11.896, "grad_norm": 60.327354431152344, 
"learning_rate": 4.8982222222222225e-05, "loss": 1.6215, "step": 1487 }, { "epoch": 11.904, "grad_norm": 38.99852752685547, "learning_rate": 4.897777777777778e-05, "loss": 1.5898, "step": 1488 }, { "epoch": 11.912, "grad_norm": 33.215396881103516, "learning_rate": 4.897333333333334e-05, "loss": 1.5695, "step": 1489 }, { "epoch": 11.92, "grad_norm": 67.43807983398438, "learning_rate": 4.896888888888889e-05, "loss": 1.5796, "step": 1490 }, { "epoch": 11.928, "grad_norm": 32.36810302734375, "learning_rate": 4.8964444444444444e-05, "loss": 1.4554, "step": 1491 }, { "epoch": 11.936, "grad_norm": 36.21152877807617, "learning_rate": 4.896e-05, "loss": 1.9182, "step": 1492 }, { "epoch": 11.943999999999999, "grad_norm": 81.34182739257812, "learning_rate": 4.895555555555556e-05, "loss": 1.4035, "step": 1493 }, { "epoch": 11.952, "grad_norm": 30.522605895996094, "learning_rate": 4.8951111111111115e-05, "loss": 1.3793, "step": 1494 }, { "epoch": 11.96, "grad_norm": 50.236385345458984, "learning_rate": 4.894666666666667e-05, "loss": 1.619, "step": 1495 }, { "epoch": 11.968, "grad_norm": 35.82511901855469, "learning_rate": 4.8942222222222225e-05, "loss": 1.7322, "step": 1496 }, { "epoch": 11.975999999999999, "grad_norm": 96.53227996826172, "learning_rate": 4.893777777777778e-05, "loss": 1.4512, "step": 1497 }, { "epoch": 11.984, "grad_norm": 39.706756591796875, "learning_rate": 4.8933333333333335e-05, "loss": 1.1885, "step": 1498 }, { "epoch": 11.992, "grad_norm": 122.8625259399414, "learning_rate": 4.892888888888889e-05, "loss": 1.6771, "step": 1499 }, { "epoch": 12.0, "grad_norm": 39.12254333496094, "learning_rate": 4.8924444444444444e-05, "loss": 1.4776, "step": 1500 }, { "epoch": 12.0, "eval_loss": 2.201972723007202, "eval_map": 0.1215, "eval_map_50": 0.2491, "eval_map_75": 0.0988, "eval_map_Coverall": 0.2112, "eval_map_Face_Shield": 0.0912, "eval_map_Gloves": 0.0608, "eval_map_Goggles": 0.0567, "eval_map_Mask": 0.1874, "eval_map_large": 0.1643, "eval_map_medium": 0.0472, 
"eval_map_small": -1.0, "eval_mar_1": 0.1644, "eval_mar_10": 0.293, "eval_mar_100": 0.3115, "eval_mar_100_Coverall": 0.5422, "eval_mar_100_Face_Shield": 0.4235, "eval_mar_100_Gloves": 0.1393, "eval_mar_100_Goggles": 0.1063, "eval_mar_100_Mask": 0.3462, "eval_mar_large": 0.3874, "eval_mar_medium": 0.081, "eval_mar_small": -1.0, "eval_runtime": 3.8038, "eval_samples_per_second": 7.624, "eval_steps_per_second": 0.526, "step": 1500 }, { "epoch": 12.008, "grad_norm": 67.43556213378906, "learning_rate": 4.8920000000000006e-05, "loss": 2.1101, "step": 1501 }, { "epoch": 12.016, "grad_norm": 34.7022819519043, "learning_rate": 4.891555555555556e-05, "loss": 1.5117, "step": 1502 }, { "epoch": 12.024, "grad_norm": 188.07113647460938, "learning_rate": 4.8911111111111116e-05, "loss": 1.7532, "step": 1503 }, { "epoch": 12.032, "grad_norm": 35.656341552734375, "learning_rate": 4.890666666666667e-05, "loss": 1.8823, "step": 1504 }, { "epoch": 12.04, "grad_norm": 31.017459869384766, "learning_rate": 4.8902222222222225e-05, "loss": 1.1393, "step": 1505 }, { "epoch": 12.048, "grad_norm": 54.46564483642578, "learning_rate": 4.889777777777778e-05, "loss": 1.7037, "step": 1506 }, { "epoch": 12.056, "grad_norm": 42.2746467590332, "learning_rate": 4.8893333333333335e-05, "loss": 1.9642, "step": 1507 }, { "epoch": 12.064, "grad_norm": 87.76895141601562, "learning_rate": 4.888888888888889e-05, "loss": 1.7161, "step": 1508 }, { "epoch": 12.072, "grad_norm": 35.34422302246094, "learning_rate": 4.888444444444445e-05, "loss": 1.8695, "step": 1509 }, { "epoch": 12.08, "grad_norm": 43.945899963378906, "learning_rate": 4.8880000000000006e-05, "loss": 1.1684, "step": 1510 }, { "epoch": 12.088, "grad_norm": 31.42352867126465, "learning_rate": 4.8875555555555554e-05, "loss": 1.6112, "step": 1511 }, { "epoch": 12.096, "grad_norm": 37.67301940917969, "learning_rate": 4.887111111111111e-05, "loss": 1.6295, "step": 1512 }, { "epoch": 12.104, "grad_norm": 54.850616455078125, "learning_rate": 
4.886666666666667e-05, "loss": 2.0961, "step": 1513 }, { "epoch": 12.112, "grad_norm": 48.95390701293945, "learning_rate": 4.8862222222222226e-05, "loss": 1.5236, "step": 1514 }, { "epoch": 12.12, "grad_norm": 26.726823806762695, "learning_rate": 4.885777777777778e-05, "loss": 1.8948, "step": 1515 }, { "epoch": 12.128, "grad_norm": 38.86909484863281, "learning_rate": 4.8853333333333335e-05, "loss": 1.4473, "step": 1516 }, { "epoch": 12.136, "grad_norm": 51.62727737426758, "learning_rate": 4.884888888888889e-05, "loss": 1.4083, "step": 1517 }, { "epoch": 12.144, "grad_norm": 66.74028015136719, "learning_rate": 4.8844444444444445e-05, "loss": 1.9948, "step": 1518 }, { "epoch": 12.152, "grad_norm": 18.74602699279785, "learning_rate": 4.884e-05, "loss": 1.3209, "step": 1519 }, { "epoch": 12.16, "grad_norm": 49.39487075805664, "learning_rate": 4.8835555555555555e-05, "loss": 1.24, "step": 1520 }, { "epoch": 12.168, "grad_norm": 20.401123046875, "learning_rate": 4.883111111111111e-05, "loss": 1.4577, "step": 1521 }, { "epoch": 12.176, "grad_norm": 22.778804779052734, "learning_rate": 4.882666666666667e-05, "loss": 1.3407, "step": 1522 }, { "epoch": 12.184, "grad_norm": 26.89426612854004, "learning_rate": 4.8822222222222226e-05, "loss": 2.4027, "step": 1523 }, { "epoch": 12.192, "grad_norm": 20.38702392578125, "learning_rate": 4.881777777777778e-05, "loss": 1.5331, "step": 1524 }, { "epoch": 12.2, "grad_norm": 27.731855392456055, "learning_rate": 4.8813333333333336e-05, "loss": 1.811, "step": 1525 }, { "epoch": 12.208, "grad_norm": 26.775569915771484, "learning_rate": 4.880888888888889e-05, "loss": 1.3073, "step": 1526 }, { "epoch": 12.216, "grad_norm": 32.77802276611328, "learning_rate": 4.8804444444444445e-05, "loss": 1.3758, "step": 1527 }, { "epoch": 12.224, "grad_norm": 29.368003845214844, "learning_rate": 4.88e-05, "loss": 1.2882, "step": 1528 }, { "epoch": 12.232, "grad_norm": 20.404836654663086, "learning_rate": 4.8795555555555555e-05, "loss": 1.4323, "step": 1529 
}, { "epoch": 12.24, "grad_norm": 36.08815383911133, "learning_rate": 4.879111111111112e-05, "loss": 1.5171, "step": 1530 }, { "epoch": 12.248, "grad_norm": 28.967931747436523, "learning_rate": 4.878666666666667e-05, "loss": 1.8625, "step": 1531 }, { "epoch": 12.256, "grad_norm": 24.67665672302246, "learning_rate": 4.8782222222222226e-05, "loss": 1.8355, "step": 1532 }, { "epoch": 12.264, "grad_norm": 24.164438247680664, "learning_rate": 4.8777777777777775e-05, "loss": 1.8364, "step": 1533 }, { "epoch": 12.272, "grad_norm": 26.81656265258789, "learning_rate": 4.8773333333333336e-05, "loss": 1.7868, "step": 1534 }, { "epoch": 12.28, "grad_norm": 32.858028411865234, "learning_rate": 4.876888888888889e-05, "loss": 1.7116, "step": 1535 }, { "epoch": 12.288, "grad_norm": 111.29364776611328, "learning_rate": 4.8764444444444446e-05, "loss": 1.8684, "step": 1536 }, { "epoch": 12.296, "grad_norm": 57.49281311035156, "learning_rate": 4.876e-05, "loss": 1.6801, "step": 1537 }, { "epoch": 12.304, "grad_norm": 23.66840934753418, "learning_rate": 4.875555555555556e-05, "loss": 1.2179, "step": 1538 }, { "epoch": 12.312, "grad_norm": 31.380693435668945, "learning_rate": 4.875111111111112e-05, "loss": 1.9268, "step": 1539 }, { "epoch": 12.32, "grad_norm": 33.901058197021484, "learning_rate": 4.8746666666666665e-05, "loss": 1.6099, "step": 1540 }, { "epoch": 12.328, "grad_norm": 35.76226043701172, "learning_rate": 4.874222222222222e-05, "loss": 1.3663, "step": 1541 }, { "epoch": 12.336, "grad_norm": 27.801660537719727, "learning_rate": 4.873777777777778e-05, "loss": 1.651, "step": 1542 }, { "epoch": 12.344, "grad_norm": 81.21717071533203, "learning_rate": 4.8733333333333337e-05, "loss": 2.0732, "step": 1543 }, { "epoch": 12.352, "grad_norm": 32.56896209716797, "learning_rate": 4.872888888888889e-05, "loss": 1.6415, "step": 1544 }, { "epoch": 12.36, "grad_norm": 22.03966522216797, "learning_rate": 4.8724444444444446e-05, "loss": 1.1503, "step": 1545 }, { "epoch": 12.368, "grad_norm": 
50.412261962890625, "learning_rate": 4.872000000000001e-05, "loss": 2.0206, "step": 1546 }, { "epoch": 12.376, "grad_norm": 25.25143814086914, "learning_rate": 4.8715555555555556e-05, "loss": 1.9253, "step": 1547 }, { "epoch": 12.384, "grad_norm": 53.35560989379883, "learning_rate": 4.871111111111111e-05, "loss": 1.5325, "step": 1548 }, { "epoch": 12.392, "grad_norm": 30.600082397460938, "learning_rate": 4.8706666666666666e-05, "loss": 1.5111, "step": 1549 }, { "epoch": 12.4, "grad_norm": 29.734209060668945, "learning_rate": 4.870222222222223e-05, "loss": 0.9963, "step": 1550 }, { "epoch": 12.408, "grad_norm": 226.23983764648438, "learning_rate": 4.869777777777778e-05, "loss": 1.6608, "step": 1551 }, { "epoch": 12.416, "grad_norm": 26.935596466064453, "learning_rate": 4.869333333333334e-05, "loss": 1.571, "step": 1552 }, { "epoch": 12.424, "grad_norm": 36.66112518310547, "learning_rate": 4.868888888888889e-05, "loss": 1.4745, "step": 1553 }, { "epoch": 12.432, "grad_norm": 27.550046920776367, "learning_rate": 4.868444444444445e-05, "loss": 1.8302, "step": 1554 }, { "epoch": 12.44, "grad_norm": 52.805545806884766, "learning_rate": 4.868e-05, "loss": 1.2959, "step": 1555 }, { "epoch": 12.448, "grad_norm": 76.6732406616211, "learning_rate": 4.8675555555555556e-05, "loss": 1.6764, "step": 1556 }, { "epoch": 12.456, "grad_norm": 24.68566131591797, "learning_rate": 4.867111111111111e-05, "loss": 1.12, "step": 1557 }, { "epoch": 12.464, "grad_norm": 41.321861267089844, "learning_rate": 4.866666666666667e-05, "loss": 1.6667, "step": 1558 }, { "epoch": 12.472, "grad_norm": 52.445289611816406, "learning_rate": 4.866222222222223e-05, "loss": 1.8318, "step": 1559 }, { "epoch": 12.48, "grad_norm": 27.42964744567871, "learning_rate": 4.865777777777778e-05, "loss": 1.2816, "step": 1560 }, { "epoch": 12.488, "grad_norm": 23.035715103149414, "learning_rate": 4.865333333333334e-05, "loss": 1.3056, "step": 1561 }, { "epoch": 12.496, "grad_norm": 28.002456665039062, "learning_rate": 
4.864888888888889e-05, "loss": 1.5137, "step": 1562 }, { "epoch": 12.504, "grad_norm": 28.73207664489746, "learning_rate": 4.864444444444445e-05, "loss": 1.6471, "step": 1563 }, { "epoch": 12.512, "grad_norm": 18.34092903137207, "learning_rate": 4.864e-05, "loss": 2.5733, "step": 1564 }, { "epoch": 12.52, "grad_norm": 21.910812377929688, "learning_rate": 4.863555555555556e-05, "loss": 1.742, "step": 1565 }, { "epoch": 12.528, "grad_norm": 48.967071533203125, "learning_rate": 4.863111111111112e-05, "loss": 1.9762, "step": 1566 }, { "epoch": 12.536, "grad_norm": 20.859228134155273, "learning_rate": 4.862666666666667e-05, "loss": 1.1673, "step": 1567 }, { "epoch": 12.544, "grad_norm": 36.91686248779297, "learning_rate": 4.862222222222222e-05, "loss": 0.9732, "step": 1568 }, { "epoch": 12.552, "grad_norm": 60.74995803833008, "learning_rate": 4.8617777777777776e-05, "loss": 1.3601, "step": 1569 }, { "epoch": 12.56, "grad_norm": 44.7741813659668, "learning_rate": 4.861333333333333e-05, "loss": 1.3647, "step": 1570 }, { "epoch": 12.568, "grad_norm": 17.67992401123047, "learning_rate": 4.860888888888889e-05, "loss": 1.5376, "step": 1571 }, { "epoch": 12.576, "grad_norm": 29.135236740112305, "learning_rate": 4.860444444444445e-05, "loss": 1.4325, "step": 1572 }, { "epoch": 12.584, "grad_norm": 23.474666595458984, "learning_rate": 4.86e-05, "loss": 1.3903, "step": 1573 }, { "epoch": 12.592, "grad_norm": 54.90715789794922, "learning_rate": 4.859555555555556e-05, "loss": 1.4485, "step": 1574 }, { "epoch": 12.6, "grad_norm": 46.099754333496094, "learning_rate": 4.859111111111111e-05, "loss": 1.2183, "step": 1575 }, { "epoch": 12.608, "grad_norm": 17.507917404174805, "learning_rate": 4.858666666666667e-05, "loss": 1.1533, "step": 1576 }, { "epoch": 12.616, "grad_norm": 33.42848587036133, "learning_rate": 4.858222222222222e-05, "loss": 1.3495, "step": 1577 }, { "epoch": 12.624, "grad_norm": 24.831270217895508, "learning_rate": 4.8577777777777776e-05, "loss": 1.0788, "step": 1578 
}, { "epoch": 12.632, "grad_norm": 37.61476516723633, "learning_rate": 4.857333333333334e-05, "loss": 1.3077, "step": 1579 }, { "epoch": 12.64, "grad_norm": 30.597673416137695, "learning_rate": 4.856888888888889e-05, "loss": 1.4141, "step": 1580 }, { "epoch": 12.648, "grad_norm": 35.205047607421875, "learning_rate": 4.856444444444445e-05, "loss": 1.507, "step": 1581 }, { "epoch": 12.656, "grad_norm": 27.876216888427734, "learning_rate": 4.856e-05, "loss": 1.5524, "step": 1582 }, { "epoch": 12.664, "grad_norm": 146.6150665283203, "learning_rate": 4.855555555555556e-05, "loss": 1.6685, "step": 1583 }, { "epoch": 12.672, "grad_norm": 49.38671875, "learning_rate": 4.855111111111111e-05, "loss": 2.015, "step": 1584 }, { "epoch": 12.68, "grad_norm": 36.32566452026367, "learning_rate": 4.854666666666667e-05, "loss": 1.658, "step": 1585 }, { "epoch": 12.688, "grad_norm": 29.004070281982422, "learning_rate": 4.854222222222222e-05, "loss": 3.5797, "step": 1586 }, { "epoch": 12.696, "grad_norm": 39.36180114746094, "learning_rate": 4.8537777777777784e-05, "loss": 1.2071, "step": 1587 }, { "epoch": 12.704, "grad_norm": 79.11420440673828, "learning_rate": 4.853333333333334e-05, "loss": 2.1402, "step": 1588 }, { "epoch": 12.712, "grad_norm": 21.775897979736328, "learning_rate": 4.852888888888889e-05, "loss": 1.4693, "step": 1589 }, { "epoch": 12.72, "grad_norm": 26.35989761352539, "learning_rate": 4.852444444444444e-05, "loss": 1.2284, "step": 1590 }, { "epoch": 12.728, "grad_norm": 18.653770446777344, "learning_rate": 4.852e-05, "loss": 2.4618, "step": 1591 }, { "epoch": 12.736, "grad_norm": 20.223268508911133, "learning_rate": 4.851555555555556e-05, "loss": 1.4488, "step": 1592 }, { "epoch": 12.744, "grad_norm": 49.611202239990234, "learning_rate": 4.851111111111111e-05, "loss": 1.8865, "step": 1593 }, { "epoch": 12.752, "grad_norm": 66.74272155761719, "learning_rate": 4.850666666666667e-05, "loss": 1.4498, "step": 1594 }, { "epoch": 12.76, "grad_norm": 35.60082244873047, 
"learning_rate": 4.850222222222223e-05, "loss": 1.5305, "step": 1595 }, { "epoch": 12.768, "grad_norm": 21.243732452392578, "learning_rate": 4.8497777777777784e-05, "loss": 1.9113, "step": 1596 }, { "epoch": 12.776, "grad_norm": 34.04533004760742, "learning_rate": 4.849333333333333e-05, "loss": 1.2603, "step": 1597 }, { "epoch": 12.784, "grad_norm": 30.470712661743164, "learning_rate": 4.848888888888889e-05, "loss": 1.4332, "step": 1598 }, { "epoch": 12.792, "grad_norm": 25.840482711791992, "learning_rate": 4.848444444444445e-05, "loss": 1.5817, "step": 1599 }, { "epoch": 12.8, "grad_norm": 162.19305419921875, "learning_rate": 4.8480000000000003e-05, "loss": 1.3752, "step": 1600 }, { "epoch": 12.808, "grad_norm": 50.4342155456543, "learning_rate": 4.847555555555556e-05, "loss": 1.8681, "step": 1601 }, { "epoch": 12.816, "grad_norm": 161.25950622558594, "learning_rate": 4.847111111111111e-05, "loss": 1.6171, "step": 1602 }, { "epoch": 12.824, "grad_norm": 40.35723876953125, "learning_rate": 4.8466666666666675e-05, "loss": 1.4792, "step": 1603 }, { "epoch": 12.832, "grad_norm": 57.07332992553711, "learning_rate": 4.846222222222222e-05, "loss": 1.5986, "step": 1604 }, { "epoch": 12.84, "grad_norm": 80.11104583740234, "learning_rate": 4.845777777777778e-05, "loss": 1.3523, "step": 1605 }, { "epoch": 12.848, "grad_norm": 52.667396545410156, "learning_rate": 4.845333333333333e-05, "loss": 1.4999, "step": 1606 }, { "epoch": 12.856, "grad_norm": 48.77626037597656, "learning_rate": 4.8448888888888894e-05, "loss": 1.6387, "step": 1607 }, { "epoch": 12.864, "grad_norm": 18.253650665283203, "learning_rate": 4.844444444444445e-05, "loss": 1.6105, "step": 1608 }, { "epoch": 12.872, "grad_norm": 128.14688110351562, "learning_rate": 4.8440000000000004e-05, "loss": 1.7235, "step": 1609 }, { "epoch": 12.88, "grad_norm": 77.48787689208984, "learning_rate": 4.843555555555556e-05, "loss": 1.2453, "step": 1610 }, { "epoch": 12.888, "grad_norm": 18.601417541503906, "learning_rate": 
4.8431111111111113e-05, "loss": 1.3651, "step": 1611 }, { "epoch": 12.896, "grad_norm": 129.36056518554688, "learning_rate": 4.842666666666667e-05, "loss": 1.3209, "step": 1612 }, { "epoch": 12.904, "grad_norm": 44.71307373046875, "learning_rate": 4.842222222222222e-05, "loss": 1.1553, "step": 1613 }, { "epoch": 12.912, "grad_norm": 20.723005294799805, "learning_rate": 4.841777777777778e-05, "loss": 1.1178, "step": 1614 }, { "epoch": 12.92, "grad_norm": 29.35223388671875, "learning_rate": 4.841333333333334e-05, "loss": 1.4103, "step": 1615 }, { "epoch": 12.928, "grad_norm": 38.47895431518555, "learning_rate": 4.8408888888888894e-05, "loss": 2.3718, "step": 1616 }, { "epoch": 12.936, "grad_norm": 16.5584774017334, "learning_rate": 4.840444444444445e-05, "loss": 1.5261, "step": 1617 }, { "epoch": 12.943999999999999, "grad_norm": 26.059226989746094, "learning_rate": 4.8400000000000004e-05, "loss": 1.4532, "step": 1618 }, { "epoch": 12.952, "grad_norm": 35.424171447753906, "learning_rate": 4.839555555555556e-05, "loss": 1.5252, "step": 1619 }, { "epoch": 12.96, "grad_norm": 30.277929306030273, "learning_rate": 4.8391111111111114e-05, "loss": 1.3334, "step": 1620 }, { "epoch": 12.968, "grad_norm": 35.05168914794922, "learning_rate": 4.838666666666667e-05, "loss": 1.4065, "step": 1621 }, { "epoch": 12.975999999999999, "grad_norm": 24.329504013061523, "learning_rate": 4.8382222222222224e-05, "loss": 1.1517, "step": 1622 }, { "epoch": 12.984, "grad_norm": 50.9328727722168, "learning_rate": 4.837777777777778e-05, "loss": 1.3572, "step": 1623 }, { "epoch": 12.992, "grad_norm": 32.50839614868164, "learning_rate": 4.837333333333334e-05, "loss": 1.3891, "step": 1624 }, { "epoch": 13.0, "grad_norm": 24.842710494995117, "learning_rate": 4.836888888888889e-05, "loss": 1.3805, "step": 1625 }, { "epoch": 13.0, "eval_loss": 1.4408096075057983, "eval_map": 0.2553, "eval_map_50": 0.471, "eval_map_75": 0.2272, "eval_map_Coverall": 0.5456, "eval_map_Face_Shield": 0.2114, 
"eval_map_Gloves": 0.1506, "eval_map_Goggles": 0.0467, "eval_map_Mask": 0.3222, "eval_map_large": 0.2926, "eval_map_medium": 0.1155, "eval_map_small": -1.0, "eval_mar_1": 0.2261, "eval_mar_10": 0.4413, "eval_mar_100": 0.4521, "eval_mar_100_Coverall": 0.7244, "eval_mar_100_Face_Shield": 0.5176, "eval_mar_100_Gloves": 0.3262, "eval_mar_100_Goggles": 0.2594, "eval_mar_100_Mask": 0.4327, "eval_mar_large": 0.5159, "eval_mar_medium": 0.2139, "eval_mar_small": -1.0, "eval_runtime": 3.829, "eval_samples_per_second": 7.574, "eval_steps_per_second": 0.522, "step": 1625 }, { "epoch": 13.008, "grad_norm": 22.592864990234375, "learning_rate": 4.836444444444444e-05, "loss": 1.1448, "step": 1626 }, { "epoch": 13.016, "grad_norm": 24.892065048217773, "learning_rate": 4.836e-05, "loss": 1.3124, "step": 1627 }, { "epoch": 13.024, "grad_norm": 27.914438247680664, "learning_rate": 4.835555555555556e-05, "loss": 1.2556, "step": 1628 }, { "epoch": 13.032, "grad_norm": 22.446487426757812, "learning_rate": 4.8351111111111114e-05, "loss": 1.5131, "step": 1629 }, { "epoch": 13.04, "grad_norm": 28.538837432861328, "learning_rate": 4.834666666666667e-05, "loss": 1.3299, "step": 1630 }, { "epoch": 13.048, "grad_norm": 35.71076965332031, "learning_rate": 4.8342222222222224e-05, "loss": 1.3354, "step": 1631 }, { "epoch": 13.056, "grad_norm": 38.25749588012695, "learning_rate": 4.833777777777778e-05, "loss": 1.5722, "step": 1632 }, { "epoch": 13.064, "grad_norm": 24.067983627319336, "learning_rate": 4.8333333333333334e-05, "loss": 1.1934, "step": 1633 }, { "epoch": 13.072, "grad_norm": 64.563232421875, "learning_rate": 4.832888888888889e-05, "loss": 1.8797, "step": 1634 }, { "epoch": 13.08, "grad_norm": 39.658145904541016, "learning_rate": 4.832444444444444e-05, "loss": 1.5569, "step": 1635 }, { "epoch": 13.088, "grad_norm": 46.1627197265625, "learning_rate": 4.8320000000000005e-05, "loss": 1.5023, "step": 1636 }, { "epoch": 13.096, "grad_norm": 24.4865665435791, "learning_rate": 
4.831555555555556e-05, "loss": 1.4868, "step": 1637 }, { "epoch": 13.104, "grad_norm": 53.560245513916016, "learning_rate": 4.8311111111111115e-05, "loss": 1.2279, "step": 1638 }, { "epoch": 13.112, "grad_norm": 18.927223205566406, "learning_rate": 4.830666666666667e-05, "loss": 1.3147, "step": 1639 }, { "epoch": 13.12, "grad_norm": 24.84602165222168, "learning_rate": 4.8302222222222224e-05, "loss": 1.6306, "step": 1640 }, { "epoch": 13.128, "grad_norm": 33.18925476074219, "learning_rate": 4.829777777777778e-05, "loss": 1.4121, "step": 1641 }, { "epoch": 13.136, "grad_norm": 24.92771339416504, "learning_rate": 4.8293333333333334e-05, "loss": 1.48, "step": 1642 }, { "epoch": 13.144, "grad_norm": 21.33700180053711, "learning_rate": 4.828888888888889e-05, "loss": 1.3695, "step": 1643 }, { "epoch": 13.152, "grad_norm": 29.917428970336914, "learning_rate": 4.828444444444445e-05, "loss": 1.3682, "step": 1644 }, { "epoch": 13.16, "grad_norm": 27.6994686126709, "learning_rate": 4.8280000000000005e-05, "loss": 1.6113, "step": 1645 }, { "epoch": 13.168, "grad_norm": 24.711366653442383, "learning_rate": 4.827555555555556e-05, "loss": 1.6902, "step": 1646 }, { "epoch": 13.176, "grad_norm": 46.21656799316406, "learning_rate": 4.827111111111111e-05, "loss": 1.7045, "step": 1647 }, { "epoch": 13.184, "grad_norm": 21.31562614440918, "learning_rate": 4.826666666666667e-05, "loss": 1.3757, "step": 1648 }, { "epoch": 13.192, "grad_norm": 18.719518661499023, "learning_rate": 4.8262222222222225e-05, "loss": 1.242, "step": 1649 }, { "epoch": 13.2, "grad_norm": 18.66802215576172, "learning_rate": 4.825777777777778e-05, "loss": 1.48, "step": 1650 }, { "epoch": 13.208, "grad_norm": 58.126461029052734, "learning_rate": 4.8253333333333334e-05, "loss": 1.8837, "step": 1651 }, { "epoch": 13.216, "grad_norm": 37.262939453125, "learning_rate": 4.8248888888888896e-05, "loss": 1.2637, "step": 1652 }, { "epoch": 13.224, "grad_norm": 45.935543060302734, "learning_rate": 4.824444444444445e-05, 
"loss": 1.0693, "step": 1653 }, { "epoch": 13.232, "grad_norm": 27.3867130279541, "learning_rate": 4.824e-05, "loss": 1.3634, "step": 1654 }, { "epoch": 13.24, "grad_norm": 20.41999053955078, "learning_rate": 4.8235555555555554e-05, "loss": 1.1089, "step": 1655 }, { "epoch": 13.248, "grad_norm": 26.899494171142578, "learning_rate": 4.8231111111111115e-05, "loss": 1.7428, "step": 1656 }, { "epoch": 13.256, "grad_norm": 22.307004928588867, "learning_rate": 4.822666666666667e-05, "loss": 1.111, "step": 1657 }, { "epoch": 13.264, "grad_norm": 37.7590446472168, "learning_rate": 4.8222222222222225e-05, "loss": 1.2465, "step": 1658 }, { "epoch": 13.272, "grad_norm": 39.93532943725586, "learning_rate": 4.821777777777778e-05, "loss": 1.1356, "step": 1659 }, { "epoch": 13.28, "grad_norm": 19.535024642944336, "learning_rate": 4.8213333333333335e-05, "loss": 1.3268, "step": 1660 }, { "epoch": 13.288, "grad_norm": 110.03358459472656, "learning_rate": 4.820888888888889e-05, "loss": 2.207, "step": 1661 }, { "epoch": 13.296, "grad_norm": 28.920284271240234, "learning_rate": 4.8204444444444444e-05, "loss": 1.7586, "step": 1662 }, { "epoch": 13.304, "grad_norm": 42.12594985961914, "learning_rate": 4.82e-05, "loss": 1.3653, "step": 1663 }, { "epoch": 13.312, "grad_norm": 28.244464874267578, "learning_rate": 4.819555555555556e-05, "loss": 1.9244, "step": 1664 }, { "epoch": 13.32, "grad_norm": 24.990108489990234, "learning_rate": 4.8191111111111116e-05, "loss": 1.4137, "step": 1665 }, { "epoch": 13.328, "grad_norm": 51.61275863647461, "learning_rate": 4.818666666666667e-05, "loss": 1.0819, "step": 1666 }, { "epoch": 13.336, "grad_norm": 35.42472839355469, "learning_rate": 4.8182222222222225e-05, "loss": 1.7189, "step": 1667 }, { "epoch": 13.344, "grad_norm": 28.184852600097656, "learning_rate": 4.817777777777778e-05, "loss": 1.6103, "step": 1668 }, { "epoch": 13.352, "grad_norm": 25.75661277770996, "learning_rate": 4.8173333333333335e-05, "loss": 1.6475, "step": 1669 }, { "epoch": 
13.36, "grad_norm": 17.44000244140625, "learning_rate": 4.816888888888889e-05, "loss": 1.227, "step": 1670 }, { "epoch": 13.368, "grad_norm": 33.29915237426758, "learning_rate": 4.8164444444444445e-05, "loss": 2.1838, "step": 1671 }, { "epoch": 13.376, "grad_norm": 22.48436164855957, "learning_rate": 4.816e-05, "loss": 1.1243, "step": 1672 }, { "epoch": 13.384, "grad_norm": 32.531681060791016, "learning_rate": 4.815555555555556e-05, "loss": 1.8153, "step": 1673 }, { "epoch": 13.392, "grad_norm": 28.733840942382812, "learning_rate": 4.8151111111111116e-05, "loss": 1.674, "step": 1674 }, { "epoch": 13.4, "grad_norm": 25.10010528564453, "learning_rate": 4.814666666666667e-05, "loss": 1.229, "step": 1675 }, { "epoch": 13.408, "grad_norm": 22.015884399414062, "learning_rate": 4.814222222222222e-05, "loss": 1.7737, "step": 1676 }, { "epoch": 13.416, "grad_norm": 48.35927963256836, "learning_rate": 4.813777777777778e-05, "loss": 1.4595, "step": 1677 }, { "epoch": 13.424, "grad_norm": 31.039859771728516, "learning_rate": 4.8133333333333336e-05, "loss": 2.9735, "step": 1678 }, { "epoch": 13.432, "grad_norm": 102.02745819091797, "learning_rate": 4.812888888888889e-05, "loss": 1.6314, "step": 1679 }, { "epoch": 13.44, "grad_norm": 29.523725509643555, "learning_rate": 4.8124444444444445e-05, "loss": 1.2238, "step": 1680 }, { "epoch": 13.448, "grad_norm": 26.66429901123047, "learning_rate": 4.812000000000001e-05, "loss": 1.2514, "step": 1681 }, { "epoch": 13.456, "grad_norm": 20.908449172973633, "learning_rate": 4.8115555555555555e-05, "loss": 2.112, "step": 1682 }, { "epoch": 13.464, "grad_norm": 41.76308059692383, "learning_rate": 4.811111111111111e-05, "loss": 1.441, "step": 1683 }, { "epoch": 13.472, "grad_norm": 19.474987030029297, "learning_rate": 4.8106666666666665e-05, "loss": 1.0418, "step": 1684 }, { "epoch": 13.48, "grad_norm": 22.028715133666992, "learning_rate": 4.8102222222222226e-05, "loss": 1.874, "step": 1685 }, { "epoch": 13.488, "grad_norm": 
34.981414794921875, "learning_rate": 4.809777777777778e-05, "loss": 1.1891, "step": 1686 }, { "epoch": 13.496, "grad_norm": 32.186344146728516, "learning_rate": 4.8093333333333336e-05, "loss": 1.4676, "step": 1687 }, { "epoch": 13.504, "grad_norm": 47.95741271972656, "learning_rate": 4.808888888888889e-05, "loss": 1.1663, "step": 1688 }, { "epoch": 13.512, "grad_norm": 34.8120231628418, "learning_rate": 4.8084444444444446e-05, "loss": 1.2452, "step": 1689 }, { "epoch": 13.52, "grad_norm": 30.043109893798828, "learning_rate": 4.808e-05, "loss": 1.5467, "step": 1690 }, { "epoch": 13.528, "grad_norm": 97.77434539794922, "learning_rate": 4.8075555555555555e-05, "loss": 1.4708, "step": 1691 }, { "epoch": 13.536, "grad_norm": 19.77394676208496, "learning_rate": 4.807111111111111e-05, "loss": 1.3501, "step": 1692 }, { "epoch": 13.544, "grad_norm": 14.126052856445312, "learning_rate": 4.806666666666667e-05, "loss": 1.2899, "step": 1693 }, { "epoch": 13.552, "grad_norm": 32.073455810546875, "learning_rate": 4.8062222222222227e-05, "loss": 1.6153, "step": 1694 }, { "epoch": 13.56, "grad_norm": 24.29463005065918, "learning_rate": 4.805777777777778e-05, "loss": 1.4041, "step": 1695 }, { "epoch": 13.568, "grad_norm": 22.534183502197266, "learning_rate": 4.8053333333333336e-05, "loss": 3.0606, "step": 1696 }, { "epoch": 13.576, "grad_norm": 31.13209342956543, "learning_rate": 4.804888888888889e-05, "loss": 1.1332, "step": 1697 }, { "epoch": 13.584, "grad_norm": 26.06682014465332, "learning_rate": 4.8044444444444446e-05, "loss": 1.413, "step": 1698 }, { "epoch": 13.592, "grad_norm": 27.158842086791992, "learning_rate": 4.804e-05, "loss": 1.8297, "step": 1699 }, { "epoch": 13.6, "grad_norm": 19.122568130493164, "learning_rate": 4.8035555555555556e-05, "loss": 1.548, "step": 1700 }, { "epoch": 13.608, "grad_norm": 26.508316040039062, "learning_rate": 4.803111111111112e-05, "loss": 2.136, "step": 1701 }, { "epoch": 13.616, "grad_norm": 23.27251434326172, "learning_rate": 
4.802666666666667e-05, "loss": 1.7307, "step": 1702 }, { "epoch": 13.624, "grad_norm": 50.52394485473633, "learning_rate": 4.802222222222223e-05, "loss": 1.7092, "step": 1703 }, { "epoch": 13.632, "grad_norm": 18.504785537719727, "learning_rate": 4.8017777777777775e-05, "loss": 1.423, "step": 1704 }, { "epoch": 13.64, "grad_norm": 31.79596519470215, "learning_rate": 4.801333333333334e-05, "loss": 1.5021, "step": 1705 }, { "epoch": 13.648, "grad_norm": 30.70829963684082, "learning_rate": 4.800888888888889e-05, "loss": 1.2408, "step": 1706 }, { "epoch": 13.656, "grad_norm": 52.15388107299805, "learning_rate": 4.8004444444444446e-05, "loss": 1.5853, "step": 1707 }, { "epoch": 13.664, "grad_norm": 29.710445404052734, "learning_rate": 4.8e-05, "loss": 0.8484, "step": 1708 }, { "epoch": 13.672, "grad_norm": 32.19495391845703, "learning_rate": 4.799555555555556e-05, "loss": 1.2762, "step": 1709 }, { "epoch": 13.68, "grad_norm": 28.85637855529785, "learning_rate": 4.799111111111112e-05, "loss": 1.1714, "step": 1710 }, { "epoch": 13.688, "grad_norm": 45.878746032714844, "learning_rate": 4.7986666666666666e-05, "loss": 1.7791, "step": 1711 }, { "epoch": 13.696, "grad_norm": 18.456254959106445, "learning_rate": 4.798222222222222e-05, "loss": 1.0027, "step": 1712 }, { "epoch": 13.704, "grad_norm": 63.09761428833008, "learning_rate": 4.797777777777778e-05, "loss": 1.2877, "step": 1713 }, { "epoch": 13.712, "grad_norm": 78.56232452392578, "learning_rate": 4.797333333333334e-05, "loss": 1.6883, "step": 1714 }, { "epoch": 13.72, "grad_norm": 56.41901397705078, "learning_rate": 4.796888888888889e-05, "loss": 1.4911, "step": 1715 }, { "epoch": 13.728, "grad_norm": 34.925537109375, "learning_rate": 4.796444444444445e-05, "loss": 1.5189, "step": 1716 }, { "epoch": 13.736, "grad_norm": 21.89179801940918, "learning_rate": 4.796e-05, "loss": 1.6534, "step": 1717 }, { "epoch": 13.744, "grad_norm": 232.20960998535156, "learning_rate": 4.7955555555555556e-05, "loss": 1.5603, "step": 1718 }, 
{ "epoch": 13.752, "grad_norm": 29.568462371826172, "learning_rate": 4.795111111111111e-05, "loss": 1.2654, "step": 1719 }, { "epoch": 13.76, "grad_norm": 26.719459533691406, "learning_rate": 4.7946666666666666e-05, "loss": 1.2915, "step": 1720 }, { "epoch": 13.768, "grad_norm": 35.468711853027344, "learning_rate": 4.794222222222223e-05, "loss": 1.6957, "step": 1721 }, { "epoch": 13.776, "grad_norm": 27.3775691986084, "learning_rate": 4.793777777777778e-05, "loss": 1.6924, "step": 1722 }, { "epoch": 13.784, "grad_norm": 13.349129676818848, "learning_rate": 4.793333333333334e-05, "loss": 1.2215, "step": 1723 }, { "epoch": 13.792, "grad_norm": 21.497785568237305, "learning_rate": 4.792888888888889e-05, "loss": 1.4062, "step": 1724 }, { "epoch": 13.8, "grad_norm": 30.734277725219727, "learning_rate": 4.792444444444445e-05, "loss": 1.6225, "step": 1725 }, { "epoch": 13.808, "grad_norm": 44.652687072753906, "learning_rate": 4.792e-05, "loss": 1.3529, "step": 1726 }, { "epoch": 13.816, "grad_norm": 146.59808349609375, "learning_rate": 4.791555555555556e-05, "loss": 1.4817, "step": 1727 }, { "epoch": 13.824, "grad_norm": 28.724071502685547, "learning_rate": 4.791111111111111e-05, "loss": 1.5062, "step": 1728 }, { "epoch": 13.832, "grad_norm": 45.81856918334961, "learning_rate": 4.7906666666666667e-05, "loss": 1.3249, "step": 1729 }, { "epoch": 13.84, "grad_norm": 16.55193328857422, "learning_rate": 4.790222222222223e-05, "loss": 1.8316, "step": 1730 }, { "epoch": 13.848, "grad_norm": 17.017292022705078, "learning_rate": 4.789777777777778e-05, "loss": 1.9037, "step": 1731 }, { "epoch": 13.856, "grad_norm": 67.59866333007812, "learning_rate": 4.789333333333334e-05, "loss": 1.7821, "step": 1732 }, { "epoch": 13.864, "grad_norm": 30.952362060546875, "learning_rate": 4.7888888888888886e-05, "loss": 1.6599, "step": 1733 }, { "epoch": 13.872, "grad_norm": 33.97465896606445, "learning_rate": 4.788444444444445e-05, "loss": 1.5916, "step": 1734 }, { "epoch": 13.88, "grad_norm": 
23.80949592590332, "learning_rate": 4.788e-05, "loss": 1.3564, "step": 1735 }, { "epoch": 13.888, "grad_norm": 21.241662979125977, "learning_rate": 4.787555555555556e-05, "loss": 1.4053, "step": 1736 }, { "epoch": 13.896, "grad_norm": 35.62356948852539, "learning_rate": 4.787111111111111e-05, "loss": 2.1704, "step": 1737 }, { "epoch": 13.904, "grad_norm": 45.725711822509766, "learning_rate": 4.7866666666666674e-05, "loss": 1.3615, "step": 1738 }, { "epoch": 13.912, "grad_norm": 25.05815887451172, "learning_rate": 4.786222222222222e-05, "loss": 1.4566, "step": 1739 }, { "epoch": 13.92, "grad_norm": 28.421045303344727, "learning_rate": 4.7857777777777777e-05, "loss": 1.0825, "step": 1740 }, { "epoch": 13.928, "grad_norm": 35.87770462036133, "learning_rate": 4.785333333333333e-05, "loss": 1.7792, "step": 1741 }, { "epoch": 13.936, "grad_norm": 33.36922836303711, "learning_rate": 4.784888888888889e-05, "loss": 1.0948, "step": 1742 }, { "epoch": 13.943999999999999, "grad_norm": 48.32431411743164, "learning_rate": 4.784444444444445e-05, "loss": 1.3102, "step": 1743 }, { "epoch": 13.952, "grad_norm": 26.145353317260742, "learning_rate": 4.784e-05, "loss": 1.9918, "step": 1744 }, { "epoch": 13.96, "grad_norm": 36.01974105834961, "learning_rate": 4.783555555555556e-05, "loss": 1.5594, "step": 1745 }, { "epoch": 13.968, "grad_norm": 114.67523956298828, "learning_rate": 4.783111111111111e-05, "loss": 1.3958, "step": 1746 }, { "epoch": 13.975999999999999, "grad_norm": 29.882898330688477, "learning_rate": 4.782666666666667e-05, "loss": 1.3155, "step": 1747 }, { "epoch": 13.984, "grad_norm": 29.395456314086914, "learning_rate": 4.782222222222222e-05, "loss": 1.9204, "step": 1748 }, { "epoch": 13.992, "grad_norm": 17.68216323852539, "learning_rate": 4.781777777777778e-05, "loss": 1.3421, "step": 1749 }, { "epoch": 14.0, "grad_norm": 64.88712310791016, "learning_rate": 4.781333333333334e-05, "loss": 1.5328, "step": 1750 }, { "epoch": 14.0, "eval_loss": 1.4780170917510986, 
"eval_map": 0.2293, "eval_map_50": 0.479, "eval_map_75": 0.2084, "eval_map_Coverall": 0.4829, "eval_map_Face_Shield": 0.1713, "eval_map_Gloves": 0.1355, "eval_map_Goggles": 0.1225, "eval_map_Mask": 0.2343, "eval_map_large": 0.2621, "eval_map_medium": 0.107, "eval_map_small": -1.0, "eval_mar_1": 0.2129, "eval_mar_10": 0.4207, "eval_mar_100": 0.4296, "eval_mar_100_Coverall": 0.6578, "eval_mar_100_Face_Shield": 0.5118, "eval_mar_100_Gloves": 0.3148, "eval_mar_100_Goggles": 0.325, "eval_mar_100_Mask": 0.3385, "eval_mar_large": 0.5015, "eval_mar_medium": 0.1838, "eval_mar_small": -1.0, "eval_runtime": 3.8285, "eval_samples_per_second": 7.575, "eval_steps_per_second": 0.522, "step": 1750 }, { "epoch": 14.008, "grad_norm": 15.89767837524414, "learning_rate": 4.7808888888888893e-05, "loss": 1.6752, "step": 1751 }, { "epoch": 14.016, "grad_norm": 25.84173583984375, "learning_rate": 4.780444444444445e-05, "loss": 1.5355, "step": 1752 }, { "epoch": 14.024, "grad_norm": 34.52365493774414, "learning_rate": 4.78e-05, "loss": 1.3381, "step": 1753 }, { "epoch": 14.032, "grad_norm": 40.74403381347656, "learning_rate": 4.779555555555556e-05, "loss": 1.6901, "step": 1754 }, { "epoch": 14.04, "grad_norm": 17.671432495117188, "learning_rate": 4.779111111111111e-05, "loss": 1.2531, "step": 1755 }, { "epoch": 14.048, "grad_norm": 73.15729522705078, "learning_rate": 4.778666666666667e-05, "loss": 1.6046, "step": 1756 }, { "epoch": 14.056, "grad_norm": 29.94041633605957, "learning_rate": 4.778222222222222e-05, "loss": 1.2883, "step": 1757 }, { "epoch": 14.064, "grad_norm": 32.20097732543945, "learning_rate": 4.7777777777777784e-05, "loss": 1.8248, "step": 1758 }, { "epoch": 14.072, "grad_norm": 27.612895965576172, "learning_rate": 4.777333333333334e-05, "loss": 1.3544, "step": 1759 }, { "epoch": 14.08, "grad_norm": 21.103567123413086, "learning_rate": 4.7768888888888894e-05, "loss": 1.3442, "step": 1760 }, { "epoch": 14.088, "grad_norm": 36.64070129394531, "learning_rate": 
4.776444444444444e-05, "loss": 1.1478, "step": 1761 }, { "epoch": 14.096, "grad_norm": 32.865806579589844, "learning_rate": 4.7760000000000004e-05, "loss": 1.6941, "step": 1762 }, { "epoch": 14.104, "grad_norm": 36.86145782470703, "learning_rate": 4.775555555555556e-05, "loss": 1.5045, "step": 1763 }, { "epoch": 14.112, "grad_norm": 103.90203094482422, "learning_rate": 4.775111111111111e-05, "loss": 1.6617, "step": 1764 }, { "epoch": 14.12, "grad_norm": 76.14293670654297, "learning_rate": 4.774666666666667e-05, "loss": 1.4009, "step": 1765 }, { "epoch": 14.128, "grad_norm": 29.24789810180664, "learning_rate": 4.774222222222223e-05, "loss": 1.8918, "step": 1766 }, { "epoch": 14.136, "grad_norm": 39.490726470947266, "learning_rate": 4.7737777777777785e-05, "loss": 2.1302, "step": 1767 }, { "epoch": 14.144, "grad_norm": 19.51039695739746, "learning_rate": 4.773333333333333e-05, "loss": 1.6019, "step": 1768 }, { "epoch": 14.152, "grad_norm": 47.6649169921875, "learning_rate": 4.772888888888889e-05, "loss": 1.5631, "step": 1769 }, { "epoch": 14.16, "grad_norm": 51.52498245239258, "learning_rate": 4.772444444444445e-05, "loss": 1.4402, "step": 1770 }, { "epoch": 14.168, "grad_norm": 54.20497131347656, "learning_rate": 4.7720000000000004e-05, "loss": 1.5318, "step": 1771 }, { "epoch": 14.176, "grad_norm": 32.616085052490234, "learning_rate": 4.771555555555556e-05, "loss": 1.4521, "step": 1772 }, { "epoch": 14.184, "grad_norm": 35.062583923339844, "learning_rate": 4.7711111111111114e-05, "loss": 1.5775, "step": 1773 }, { "epoch": 14.192, "grad_norm": 31.57722282409668, "learning_rate": 4.770666666666667e-05, "loss": 1.7916, "step": 1774 }, { "epoch": 14.2, "grad_norm": 64.41719818115234, "learning_rate": 4.770222222222222e-05, "loss": 1.5315, "step": 1775 }, { "epoch": 14.208, "grad_norm": 30.814348220825195, "learning_rate": 4.769777777777778e-05, "loss": 1.3835, "step": 1776 }, { "epoch": 14.216, "grad_norm": 22.646648406982422, "learning_rate": 4.769333333333333e-05, 
"loss": 1.6493, "step": 1777 }, { "epoch": 14.224, "grad_norm": 800.186767578125, "learning_rate": 4.768888888888889e-05, "loss": 1.29, "step": 1778 }, { "epoch": 14.232, "grad_norm": 21.46826171875, "learning_rate": 4.768444444444445e-05, "loss": 1.4192, "step": 1779 }, { "epoch": 14.24, "grad_norm": 22.948993682861328, "learning_rate": 4.7680000000000004e-05, "loss": 1.3037, "step": 1780 }, { "epoch": 14.248, "grad_norm": 20.038619995117188, "learning_rate": 4.767555555555556e-05, "loss": 1.3121, "step": 1781 }, { "epoch": 14.256, "grad_norm": 19.75214195251465, "learning_rate": 4.7671111111111114e-05, "loss": 2.4302, "step": 1782 }, { "epoch": 14.264, "grad_norm": 17.965240478515625, "learning_rate": 4.766666666666667e-05, "loss": 1.2185, "step": 1783 }, { "epoch": 14.272, "grad_norm": 26.451295852661133, "learning_rate": 4.7662222222222224e-05, "loss": 2.0092, "step": 1784 }, { "epoch": 14.28, "grad_norm": 52.59686279296875, "learning_rate": 4.765777777777778e-05, "loss": 1.2587, "step": 1785 }, { "epoch": 14.288, "grad_norm": 25.648977279663086, "learning_rate": 4.765333333333333e-05, "loss": 1.478, "step": 1786 }, { "epoch": 14.296, "grad_norm": 16.052011489868164, "learning_rate": 4.7648888888888895e-05, "loss": 1.3318, "step": 1787 }, { "epoch": 14.304, "grad_norm": 40.11272430419922, "learning_rate": 4.764444444444445e-05, "loss": 1.6632, "step": 1788 }, { "epoch": 14.312, "grad_norm": 33.369041442871094, "learning_rate": 4.7640000000000005e-05, "loss": 1.3675, "step": 1789 }, { "epoch": 14.32, "grad_norm": 28.73035430908203, "learning_rate": 4.763555555555555e-05, "loss": 1.8789, "step": 1790 }, { "epoch": 14.328, "grad_norm": 39.8529167175293, "learning_rate": 4.7631111111111114e-05, "loss": 1.1815, "step": 1791 }, { "epoch": 14.336, "grad_norm": 38.078514099121094, "learning_rate": 4.762666666666667e-05, "loss": 1.2475, "step": 1792 }, { "epoch": 14.344, "grad_norm": 21.06952667236328, "learning_rate": 4.7622222222222224e-05, "loss": 1.0046, "step": 
1793 }, { "epoch": 14.352, "grad_norm": 34.72304153442383, "learning_rate": 4.761777777777778e-05, "loss": 1.2327, "step": 1794 }, { "epoch": 14.36, "grad_norm": 24.42037010192871, "learning_rate": 4.761333333333334e-05, "loss": 1.0674, "step": 1795 }, { "epoch": 14.368, "grad_norm": 22.495784759521484, "learning_rate": 4.760888888888889e-05, "loss": 1.2335, "step": 1796 }, { "epoch": 14.376, "grad_norm": 46.68667221069336, "learning_rate": 4.7604444444444443e-05, "loss": 1.2831, "step": 1797 }, { "epoch": 14.384, "grad_norm": 23.316082000732422, "learning_rate": 4.76e-05, "loss": 1.5726, "step": 1798 }, { "epoch": 14.392, "grad_norm": 27.320165634155273, "learning_rate": 4.759555555555556e-05, "loss": 1.2985, "step": 1799 }, { "epoch": 14.4, "grad_norm": 69.9921646118164, "learning_rate": 4.7591111111111115e-05, "loss": 1.553, "step": 1800 }, { "epoch": 14.408, "grad_norm": 32.44914627075195, "learning_rate": 4.758666666666667e-05, "loss": 1.4739, "step": 1801 }, { "epoch": 14.416, "grad_norm": 28.221710205078125, "learning_rate": 4.7582222222222224e-05, "loss": 1.4744, "step": 1802 }, { "epoch": 14.424, "grad_norm": 39.04890060424805, "learning_rate": 4.757777777777778e-05, "loss": 1.5148, "step": 1803 }, { "epoch": 14.432, "grad_norm": 56.7842903137207, "learning_rate": 4.7573333333333334e-05, "loss": 1.5032, "step": 1804 }, { "epoch": 14.44, "grad_norm": 25.166400909423828, "learning_rate": 4.756888888888889e-05, "loss": 1.3978, "step": 1805 }, { "epoch": 14.448, "grad_norm": 20.968774795532227, "learning_rate": 4.7564444444444444e-05, "loss": 1.6877, "step": 1806 }, { "epoch": 14.456, "grad_norm": 29.321388244628906, "learning_rate": 4.7560000000000005e-05, "loss": 1.759, "step": 1807 }, { "epoch": 14.464, "grad_norm": 52.926780700683594, "learning_rate": 4.755555555555556e-05, "loss": 1.4599, "step": 1808 }, { "epoch": 14.472, "grad_norm": 27.287792205810547, "learning_rate": 4.7551111111111115e-05, "loss": 1.1091, "step": 1809 }, { "epoch": 14.48, 
"grad_norm": 38.4724235534668, "learning_rate": 4.754666666666667e-05, "loss": 0.9534, "step": 1810 }, { "epoch": 14.488, "grad_norm": 27.501110076904297, "learning_rate": 4.7542222222222225e-05, "loss": 1.6909, "step": 1811 }, { "epoch": 14.496, "grad_norm": 38.733238220214844, "learning_rate": 4.753777777777778e-05, "loss": 1.6546, "step": 1812 }, { "epoch": 14.504, "grad_norm": 33.717411041259766, "learning_rate": 4.7533333333333334e-05, "loss": 1.5763, "step": 1813 }, { "epoch": 14.512, "grad_norm": 32.83949661254883, "learning_rate": 4.752888888888889e-05, "loss": 3.1909, "step": 1814 }, { "epoch": 14.52, "grad_norm": 38.565433502197266, "learning_rate": 4.752444444444445e-05, "loss": 1.5462, "step": 1815 }, { "epoch": 14.528, "grad_norm": 22.04370880126953, "learning_rate": 4.7520000000000006e-05, "loss": 1.5097, "step": 1816 }, { "epoch": 14.536, "grad_norm": 43.36502456665039, "learning_rate": 4.751555555555556e-05, "loss": 1.5834, "step": 1817 }, { "epoch": 14.544, "grad_norm": 24.888824462890625, "learning_rate": 4.751111111111111e-05, "loss": 1.1879, "step": 1818 }, { "epoch": 14.552, "grad_norm": 26.70519256591797, "learning_rate": 4.750666666666667e-05, "loss": 1.1602, "step": 1819 }, { "epoch": 14.56, "grad_norm": 40.67252731323242, "learning_rate": 4.7502222222222225e-05, "loss": 1.7317, "step": 1820 }, { "epoch": 14.568, "grad_norm": 117.2452163696289, "learning_rate": 4.749777777777778e-05, "loss": 1.5917, "step": 1821 }, { "epoch": 14.576, "grad_norm": 58.339073181152344, "learning_rate": 4.7493333333333335e-05, "loss": 1.2916, "step": 1822 }, { "epoch": 14.584, "grad_norm": 53.960445404052734, "learning_rate": 4.7488888888888897e-05, "loss": 1.6689, "step": 1823 }, { "epoch": 14.592, "grad_norm": 36.24987030029297, "learning_rate": 4.748444444444445e-05, "loss": 1.324, "step": 1824 }, { "epoch": 14.6, "grad_norm": 37.80342483520508, "learning_rate": 4.748e-05, "loss": 2.023, "step": 1825 }, { "epoch": 14.608, "grad_norm": 65.04106903076172, 
"learning_rate": 4.7475555555555554e-05, "loss": 1.7329, "step": 1826 }, { "epoch": 14.616, "grad_norm": 27.734251022338867, "learning_rate": 4.747111111111111e-05, "loss": 1.2098, "step": 1827 }, { "epoch": 14.624, "grad_norm": 32.48012161254883, "learning_rate": 4.746666666666667e-05, "loss": 1.4103, "step": 1828 }, { "epoch": 14.632, "grad_norm": 50.91429901123047, "learning_rate": 4.7462222222222226e-05, "loss": 1.4608, "step": 1829 }, { "epoch": 14.64, "grad_norm": 31.36233901977539, "learning_rate": 4.745777777777778e-05, "loss": 1.4744, "step": 1830 }, { "epoch": 14.648, "grad_norm": 32.0705680847168, "learning_rate": 4.7453333333333335e-05, "loss": 1.9371, "step": 1831 }, { "epoch": 14.656, "grad_norm": 26.308332443237305, "learning_rate": 4.744888888888889e-05, "loss": 1.0741, "step": 1832 }, { "epoch": 14.664, "grad_norm": 33.436561584472656, "learning_rate": 4.7444444444444445e-05, "loss": 1.0717, "step": 1833 }, { "epoch": 14.672, "grad_norm": 22.753189086914062, "learning_rate": 4.744e-05, "loss": 1.5816, "step": 1834 }, { "epoch": 14.68, "grad_norm": 30.552322387695312, "learning_rate": 4.7435555555555555e-05, "loss": 1.4559, "step": 1835 }, { "epoch": 14.688, "grad_norm": 34.33586120605469, "learning_rate": 4.7431111111111116e-05, "loss": 1.4852, "step": 1836 }, { "epoch": 14.696, "grad_norm": 74.29656219482422, "learning_rate": 4.742666666666667e-05, "loss": 1.2285, "step": 1837 }, { "epoch": 14.704, "grad_norm": 28.728351593017578, "learning_rate": 4.7422222222222226e-05, "loss": 1.5368, "step": 1838 }, { "epoch": 14.712, "grad_norm": 31.270610809326172, "learning_rate": 4.741777777777778e-05, "loss": 1.8278, "step": 1839 }, { "epoch": 14.72, "grad_norm": 167.32444763183594, "learning_rate": 4.7413333333333336e-05, "loss": 1.3787, "step": 1840 }, { "epoch": 14.728, "grad_norm": 22.189104080200195, "learning_rate": 4.740888888888889e-05, "loss": 1.3887, "step": 1841 }, { "epoch": 14.736, "grad_norm": 24.146469116210938, "learning_rate": 
4.7404444444444445e-05, "loss": 1.3297, "step": 1842 }, { "epoch": 14.744, "grad_norm": 26.311948776245117, "learning_rate": 4.74e-05, "loss": 1.4328, "step": 1843 }, { "epoch": 14.752, "grad_norm": 28.303491592407227, "learning_rate": 4.739555555555556e-05, "loss": 1.5934, "step": 1844 }, { "epoch": 14.76, "grad_norm": 172.68238830566406, "learning_rate": 4.739111111111112e-05, "loss": 1.2967, "step": 1845 }, { "epoch": 14.768, "grad_norm": 29.11901092529297, "learning_rate": 4.7386666666666665e-05, "loss": 1.2901, "step": 1846 }, { "epoch": 14.776, "grad_norm": 19.545440673828125, "learning_rate": 4.738222222222222e-05, "loss": 1.8033, "step": 1847 }, { "epoch": 14.784, "grad_norm": 273.1265869140625, "learning_rate": 4.737777777777778e-05, "loss": 1.3374, "step": 1848 }, { "epoch": 14.792, "grad_norm": 23.731124877929688, "learning_rate": 4.7373333333333336e-05, "loss": 1.0247, "step": 1849 }, { "epoch": 14.8, "grad_norm": 35.15464401245117, "learning_rate": 4.736888888888889e-05, "loss": 0.9129, "step": 1850 }, { "epoch": 14.808, "grad_norm": 18.422748565673828, "learning_rate": 4.7364444444444446e-05, "loss": 1.3445, "step": 1851 }, { "epoch": 14.816, "grad_norm": 47.65056228637695, "learning_rate": 4.736000000000001e-05, "loss": 1.3711, "step": 1852 }, { "epoch": 14.824, "grad_norm": 18.387678146362305, "learning_rate": 4.7355555555555555e-05, "loss": 1.2157, "step": 1853 }, { "epoch": 14.832, "grad_norm": 38.93866729736328, "learning_rate": 4.735111111111111e-05, "loss": 1.3911, "step": 1854 }, { "epoch": 14.84, "grad_norm": 34.443267822265625, "learning_rate": 4.7346666666666665e-05, "loss": 1.3996, "step": 1855 }, { "epoch": 14.848, "grad_norm": 34.9578857421875, "learning_rate": 4.734222222222223e-05, "loss": 1.096, "step": 1856 }, { "epoch": 14.856, "grad_norm": 22.251033782958984, "learning_rate": 4.733777777777778e-05, "loss": 1.1664, "step": 1857 }, { "epoch": 14.864, "grad_norm": 23.53314781188965, "learning_rate": 4.7333333333333336e-05, "loss": 
1.0976, "step": 1858 }, { "epoch": 14.872, "grad_norm": 34.080650329589844, "learning_rate": 4.732888888888889e-05, "loss": 1.1967, "step": 1859 }, { "epoch": 14.88, "grad_norm": 15.870540618896484, "learning_rate": 4.7324444444444446e-05, "loss": 1.2319, "step": 1860 }, { "epoch": 14.888, "grad_norm": 29.644472122192383, "learning_rate": 4.732e-05, "loss": 1.4511, "step": 1861 }, { "epoch": 14.896, "grad_norm": 48.224300384521484, "learning_rate": 4.7315555555555556e-05, "loss": 1.1982, "step": 1862 }, { "epoch": 14.904, "grad_norm": 33.95867156982422, "learning_rate": 4.731111111111111e-05, "loss": 1.2693, "step": 1863 }, { "epoch": 14.912, "grad_norm": 17.74903678894043, "learning_rate": 4.730666666666667e-05, "loss": 1.0702, "step": 1864 }, { "epoch": 14.92, "grad_norm": 51.66568374633789, "learning_rate": 4.730222222222223e-05, "loss": 1.2159, "step": 1865 }, { "epoch": 14.928, "grad_norm": 21.401660919189453, "learning_rate": 4.729777777777778e-05, "loss": 1.3345, "step": 1866 }, { "epoch": 14.936, "grad_norm": 21.409847259521484, "learning_rate": 4.729333333333334e-05, "loss": 2.6696, "step": 1867 }, { "epoch": 14.943999999999999, "grad_norm": 17.156381607055664, "learning_rate": 4.728888888888889e-05, "loss": 1.8432, "step": 1868 }, { "epoch": 14.952, "grad_norm": 27.49476432800293, "learning_rate": 4.7284444444444446e-05, "loss": 1.0008, "step": 1869 }, { "epoch": 14.96, "grad_norm": 19.587970733642578, "learning_rate": 4.728e-05, "loss": 1.2576, "step": 1870 }, { "epoch": 14.968, "grad_norm": 36.38450241088867, "learning_rate": 4.7275555555555556e-05, "loss": 1.3192, "step": 1871 }, { "epoch": 14.975999999999999, "grad_norm": 30.696157455444336, "learning_rate": 4.727111111111112e-05, "loss": 2.5509, "step": 1872 }, { "epoch": 14.984, "grad_norm": 64.8305435180664, "learning_rate": 4.726666666666667e-05, "loss": 1.0904, "step": 1873 }, { "epoch": 14.992, "grad_norm": 52.119956970214844, "learning_rate": 4.726222222222223e-05, "loss": 1.1879, "step": 1874 
}, { "epoch": 15.0, "grad_norm": 25.334299087524414, "learning_rate": 4.7257777777777776e-05, "loss": 1.7608, "step": 1875 }, { "epoch": 15.0, "eval_loss": 1.4879372119903564, "eval_map": 0.2349, "eval_map_50": 0.4973, "eval_map_75": 0.1705, "eval_map_Coverall": 0.551, "eval_map_Face_Shield": 0.2384, "eval_map_Gloves": 0.1425, "eval_map_Goggles": 0.0631, "eval_map_Mask": 0.1797, "eval_map_large": 0.2687, "eval_map_medium": 0.0923, "eval_map_small": -1.0, "eval_mar_1": 0.2172, "eval_mar_10": 0.4041, "eval_mar_100": 0.4142, "eval_mar_100_Coverall": 0.7267, "eval_mar_100_Face_Shield": 0.5294, "eval_mar_100_Gloves": 0.3082, "eval_mar_100_Goggles": 0.2375, "eval_mar_100_Mask": 0.2692, "eval_mar_large": 0.5035, "eval_mar_medium": 0.1475, "eval_mar_small": -1.0, "eval_runtime": 5.0946, "eval_samples_per_second": 5.692, "eval_steps_per_second": 0.393, "step": 1875 }, { "epoch": 15.008, "grad_norm": 15.574034690856934, "learning_rate": 4.725333333333334e-05, "loss": 2.9937, "step": 1876 }, { "epoch": 15.016, "grad_norm": 22.74445343017578, "learning_rate": 4.724888888888889e-05, "loss": 1.3478, "step": 1877 }, { "epoch": 15.024, "grad_norm": 23.429929733276367, "learning_rate": 4.724444444444445e-05, "loss": 1.6096, "step": 1878 }, { "epoch": 15.032, "grad_norm": 38.980587005615234, "learning_rate": 4.724e-05, "loss": 1.085, "step": 1879 }, { "epoch": 15.04, "grad_norm": 35.1064338684082, "learning_rate": 4.7235555555555557e-05, "loss": 1.5794, "step": 1880 }, { "epoch": 15.048, "grad_norm": 18.645170211791992, "learning_rate": 4.723111111111112e-05, "loss": 1.0365, "step": 1881 }, { "epoch": 15.056, "grad_norm": 36.819313049316406, "learning_rate": 4.7226666666666666e-05, "loss": 1.3176, "step": 1882 }, { "epoch": 15.064, "grad_norm": 36.22188186645508, "learning_rate": 4.722222222222222e-05, "loss": 1.4879, "step": 1883 }, { "epoch": 15.072, "grad_norm": 24.519567489624023, "learning_rate": 4.7217777777777776e-05, "loss": 1.7994, "step": 1884 }, { "epoch": 15.08, 
"grad_norm": 48.881996154785156, "learning_rate": 4.721333333333334e-05, "loss": 1.5046, "step": 1885 }, { "epoch": 15.088, "grad_norm": 45.11880111694336, "learning_rate": 4.720888888888889e-05, "loss": 1.5922, "step": 1886 }, { "epoch": 15.096, "grad_norm": 26.052837371826172, "learning_rate": 4.720444444444445e-05, "loss": 1.2358, "step": 1887 }, { "epoch": 15.104, "grad_norm": 30.803754806518555, "learning_rate": 4.72e-05, "loss": 1.2541, "step": 1888 }, { "epoch": 15.112, "grad_norm": 23.054244995117188, "learning_rate": 4.719555555555556e-05, "loss": 1.211, "step": 1889 }, { "epoch": 15.12, "grad_norm": 24.396697998046875, "learning_rate": 4.719111111111111e-05, "loss": 1.3066, "step": 1890 }, { "epoch": 15.128, "grad_norm": 25.405920028686523, "learning_rate": 4.718666666666667e-05, "loss": 1.2791, "step": 1891 }, { "epoch": 15.136, "grad_norm": 70.18645477294922, "learning_rate": 4.718222222222222e-05, "loss": 1.0985, "step": 1892 }, { "epoch": 15.144, "grad_norm": 54.076271057128906, "learning_rate": 4.717777777777778e-05, "loss": 1.6421, "step": 1893 }, { "epoch": 15.152, "grad_norm": 75.11385345458984, "learning_rate": 4.717333333333334e-05, "loss": 1.4633, "step": 1894 }, { "epoch": 15.16, "grad_norm": 21.34485626220703, "learning_rate": 4.716888888888889e-05, "loss": 1.2564, "step": 1895 }, { "epoch": 15.168, "grad_norm": 46.58147048950195, "learning_rate": 4.716444444444445e-05, "loss": 1.041, "step": 1896 }, { "epoch": 15.176, "grad_norm": 21.670557022094727, "learning_rate": 4.716e-05, "loss": 1.6804, "step": 1897 }, { "epoch": 15.184, "grad_norm": 26.75288963317871, "learning_rate": 4.715555555555556e-05, "loss": 1.2016, "step": 1898 }, { "epoch": 15.192, "grad_norm": 20.371196746826172, "learning_rate": 4.715111111111111e-05, "loss": 1.3763, "step": 1899 }, { "epoch": 15.2, "grad_norm": 22.351768493652344, "learning_rate": 4.714666666666667e-05, "loss": 1.0805, "step": 1900 }, { "epoch": 15.208, "grad_norm": 26.334060668945312, "learning_rate": 
4.714222222222223e-05, "loss": 1.4887, "step": 1901 }, { "epoch": 15.216, "grad_norm": 25.447694778442383, "learning_rate": 4.7137777777777783e-05, "loss": 1.1998, "step": 1902 }, { "epoch": 15.224, "grad_norm": 33.11394500732422, "learning_rate": 4.713333333333333e-05, "loss": 1.7239, "step": 1903 }, { "epoch": 15.232, "grad_norm": 23.61109161376953, "learning_rate": 4.7128888888888886e-05, "loss": 1.686, "step": 1904 }, { "epoch": 15.24, "grad_norm": 22.321460723876953, "learning_rate": 4.712444444444445e-05, "loss": 1.2261, "step": 1905 }, { "epoch": 15.248, "grad_norm": 19.35871696472168, "learning_rate": 4.712e-05, "loss": 1.4893, "step": 1906 }, { "epoch": 15.256, "grad_norm": 18.072656631469727, "learning_rate": 4.711555555555556e-05, "loss": 1.4322, "step": 1907 }, { "epoch": 15.264, "grad_norm": 37.10190200805664, "learning_rate": 4.711111111111111e-05, "loss": 1.5858, "step": 1908 }, { "epoch": 15.272, "grad_norm": 48.993072509765625, "learning_rate": 4.7106666666666674e-05, "loss": 1.3985, "step": 1909 }, { "epoch": 15.28, "grad_norm": 73.19894409179688, "learning_rate": 4.710222222222222e-05, "loss": 1.2154, "step": 1910 }, { "epoch": 15.288, "grad_norm": 30.761159896850586, "learning_rate": 4.709777777777778e-05, "loss": 1.3692, "step": 1911 }, { "epoch": 15.296, "grad_norm": 26.049713134765625, "learning_rate": 4.709333333333333e-05, "loss": 1.2155, "step": 1912 }, { "epoch": 15.304, "grad_norm": 20.253314971923828, "learning_rate": 4.7088888888888894e-05, "loss": 1.6575, "step": 1913 }, { "epoch": 15.312, "grad_norm": 36.796722412109375, "learning_rate": 4.708444444444445e-05, "loss": 1.3215, "step": 1914 }, { "epoch": 15.32, "grad_norm": 24.541576385498047, "learning_rate": 4.708e-05, "loss": 1.3333, "step": 1915 }, { "epoch": 15.328, "grad_norm": 25.014474868774414, "learning_rate": 4.707555555555556e-05, "loss": 1.3172, "step": 1916 }, { "epoch": 15.336, "grad_norm": 40.15852355957031, "learning_rate": 4.707111111111111e-05, "loss": 1.6126, 
"step": 1917 }, { "epoch": 15.344, "grad_norm": 32.24897384643555, "learning_rate": 4.706666666666667e-05, "loss": 1.844, "step": 1918 }, { "epoch": 15.352, "grad_norm": 37.20631408691406, "learning_rate": 4.706222222222222e-05, "loss": 1.3133, "step": 1919 }, { "epoch": 15.36, "grad_norm": 58.931419372558594, "learning_rate": 4.705777777777778e-05, "loss": 0.9977, "step": 1920 }, { "epoch": 15.368, "grad_norm": 36.718658447265625, "learning_rate": 4.705333333333334e-05, "loss": 1.1394, "step": 1921 }, { "epoch": 15.376, "grad_norm": 42.108543395996094, "learning_rate": 4.7048888888888894e-05, "loss": 1.7193, "step": 1922 }, { "epoch": 15.384, "grad_norm": 42.76636505126953, "learning_rate": 4.704444444444445e-05, "loss": 1.1155, "step": 1923 }, { "epoch": 15.392, "grad_norm": 18.346710205078125, "learning_rate": 4.7040000000000004e-05, "loss": 1.4608, "step": 1924 }, { "epoch": 15.4, "grad_norm": 29.987812042236328, "learning_rate": 4.703555555555556e-05, "loss": 1.0995, "step": 1925 }, { "epoch": 15.408, "grad_norm": 22.691455841064453, "learning_rate": 4.703111111111111e-05, "loss": 1.0794, "step": 1926 }, { "epoch": 15.416, "grad_norm": 22.829893112182617, "learning_rate": 4.702666666666667e-05, "loss": 1.2055, "step": 1927 }, { "epoch": 15.424, "grad_norm": 49.62306213378906, "learning_rate": 4.702222222222222e-05, "loss": 1.2507, "step": 1928 }, { "epoch": 15.432, "grad_norm": 539.6380615234375, "learning_rate": 4.701777777777778e-05, "loss": 1.542, "step": 1929 }, { "epoch": 15.44, "grad_norm": 19.526586532592773, "learning_rate": 4.701333333333334e-05, "loss": 1.6128, "step": 1930 }, { "epoch": 15.448, "grad_norm": 76.19386291503906, "learning_rate": 4.7008888888888894e-05, "loss": 1.478, "step": 1931 }, { "epoch": 15.456, "grad_norm": 59.096473693847656, "learning_rate": 4.700444444444444e-05, "loss": 1.8579, "step": 1932 }, { "epoch": 15.464, "grad_norm": 36.42529296875, "learning_rate": 4.7e-05, "loss": 1.047, "step": 1933 }, { "epoch": 15.472, 
"grad_norm": 24.325971603393555, "learning_rate": 4.699555555555556e-05, "loss": 1.4219, "step": 1934 }, { "epoch": 15.48, "grad_norm": 58.047393798828125, "learning_rate": 4.6991111111111114e-05, "loss": 1.2309, "step": 1935 }, { "epoch": 15.488, "grad_norm": 25.881946563720703, "learning_rate": 4.698666666666667e-05, "loss": 1.2389, "step": 1936 }, { "epoch": 15.496, "grad_norm": 33.096893310546875, "learning_rate": 4.6982222222222223e-05, "loss": 1.608, "step": 1937 }, { "epoch": 15.504, "grad_norm": 32.763370513916016, "learning_rate": 4.6977777777777785e-05, "loss": 1.2743, "step": 1938 }, { "epoch": 15.512, "grad_norm": 26.600425720214844, "learning_rate": 4.697333333333333e-05, "loss": 0.7245, "step": 1939 }, { "epoch": 15.52, "grad_norm": 28.827957153320312, "learning_rate": 4.696888888888889e-05, "loss": 1.6928, "step": 1940 }, { "epoch": 15.528, "grad_norm": 59.81801223754883, "learning_rate": 4.696444444444444e-05, "loss": 1.359, "step": 1941 }, { "epoch": 15.536, "grad_norm": 85.19742584228516, "learning_rate": 4.6960000000000004e-05, "loss": 1.3947, "step": 1942 }, { "epoch": 15.544, "grad_norm": 75.34698486328125, "learning_rate": 4.695555555555556e-05, "loss": 1.5642, "step": 1943 }, { "epoch": 15.552, "grad_norm": 62.839351654052734, "learning_rate": 4.6951111111111114e-05, "loss": 1.1139, "step": 1944 }, { "epoch": 15.56, "grad_norm": 39.36369323730469, "learning_rate": 4.694666666666667e-05, "loss": 1.6308, "step": 1945 }, { "epoch": 15.568, "grad_norm": 39.02461624145508, "learning_rate": 4.6942222222222224e-05, "loss": 1.9212, "step": 1946 }, { "epoch": 15.576, "grad_norm": 42.21596908569336, "learning_rate": 4.693777777777778e-05, "loss": 1.8613, "step": 1947 }, { "epoch": 15.584, "grad_norm": 168.50909423828125, "learning_rate": 4.6933333333333333e-05, "loss": 1.1273, "step": 1948 }, { "epoch": 15.592, "grad_norm": 24.385343551635742, "learning_rate": 4.692888888888889e-05, "loss": 2.6929, "step": 1949 }, { "epoch": 15.6, "grad_norm": 
28.338693618774414, "learning_rate": 4.692444444444445e-05, "loss": 1.636, "step": 1950 }, { "epoch": 15.608, "grad_norm": 30.4067325592041, "learning_rate": 4.6920000000000005e-05, "loss": 1.3809, "step": 1951 }, { "epoch": 15.616, "grad_norm": 37.749996185302734, "learning_rate": 4.691555555555556e-05, "loss": 1.3311, "step": 1952 }, { "epoch": 15.624, "grad_norm": 50.89385223388672, "learning_rate": 4.6911111111111114e-05, "loss": 1.7512, "step": 1953 }, { "epoch": 15.632, "grad_norm": 26.39379119873047, "learning_rate": 4.690666666666667e-05, "loss": 1.4732, "step": 1954 }, { "epoch": 15.64, "grad_norm": 30.166221618652344, "learning_rate": 4.6902222222222224e-05, "loss": 1.1877, "step": 1955 }, { "epoch": 15.648, "grad_norm": 77.93148803710938, "learning_rate": 4.689777777777778e-05, "loss": 1.7026, "step": 1956 }, { "epoch": 15.656, "grad_norm": 48.1814079284668, "learning_rate": 4.6893333333333334e-05, "loss": 1.2915, "step": 1957 }, { "epoch": 15.664, "grad_norm": 42.28075408935547, "learning_rate": 4.6888888888888895e-05, "loss": 1.2768, "step": 1958 }, { "epoch": 15.672, "grad_norm": 59.05986785888672, "learning_rate": 4.688444444444445e-05, "loss": 1.5943, "step": 1959 }, { "epoch": 15.68, "grad_norm": 50.804412841796875, "learning_rate": 4.688e-05, "loss": 1.4731, "step": 1960 }, { "epoch": 15.688, "grad_norm": 25.72974967956543, "learning_rate": 4.687555555555555e-05, "loss": 1.343, "step": 1961 }, { "epoch": 15.696, "grad_norm": 26.76974868774414, "learning_rate": 4.6871111111111115e-05, "loss": 1.3275, "step": 1962 }, { "epoch": 15.704, "grad_norm": 34.08677291870117, "learning_rate": 4.686666666666667e-05, "loss": 0.9825, "step": 1963 }, { "epoch": 15.712, "grad_norm": 33.09025192260742, "learning_rate": 4.6862222222222225e-05, "loss": 2.2742, "step": 1964 }, { "epoch": 15.72, "grad_norm": 42.920616149902344, "learning_rate": 4.685777777777778e-05, "loss": 1.0112, "step": 1965 }, { "epoch": 15.728, "grad_norm": 109.45634460449219, "learning_rate": 
4.685333333333334e-05, "loss": 1.7919, "step": 1966 }, { "epoch": 15.736, "grad_norm": 37.0184326171875, "learning_rate": 4.684888888888889e-05, "loss": 1.6512, "step": 1967 }, { "epoch": 15.744, "grad_norm": 28.781312942504883, "learning_rate": 4.6844444444444444e-05, "loss": 1.167, "step": 1968 }, { "epoch": 15.752, "grad_norm": 30.612897872924805, "learning_rate": 4.684e-05, "loss": 1.6579, "step": 1969 }, { "epoch": 15.76, "grad_norm": 58.85970687866211, "learning_rate": 4.683555555555556e-05, "loss": 2.6481, "step": 1970 }, { "epoch": 15.768, "grad_norm": 26.251840591430664, "learning_rate": 4.6831111111111115e-05, "loss": 1.7424, "step": 1971 }, { "epoch": 15.776, "grad_norm": 26.084041595458984, "learning_rate": 4.682666666666667e-05, "loss": 1.4521, "step": 1972 }, { "epoch": 15.784, "grad_norm": 32.04871368408203, "learning_rate": 4.6822222222222225e-05, "loss": 1.1517, "step": 1973 }, { "epoch": 15.792, "grad_norm": 36.613677978515625, "learning_rate": 4.681777777777778e-05, "loss": 1.5034, "step": 1974 }, { "epoch": 15.8, "grad_norm": 44.03630065917969, "learning_rate": 4.6813333333333335e-05, "loss": 1.7677, "step": 1975 }, { "epoch": 15.808, "grad_norm": 38.14585876464844, "learning_rate": 4.680888888888889e-05, "loss": 0.9943, "step": 1976 }, { "epoch": 15.816, "grad_norm": 27.363117218017578, "learning_rate": 4.6804444444444444e-05, "loss": 1.0661, "step": 1977 }, { "epoch": 15.824, "grad_norm": 20.24018096923828, "learning_rate": 4.6800000000000006e-05, "loss": 1.8013, "step": 1978 }, { "epoch": 15.832, "grad_norm": 43.16933822631836, "learning_rate": 4.679555555555556e-05, "loss": 1.8783, "step": 1979 }, { "epoch": 15.84, "grad_norm": 38.081886291503906, "learning_rate": 4.6791111111111116e-05, "loss": 1.3547, "step": 1980 }, { "epoch": 15.848, "grad_norm": 66.78762817382812, "learning_rate": 4.678666666666667e-05, "loss": 1.1795, "step": 1981 }, { "epoch": 15.856, "grad_norm": 38.29312515258789, "learning_rate": 4.678222222222222e-05, "loss": 
1.2578, "step": 1982 }, { "epoch": 15.864, "grad_norm": 41.797367095947266, "learning_rate": 4.677777777777778e-05, "loss": 1.4447, "step": 1983 }, { "epoch": 15.872, "grad_norm": 43.231937408447266, "learning_rate": 4.6773333333333335e-05, "loss": 1.2967, "step": 1984 }, { "epoch": 15.88, "grad_norm": 19.414093017578125, "learning_rate": 4.676888888888889e-05, "loss": 1.1974, "step": 1985 }, { "epoch": 15.888, "grad_norm": 40.66779327392578, "learning_rate": 4.6764444444444445e-05, "loss": 1.3983, "step": 1986 }, { "epoch": 15.896, "grad_norm": 26.405973434448242, "learning_rate": 4.6760000000000006e-05, "loss": 1.3954, "step": 1987 }, { "epoch": 15.904, "grad_norm": 25.496784210205078, "learning_rate": 4.675555555555556e-05, "loss": 1.7672, "step": 1988 }, { "epoch": 15.912, "grad_norm": 24.54131507873535, "learning_rate": 4.675111111111111e-05, "loss": 1.4717, "step": 1989 }, { "epoch": 15.92, "grad_norm": 42.83162307739258, "learning_rate": 4.6746666666666664e-05, "loss": 1.0555, "step": 1990 }, { "epoch": 15.928, "grad_norm": 25.14067840576172, "learning_rate": 4.6742222222222226e-05, "loss": 1.5392, "step": 1991 }, { "epoch": 15.936, "grad_norm": 24.597490310668945, "learning_rate": 4.673777777777778e-05, "loss": 1.6909, "step": 1992 }, { "epoch": 15.943999999999999, "grad_norm": 35.442447662353516, "learning_rate": 4.6733333333333335e-05, "loss": 1.2474, "step": 1993 }, { "epoch": 15.952, "grad_norm": 33.84844207763672, "learning_rate": 4.672888888888889e-05, "loss": 1.8648, "step": 1994 }, { "epoch": 15.96, "grad_norm": 42.77531051635742, "learning_rate": 4.672444444444445e-05, "loss": 0.8676, "step": 1995 }, { "epoch": 15.968, "grad_norm": 21.61330223083496, "learning_rate": 4.672e-05, "loss": 1.1125, "step": 1996 }, { "epoch": 15.975999999999999, "grad_norm": 47.06346130371094, "learning_rate": 4.6715555555555555e-05, "loss": 1.8615, "step": 1997 }, { "epoch": 15.984, "grad_norm": 24.25872039794922, "learning_rate": 4.671111111111111e-05, "loss": 1.2596, 
"step": 1998 }, { "epoch": 15.992, "grad_norm": 78.8768539428711, "learning_rate": 4.670666666666667e-05, "loss": 1.2928, "step": 1999 }, { "epoch": 16.0, "grad_norm": 110.39242553710938, "learning_rate": 4.6702222222222226e-05, "loss": 1.5319, "step": 2000 }, { "epoch": 16.0, "eval_loss": 1.5054142475128174, "eval_map": 0.2578, "eval_map_50": 0.5013, "eval_map_75": 0.2388, "eval_map_Coverall": 0.5264, "eval_map_Face_Shield": 0.2072, "eval_map_Gloves": 0.2109, "eval_map_Goggles": 0.0452, "eval_map_Mask": 0.2991, "eval_map_large": 0.2813, "eval_map_medium": 0.1327, "eval_map_small": -1.0, "eval_mar_1": 0.2336, "eval_mar_10": 0.4547, "eval_mar_100": 0.4825, "eval_mar_100_Coverall": 0.7, "eval_mar_100_Face_Shield": 0.6529, "eval_mar_100_Gloves": 0.3475, "eval_mar_100_Goggles": 0.2969, "eval_mar_100_Mask": 0.4154, "eval_mar_large": 0.5511, "eval_mar_medium": 0.2245, "eval_mar_small": -1.0, "eval_runtime": 3.7967, "eval_samples_per_second": 7.638, "eval_steps_per_second": 0.527, "step": 2000 }, { "epoch": 16.008, "grad_norm": 45.15663146972656, "learning_rate": 4.669777777777778e-05, "loss": 1.2615, "step": 2001 }, { "epoch": 16.016, "grad_norm": 28.435338973999023, "learning_rate": 4.6693333333333336e-05, "loss": 1.0795, "step": 2002 }, { "epoch": 16.024, "grad_norm": 31.308496475219727, "learning_rate": 4.668888888888889e-05, "loss": 1.4544, "step": 2003 }, { "epoch": 16.032, "grad_norm": 35.38277053833008, "learning_rate": 4.6684444444444445e-05, "loss": 2.2783, "step": 2004 }, { "epoch": 16.04, "grad_norm": 48.23637771606445, "learning_rate": 4.668e-05, "loss": 1.4467, "step": 2005 }, { "epoch": 16.048, "grad_norm": 30.234214782714844, "learning_rate": 4.6675555555555555e-05, "loss": 1.3046, "step": 2006 }, { "epoch": 16.056, "grad_norm": 48.4248161315918, "learning_rate": 4.667111111111112e-05, "loss": 1.2193, "step": 2007 }, { "epoch": 16.064, "grad_norm": 25.00201416015625, "learning_rate": 4.666666666666667e-05, "loss": 1.2043, "step": 2008 }, { "epoch": 16.072, 
"grad_norm": 40.88947296142578, "learning_rate": 4.6662222222222226e-05, "loss": 1.5298, "step": 2009 }, { "epoch": 16.08, "grad_norm": 40.145347595214844, "learning_rate": 4.665777777777778e-05, "loss": 1.4615, "step": 2010 }, { "epoch": 16.088, "grad_norm": 45.862361907958984, "learning_rate": 4.6653333333333336e-05, "loss": 1.281, "step": 2011 }, { "epoch": 16.096, "grad_norm": 39.44308853149414, "learning_rate": 4.664888888888889e-05, "loss": 1.3647, "step": 2012 }, { "epoch": 16.104, "grad_norm": 60.0455207824707, "learning_rate": 4.6644444444444446e-05, "loss": 1.415, "step": 2013 }, { "epoch": 16.112, "grad_norm": 48.034263610839844, "learning_rate": 4.664e-05, "loss": 1.1685, "step": 2014 }, { "epoch": 16.12, "grad_norm": 55.12480545043945, "learning_rate": 4.663555555555556e-05, "loss": 1.3277, "step": 2015 }, { "epoch": 16.128, "grad_norm": 18.60493278503418, "learning_rate": 4.663111111111112e-05, "loss": 0.9488, "step": 2016 }, { "epoch": 16.136, "grad_norm": 22.46178436279297, "learning_rate": 4.6626666666666665e-05, "loss": 1.3409, "step": 2017 }, { "epoch": 16.144, "grad_norm": 24.773807525634766, "learning_rate": 4.662222222222222e-05, "loss": 1.5967, "step": 2018 }, { "epoch": 16.152, "grad_norm": 31.493928909301758, "learning_rate": 4.661777777777778e-05, "loss": 1.3867, "step": 2019 }, { "epoch": 16.16, "grad_norm": 28.245956420898438, "learning_rate": 4.6613333333333337e-05, "loss": 1.4417, "step": 2020 }, { "epoch": 16.168, "grad_norm": 114.4273452758789, "learning_rate": 4.660888888888889e-05, "loss": 1.4089, "step": 2021 }, { "epoch": 16.176, "grad_norm": 31.558944702148438, "learning_rate": 4.6604444444444446e-05, "loss": 1.3585, "step": 2022 }, { "epoch": 16.184, "grad_norm": 24.028406143188477, "learning_rate": 4.660000000000001e-05, "loss": 1.1966, "step": 2023 }, { "epoch": 16.192, "grad_norm": 21.361785888671875, "learning_rate": 4.6595555555555556e-05, "loss": 1.2616, "step": 2024 }, { "epoch": 16.2, "grad_norm": 39.134986877441406, 
"learning_rate": 4.659111111111111e-05, "loss": 1.8396, "step": 2025 }, { "epoch": 16.208, "grad_norm": 29.546010971069336, "learning_rate": 4.6586666666666666e-05, "loss": 2.3079, "step": 2026 }, { "epoch": 16.216, "grad_norm": 37.5776252746582, "learning_rate": 4.658222222222223e-05, "loss": 1.0744, "step": 2027 }, { "epoch": 16.224, "grad_norm": 42.72591781616211, "learning_rate": 4.657777777777778e-05, "loss": 1.183, "step": 2028 }, { "epoch": 16.232, "grad_norm": 23.949249267578125, "learning_rate": 4.657333333333334e-05, "loss": 1.1455, "step": 2029 }, { "epoch": 16.24, "grad_norm": 21.73378562927246, "learning_rate": 4.656888888888889e-05, "loss": 2.0539, "step": 2030 }, { "epoch": 16.248, "grad_norm": 24.56477928161621, "learning_rate": 4.6564444444444447e-05, "loss": 1.131, "step": 2031 }, { "epoch": 16.256, "grad_norm": 35.882198333740234, "learning_rate": 4.656e-05, "loss": 1.5885, "step": 2032 }, { "epoch": 16.264, "grad_norm": 29.386661529541016, "learning_rate": 4.6555555555555556e-05, "loss": 1.469, "step": 2033 }, { "epoch": 16.272, "grad_norm": 48.664886474609375, "learning_rate": 4.655111111111111e-05, "loss": 1.7141, "step": 2034 }, { "epoch": 16.28, "grad_norm": 59.51313781738281, "learning_rate": 4.6546666666666666e-05, "loss": 1.0762, "step": 2035 }, { "epoch": 16.288, "grad_norm": 34.75484085083008, "learning_rate": 4.654222222222223e-05, "loss": 1.5854, "step": 2036 }, { "epoch": 16.296, "grad_norm": 20.721771240234375, "learning_rate": 4.653777777777778e-05, "loss": 1.2344, "step": 2037 }, { "epoch": 16.304, "grad_norm": 28.101261138916016, "learning_rate": 4.653333333333334e-05, "loss": 1.5855, "step": 2038 }, { "epoch": 16.312, "grad_norm": 55.14041519165039, "learning_rate": 4.6528888888888885e-05, "loss": 0.6747, "step": 2039 }, { "epoch": 16.32, "grad_norm": 59.44183349609375, "learning_rate": 4.652444444444445e-05, "loss": 1.2922, "step": 2040 }, { "epoch": 16.328, "grad_norm": 31.96394157409668, "learning_rate": 4.652e-05, "loss": 
1.2798, "step": 2041 }, { "epoch": 16.336, "grad_norm": 34.213111877441406, "learning_rate": 4.651555555555556e-05, "loss": 1.2118, "step": 2042 }, { "epoch": 16.344, "grad_norm": 27.760557174682617, "learning_rate": 4.651111111111111e-05, "loss": 1.9664, "step": 2043 }, { "epoch": 16.352, "grad_norm": 28.92892074584961, "learning_rate": 4.650666666666667e-05, "loss": 1.3326, "step": 2044 }, { "epoch": 16.36, "grad_norm": 21.396284103393555, "learning_rate": 4.650222222222223e-05, "loss": 1.3527, "step": 2045 }, { "epoch": 16.368, "grad_norm": 36.852294921875, "learning_rate": 4.6497777777777776e-05, "loss": 1.1889, "step": 2046 }, { "epoch": 16.376, "grad_norm": 23.924467086791992, "learning_rate": 4.649333333333333e-05, "loss": 1.5626, "step": 2047 }, { "epoch": 16.384, "grad_norm": 25.419418334960938, "learning_rate": 4.648888888888889e-05, "loss": 1.3636, "step": 2048 }, { "epoch": 16.392, "grad_norm": 41.51591491699219, "learning_rate": 4.648444444444445e-05, "loss": 1.2776, "step": 2049 }, { "epoch": 16.4, "grad_norm": 22.246976852416992, "learning_rate": 4.648e-05, "loss": 1.8042, "step": 2050 }, { "epoch": 16.408, "grad_norm": 63.84178161621094, "learning_rate": 4.647555555555556e-05, "loss": 3.2143, "step": 2051 }, { "epoch": 16.416, "grad_norm": 25.927316665649414, "learning_rate": 4.647111111111111e-05, "loss": 1.001, "step": 2052 }, { "epoch": 16.424, "grad_norm": 21.19340705871582, "learning_rate": 4.646666666666667e-05, "loss": 1.3028, "step": 2053 }, { "epoch": 16.432, "grad_norm": 21.541719436645508, "learning_rate": 4.646222222222222e-05, "loss": 1.0427, "step": 2054 }, { "epoch": 16.44, "grad_norm": 33.537940979003906, "learning_rate": 4.6457777777777776e-05, "loss": 1.1562, "step": 2055 }, { "epoch": 16.448, "grad_norm": 18.166362762451172, "learning_rate": 4.645333333333334e-05, "loss": 1.6039, "step": 2056 }, { "epoch": 16.456, "grad_norm": 29.76205062866211, "learning_rate": 4.644888888888889e-05, "loss": 1.1151, "step": 2057 }, { "epoch": 
16.464, "grad_norm": 94.79408264160156, "learning_rate": 4.644444444444445e-05, "loss": 1.212, "step": 2058 }, { "epoch": 16.472, "grad_norm": 27.17621421813965, "learning_rate": 4.644e-05, "loss": 0.9288, "step": 2059 }, { "epoch": 16.48, "grad_norm": 25.890039443969727, "learning_rate": 4.643555555555556e-05, "loss": 1.543, "step": 2060 }, { "epoch": 16.488, "grad_norm": 55.27799987792969, "learning_rate": 4.643111111111111e-05, "loss": 1.5261, "step": 2061 }, { "epoch": 16.496, "grad_norm": 54.912803649902344, "learning_rate": 4.642666666666667e-05, "loss": 1.2142, "step": 2062 }, { "epoch": 16.504, "grad_norm": 81.92450714111328, "learning_rate": 4.642222222222222e-05, "loss": 1.2211, "step": 2063 }, { "epoch": 16.512, "grad_norm": 35.99851989746094, "learning_rate": 4.6417777777777784e-05, "loss": 1.3984, "step": 2064 }, { "epoch": 16.52, "grad_norm": 31.74465560913086, "learning_rate": 4.641333333333334e-05, "loss": 1.1783, "step": 2065 }, { "epoch": 16.528, "grad_norm": 19.804767608642578, "learning_rate": 4.640888888888889e-05, "loss": 1.5911, "step": 2066 }, { "epoch": 16.536, "grad_norm": 40.707908630371094, "learning_rate": 4.640444444444445e-05, "loss": 1.2554, "step": 2067 }, { "epoch": 16.544, "grad_norm": 19.5363712310791, "learning_rate": 4.64e-05, "loss": 1.3291, "step": 2068 }, { "epoch": 16.552, "grad_norm": 21.619274139404297, "learning_rate": 4.639555555555556e-05, "loss": 1.414, "step": 2069 }, { "epoch": 16.56, "grad_norm": 89.72694396972656, "learning_rate": 4.639111111111111e-05, "loss": 1.1384, "step": 2070 }, { "epoch": 16.568, "grad_norm": 29.070104598999023, "learning_rate": 4.638666666666667e-05, "loss": 1.4972, "step": 2071 }, { "epoch": 16.576, "grad_norm": 26.166406631469727, "learning_rate": 4.638222222222223e-05, "loss": 1.15, "step": 2072 }, { "epoch": 16.584, "grad_norm": 29.565467834472656, "learning_rate": 4.6377777777777784e-05, "loss": 1.1847, "step": 2073 }, { "epoch": 16.592, "grad_norm": 15.523707389831543, 
"learning_rate": 4.637333333333333e-05, "loss": 1.4245, "step": 2074 }, { "epoch": 16.6, "grad_norm": 21.258695602416992, "learning_rate": 4.636888888888889e-05, "loss": 1.6662, "step": 2075 }, { "epoch": 16.608, "grad_norm": 33.81998825073242, "learning_rate": 4.636444444444445e-05, "loss": 1.0698, "step": 2076 }, { "epoch": 16.616, "grad_norm": 28.526639938354492, "learning_rate": 4.636e-05, "loss": 1.0612, "step": 2077 }, { "epoch": 16.624, "grad_norm": 81.02356719970703, "learning_rate": 4.635555555555556e-05, "loss": 1.4003, "step": 2078 }, { "epoch": 16.632, "grad_norm": 23.296937942504883, "learning_rate": 4.635111111111111e-05, "loss": 1.1501, "step": 2079 }, { "epoch": 16.64, "grad_norm": 24.488555908203125, "learning_rate": 4.6346666666666675e-05, "loss": 1.9659, "step": 2080 }, { "epoch": 16.648, "grad_norm": 44.10194396972656, "learning_rate": 4.634222222222222e-05, "loss": 1.5086, "step": 2081 }, { "epoch": 16.656, "grad_norm": 59.39401626586914, "learning_rate": 4.633777777777778e-05, "loss": 1.8754, "step": 2082 }, { "epoch": 16.664, "grad_norm": 41.004234313964844, "learning_rate": 4.633333333333333e-05, "loss": 1.1647, "step": 2083 }, { "epoch": 16.672, "grad_norm": 31.216148376464844, "learning_rate": 4.632888888888889e-05, "loss": 1.235, "step": 2084 }, { "epoch": 16.68, "grad_norm": 27.326839447021484, "learning_rate": 4.632444444444445e-05, "loss": 1.404, "step": 2085 }, { "epoch": 16.688, "grad_norm": 32.29030990600586, "learning_rate": 4.6320000000000004e-05, "loss": 1.3667, "step": 2086 }, { "epoch": 16.696, "grad_norm": 96.6994400024414, "learning_rate": 4.631555555555556e-05, "loss": 1.5774, "step": 2087 }, { "epoch": 16.704, "grad_norm": 35.22106170654297, "learning_rate": 4.6311111111111113e-05, "loss": 1.1957, "step": 2088 }, { "epoch": 16.712, "grad_norm": 19.900550842285156, "learning_rate": 4.630666666666667e-05, "loss": 1.0205, "step": 2089 }, { "epoch": 16.72, "grad_norm": 62.15876388549805, "learning_rate": 4.630222222222222e-05, 
"loss": 1.2109, "step": 2090 }, { "epoch": 16.728, "grad_norm": 23.75916862487793, "learning_rate": 4.629777777777778e-05, "loss": 1.6066, "step": 2091 }, { "epoch": 16.736, "grad_norm": 18.877408981323242, "learning_rate": 4.629333333333333e-05, "loss": 1.7382, "step": 2092 }, { "epoch": 16.744, "grad_norm": 37.90680694580078, "learning_rate": 4.6288888888888894e-05, "loss": 1.4251, "step": 2093 }, { "epoch": 16.752, "grad_norm": 21.58987045288086, "learning_rate": 4.628444444444445e-05, "loss": 1.2339, "step": 2094 }, { "epoch": 16.76, "grad_norm": 28.526466369628906, "learning_rate": 4.6280000000000004e-05, "loss": 1.1471, "step": 2095 }, { "epoch": 16.768, "grad_norm": 16.3223819732666, "learning_rate": 4.627555555555555e-05, "loss": 1.0764, "step": 2096 }, { "epoch": 16.776, "grad_norm": 20.320274353027344, "learning_rate": 4.6271111111111114e-05, "loss": 1.3616, "step": 2097 }, { "epoch": 16.784, "grad_norm": 19.995094299316406, "learning_rate": 4.626666666666667e-05, "loss": 1.5148, "step": 2098 }, { "epoch": 16.792, "grad_norm": 25.990875244140625, "learning_rate": 4.6262222222222224e-05, "loss": 1.8578, "step": 2099 }, { "epoch": 16.8, "grad_norm": 20.03815269470215, "learning_rate": 4.625777777777778e-05, "loss": 1.5367, "step": 2100 }, { "epoch": 16.808, "grad_norm": 67.40023803710938, "learning_rate": 4.625333333333334e-05, "loss": 1.1472, "step": 2101 }, { "epoch": 16.816, "grad_norm": 29.907917022705078, "learning_rate": 4.6248888888888895e-05, "loss": 1.6426, "step": 2102 }, { "epoch": 16.824, "grad_norm": 40.42079544067383, "learning_rate": 4.624444444444444e-05, "loss": 1.2214, "step": 2103 }, { "epoch": 16.832, "grad_norm": 81.24820709228516, "learning_rate": 4.624e-05, "loss": 1.4515, "step": 2104 }, { "epoch": 16.84, "grad_norm": 28.99948501586914, "learning_rate": 4.623555555555556e-05, "loss": 2.0272, "step": 2105 }, { "epoch": 16.848, "grad_norm": 37.561492919921875, "learning_rate": 4.6231111111111114e-05, "loss": 1.2685, "step": 2106 }, { 
"epoch": 16.856, "grad_norm": 46.046714782714844, "learning_rate": 4.622666666666667e-05, "loss": 1.139, "step": 2107 }, { "epoch": 16.864, "grad_norm": 45.04513931274414, "learning_rate": 4.6222222222222224e-05, "loss": 1.1526, "step": 2108 }, { "epoch": 16.872, "grad_norm": 57.14850616455078, "learning_rate": 4.621777777777778e-05, "loss": 1.159, "step": 2109 }, { "epoch": 16.88, "grad_norm": 46.4625358581543, "learning_rate": 4.6213333333333334e-05, "loss": 1.488, "step": 2110 }, { "epoch": 16.888, "grad_norm": 30.76515769958496, "learning_rate": 4.620888888888889e-05, "loss": 1.404, "step": 2111 }, { "epoch": 16.896, "grad_norm": 43.740928649902344, "learning_rate": 4.620444444444444e-05, "loss": 1.6585, "step": 2112 }, { "epoch": 16.904, "grad_norm": 127.12969970703125, "learning_rate": 4.6200000000000005e-05, "loss": 1.7363, "step": 2113 }, { "epoch": 16.912, "grad_norm": 23.677539825439453, "learning_rate": 4.619555555555556e-05, "loss": 1.267, "step": 2114 }, { "epoch": 16.92, "grad_norm": 127.18306732177734, "learning_rate": 4.6191111111111115e-05, "loss": 1.3839, "step": 2115 }, { "epoch": 16.928, "grad_norm": 26.687253952026367, "learning_rate": 4.618666666666667e-05, "loss": 1.8335, "step": 2116 }, { "epoch": 16.936, "grad_norm": 23.2551212310791, "learning_rate": 4.6182222222222224e-05, "loss": 1.2673, "step": 2117 }, { "epoch": 16.944, "grad_norm": 29.341976165771484, "learning_rate": 4.617777777777778e-05, "loss": 1.2547, "step": 2118 }, { "epoch": 16.951999999999998, "grad_norm": 33.7431640625, "learning_rate": 4.6173333333333334e-05, "loss": 1.278, "step": 2119 }, { "epoch": 16.96, "grad_norm": 25.081298828125, "learning_rate": 4.616888888888889e-05, "loss": 1.1794, "step": 2120 }, { "epoch": 16.968, "grad_norm": 19.9505558013916, "learning_rate": 4.616444444444445e-05, "loss": 0.9797, "step": 2121 }, { "epoch": 16.976, "grad_norm": 70.70762634277344, "learning_rate": 4.6160000000000005e-05, "loss": 2.3626, "step": 2122 }, { "epoch": 16.984, 
"grad_norm": 60.020286560058594, "learning_rate": 4.615555555555556e-05, "loss": 1.3482, "step": 2123 }, { "epoch": 16.992, "grad_norm": 51.47914123535156, "learning_rate": 4.6151111111111115e-05, "loss": 1.3537, "step": 2124 }, { "epoch": 17.0, "grad_norm": 50.26940155029297, "learning_rate": 4.614666666666667e-05, "loss": 1.5269, "step": 2125 }, { "epoch": 17.0, "eval_loss": 1.3584022521972656, "eval_map": 0.2612, "eval_map_50": 0.5099, "eval_map_75": 0.2279, "eval_map_Coverall": 0.523, "eval_map_Face_Shield": 0.2531, "eval_map_Gloves": 0.2093, "eval_map_Goggles": 0.0288, "eval_map_Mask": 0.2916, "eval_map_large": 0.295, "eval_map_medium": 0.1202, "eval_map_small": -1.0, "eval_mar_1": 0.2259, "eval_mar_10": 0.4578, "eval_mar_100": 0.4825, "eval_mar_100_Coverall": 0.7267, "eval_mar_100_Face_Shield": 0.6529, "eval_mar_100_Gloves": 0.3967, "eval_mar_100_Goggles": 0.2438, "eval_mar_100_Mask": 0.3923, "eval_mar_large": 0.5765, "eval_mar_medium": 0.2686, "eval_mar_small": -1.0, "eval_runtime": 4.189, "eval_samples_per_second": 6.923, "eval_steps_per_second": 0.477, "step": 2125 }, { "epoch": 17.008, "grad_norm": 39.196075439453125, "learning_rate": 4.6142222222222225e-05, "loss": 1.7327, "step": 2126 }, { "epoch": 17.016, "grad_norm": 26.621572494506836, "learning_rate": 4.613777777777778e-05, "loss": 1.8108, "step": 2127 }, { "epoch": 17.024, "grad_norm": 68.27599334716797, "learning_rate": 4.6133333333333334e-05, "loss": 1.1969, "step": 2128 }, { "epoch": 17.032, "grad_norm": 51.29975509643555, "learning_rate": 4.6128888888888896e-05, "loss": 1.6542, "step": 2129 }, { "epoch": 17.04, "grad_norm": 21.753225326538086, "learning_rate": 4.612444444444445e-05, "loss": 2.0405, "step": 2130 }, { "epoch": 17.048, "grad_norm": 22.42283821105957, "learning_rate": 4.612e-05, "loss": 1.0469, "step": 2131 }, { "epoch": 17.056, "grad_norm": 22.952924728393555, "learning_rate": 4.6115555555555554e-05, "loss": 1.2262, "step": 2132 }, { "epoch": 17.064, "grad_norm": 
18.269418716430664, "learning_rate": 4.6111111111111115e-05, "loss": 1.3747, "step": 2133 }, { "epoch": 17.072, "grad_norm": 47.92393112182617, "learning_rate": 4.610666666666667e-05, "loss": 1.6001, "step": 2134 }, { "epoch": 17.08, "grad_norm": 33.852909088134766, "learning_rate": 4.6102222222222225e-05, "loss": 1.3941, "step": 2135 }, { "epoch": 17.088, "grad_norm": 25.35291290283203, "learning_rate": 4.609777777777778e-05, "loss": 1.039, "step": 2136 }, { "epoch": 17.096, "grad_norm": 21.52263832092285, "learning_rate": 4.6093333333333335e-05, "loss": 1.1832, "step": 2137 }, { "epoch": 17.104, "grad_norm": 48.79566192626953, "learning_rate": 4.608888888888889e-05, "loss": 1.4784, "step": 2138 }, { "epoch": 17.112, "grad_norm": 33.72265625, "learning_rate": 4.6084444444444444e-05, "loss": 1.2501, "step": 2139 }, { "epoch": 17.12, "grad_norm": 28.218050003051758, "learning_rate": 4.608e-05, "loss": 1.766, "step": 2140 }, { "epoch": 17.128, "grad_norm": 37.584442138671875, "learning_rate": 4.6075555555555554e-05, "loss": 1.6016, "step": 2141 }, { "epoch": 17.136, "grad_norm": 23.63691520690918, "learning_rate": 4.6071111111111116e-05, "loss": 1.164, "step": 2142 }, { "epoch": 17.144, "grad_norm": 44.39622116088867, "learning_rate": 4.606666666666667e-05, "loss": 1.4882, "step": 2143 }, { "epoch": 17.152, "grad_norm": 34.78615951538086, "learning_rate": 4.6062222222222225e-05, "loss": 1.4551, "step": 2144 }, { "epoch": 17.16, "grad_norm": 78.42229461669922, "learning_rate": 4.605777777777778e-05, "loss": 1.158, "step": 2145 }, { "epoch": 17.168, "grad_norm": 38.7276496887207, "learning_rate": 4.6053333333333335e-05, "loss": 1.8347, "step": 2146 }, { "epoch": 17.176, "grad_norm": 36.89461898803711, "learning_rate": 4.604888888888889e-05, "loss": 1.5378, "step": 2147 }, { "epoch": 17.184, "grad_norm": 46.22654724121094, "learning_rate": 4.6044444444444445e-05, "loss": 1.4898, "step": 2148 }, { "epoch": 17.192, "grad_norm": 66.2233657836914, "learning_rate": 
4.604e-05, "loss": 1.5476, "step": 2149 }, { "epoch": 17.2, "grad_norm": 47.75509262084961, "learning_rate": 4.603555555555556e-05, "loss": 1.1688, "step": 2150 }, { "epoch": 17.208, "grad_norm": 23.9595947265625, "learning_rate": 4.6031111111111116e-05, "loss": 1.6602, "step": 2151 }, { "epoch": 17.216, "grad_norm": 37.3036994934082, "learning_rate": 4.602666666666667e-05, "loss": 1.336, "step": 2152 }, { "epoch": 17.224, "grad_norm": 41.83155822753906, "learning_rate": 4.602222222222222e-05, "loss": 1.1545, "step": 2153 }, { "epoch": 17.232, "grad_norm": 44.61173629760742, "learning_rate": 4.601777777777778e-05, "loss": 1.3973, "step": 2154 }, { "epoch": 17.24, "grad_norm": 42.56441116333008, "learning_rate": 4.6013333333333336e-05, "loss": 1.2289, "step": 2155 }, { "epoch": 17.248, "grad_norm": 37.08918380737305, "learning_rate": 4.600888888888889e-05, "loss": 1.5201, "step": 2156 }, { "epoch": 17.256, "grad_norm": 78.43424987792969, "learning_rate": 4.6004444444444445e-05, "loss": 1.6758, "step": 2157 }, { "epoch": 17.264, "grad_norm": 27.79692268371582, "learning_rate": 4.600000000000001e-05, "loss": 1.4109, "step": 2158 }, { "epoch": 17.272, "grad_norm": 42.70063781738281, "learning_rate": 4.599555555555556e-05, "loss": 0.9563, "step": 2159 }, { "epoch": 17.28, "grad_norm": 35.146514892578125, "learning_rate": 4.599111111111111e-05, "loss": 1.0688, "step": 2160 }, { "epoch": 17.288, "grad_norm": 33.99504470825195, "learning_rate": 4.5986666666666665e-05, "loss": 1.3832, "step": 2161 }, { "epoch": 17.296, "grad_norm": 20.762367248535156, "learning_rate": 4.5982222222222226e-05, "loss": 1.18, "step": 2162 }, { "epoch": 17.304, "grad_norm": 29.262548446655273, "learning_rate": 4.597777777777778e-05, "loss": 0.9698, "step": 2163 }, { "epoch": 17.312, "grad_norm": 29.8239803314209, "learning_rate": 4.5973333333333336e-05, "loss": 1.3234, "step": 2164 }, { "epoch": 17.32, "grad_norm": 25.564790725708008, "learning_rate": 4.596888888888889e-05, "loss": 1.2409, 
"step": 2165 }, { "epoch": 17.328, "grad_norm": 23.522001266479492, "learning_rate": 4.5964444444444446e-05, "loss": 1.5949, "step": 2166 }, { "epoch": 17.336, "grad_norm": 17.375320434570312, "learning_rate": 4.596e-05, "loss": 3.0145, "step": 2167 }, { "epoch": 17.344, "grad_norm": 37.34791946411133, "learning_rate": 4.5955555555555555e-05, "loss": 1.5847, "step": 2168 }, { "epoch": 17.352, "grad_norm": 56.34286117553711, "learning_rate": 4.595111111111111e-05, "loss": 1.6994, "step": 2169 }, { "epoch": 17.36, "grad_norm": 26.159435272216797, "learning_rate": 4.594666666666667e-05, "loss": 1.281, "step": 2170 }, { "epoch": 17.368, "grad_norm": 34.701480865478516, "learning_rate": 4.5942222222222227e-05, "loss": 1.7413, "step": 2171 }, { "epoch": 17.376, "grad_norm": 42.920997619628906, "learning_rate": 4.593777777777778e-05, "loss": 1.3324, "step": 2172 }, { "epoch": 17.384, "grad_norm": 46.803627014160156, "learning_rate": 4.5933333333333336e-05, "loss": 1.1446, "step": 2173 }, { "epoch": 17.392, "grad_norm": 32.0989875793457, "learning_rate": 4.592888888888889e-05, "loss": 1.2071, "step": 2174 }, { "epoch": 17.4, "grad_norm": 25.46465301513672, "learning_rate": 4.5924444444444446e-05, "loss": 1.0882, "step": 2175 }, { "epoch": 17.408, "grad_norm": 17.630023956298828, "learning_rate": 4.592e-05, "loss": 1.3672, "step": 2176 }, { "epoch": 17.416, "grad_norm": 82.94445037841797, "learning_rate": 4.5915555555555556e-05, "loss": 1.9757, "step": 2177 }, { "epoch": 17.424, "grad_norm": 32.927711486816406, "learning_rate": 4.591111111111112e-05, "loss": 1.0574, "step": 2178 }, { "epoch": 17.432, "grad_norm": 40.796504974365234, "learning_rate": 4.590666666666667e-05, "loss": 1.3866, "step": 2179 }, { "epoch": 17.44, "grad_norm": 96.24600982666016, "learning_rate": 4.590222222222223e-05, "loss": 1.9825, "step": 2180 }, { "epoch": 17.448, "grad_norm": 122.40647888183594, "learning_rate": 4.589777777777778e-05, "loss": 1.195, "step": 2181 }, { "epoch": 17.456, 
"grad_norm": 58.65459442138672, "learning_rate": 4.589333333333334e-05, "loss": 1.1829, "step": 2182 }, { "epoch": 17.464, "grad_norm": 41.629432678222656, "learning_rate": 4.588888888888889e-05, "loss": 0.9434, "step": 2183 }, { "epoch": 17.472, "grad_norm": 33.96433639526367, "learning_rate": 4.5884444444444446e-05, "loss": 1.0722, "step": 2184 }, { "epoch": 17.48, "grad_norm": 50.7461051940918, "learning_rate": 4.588e-05, "loss": 1.645, "step": 2185 }, { "epoch": 17.488, "grad_norm": 55.882442474365234, "learning_rate": 4.587555555555556e-05, "loss": 1.4133, "step": 2186 }, { "epoch": 17.496, "grad_norm": 19.01738739013672, "learning_rate": 4.587111111111112e-05, "loss": 1.2143, "step": 2187 }, { "epoch": 17.504, "grad_norm": 25.149961471557617, "learning_rate": 4.5866666666666666e-05, "loss": 1.1623, "step": 2188 }, { "epoch": 17.512, "grad_norm": 28.221494674682617, "learning_rate": 4.586222222222222e-05, "loss": 1.0421, "step": 2189 }, { "epoch": 17.52, "grad_norm": 31.29536247253418, "learning_rate": 4.5857777777777775e-05, "loss": 1.4285, "step": 2190 }, { "epoch": 17.528, "grad_norm": 60.39698028564453, "learning_rate": 4.585333333333334e-05, "loss": 1.1189, "step": 2191 }, { "epoch": 17.536, "grad_norm": 38.9393196105957, "learning_rate": 4.584888888888889e-05, "loss": 1.4813, "step": 2192 }, { "epoch": 17.544, "grad_norm": 39.73295974731445, "learning_rate": 4.584444444444445e-05, "loss": 1.4478, "step": 2193 }, { "epoch": 17.552, "grad_norm": 91.5682373046875, "learning_rate": 4.584e-05, "loss": 1.3091, "step": 2194 }, { "epoch": 17.56, "grad_norm": 24.43229866027832, "learning_rate": 4.5835555555555556e-05, "loss": 1.2912, "step": 2195 }, { "epoch": 17.568, "grad_norm": 20.074871063232422, "learning_rate": 4.583111111111111e-05, "loss": 1.5536, "step": 2196 }, { "epoch": 17.576, "grad_norm": 202.689453125, "learning_rate": 4.5826666666666666e-05, "loss": 1.2687, "step": 2197 }, { "epoch": 17.584, "grad_norm": 22.882543563842773, "learning_rate": 
4.582222222222222e-05, "loss": 1.1861, "step": 2198 }, { "epoch": 17.592, "grad_norm": 31.381620407104492, "learning_rate": 4.581777777777778e-05, "loss": 1.3316, "step": 2199 }, { "epoch": 17.6, "grad_norm": 31.768274307250977, "learning_rate": 4.581333333333334e-05, "loss": 0.912, "step": 2200 }, { "epoch": 17.608, "grad_norm": 33.298484802246094, "learning_rate": 4.580888888888889e-05, "loss": 1.3507, "step": 2201 }, { "epoch": 17.616, "grad_norm": 343.21258544921875, "learning_rate": 4.580444444444445e-05, "loss": 1.3278, "step": 2202 }, { "epoch": 17.624, "grad_norm": 29.445438385009766, "learning_rate": 4.58e-05, "loss": 2.5434, "step": 2203 }, { "epoch": 17.632, "grad_norm": 36.0821647644043, "learning_rate": 4.579555555555556e-05, "loss": 1.2078, "step": 2204 }, { "epoch": 17.64, "grad_norm": 24.35422134399414, "learning_rate": 4.579111111111111e-05, "loss": 1.8041, "step": 2205 }, { "epoch": 17.648, "grad_norm": 36.78593826293945, "learning_rate": 4.5786666666666666e-05, "loss": 1.6779, "step": 2206 }, { "epoch": 17.656, "grad_norm": 48.543155670166016, "learning_rate": 4.578222222222223e-05, "loss": 1.6597, "step": 2207 }, { "epoch": 17.664, "grad_norm": 31.305368423461914, "learning_rate": 4.577777777777778e-05, "loss": 1.263, "step": 2208 }, { "epoch": 17.672, "grad_norm": 33.57781219482422, "learning_rate": 4.577333333333334e-05, "loss": 1.25, "step": 2209 }, { "epoch": 17.68, "grad_norm": 34.972747802734375, "learning_rate": 4.5768888888888886e-05, "loss": 1.0656, "step": 2210 }, { "epoch": 17.688, "grad_norm": 29.523365020751953, "learning_rate": 4.576444444444445e-05, "loss": 0.7529, "step": 2211 }, { "epoch": 17.696, "grad_norm": 23.62306022644043, "learning_rate": 4.576e-05, "loss": 1.4524, "step": 2212 }, { "epoch": 17.704, "grad_norm": 19.44194984436035, "learning_rate": 4.575555555555556e-05, "loss": 1.3747, "step": 2213 }, { "epoch": 17.712, "grad_norm": 35.08988571166992, "learning_rate": 4.575111111111111e-05, "loss": 0.9644, "step": 2214 }, 
{ "epoch": 17.72, "grad_norm": 26.437124252319336, "learning_rate": 4.5746666666666674e-05, "loss": 1.3189, "step": 2215 }, { "epoch": 17.728, "grad_norm": 30.205020904541016, "learning_rate": 4.574222222222223e-05, "loss": 1.3938, "step": 2216 }, { "epoch": 17.736, "grad_norm": 40.35396194458008, "learning_rate": 4.5737777777777777e-05, "loss": 1.1325, "step": 2217 }, { "epoch": 17.744, "grad_norm": 42.42508316040039, "learning_rate": 4.573333333333333e-05, "loss": 1.2552, "step": 2218 }, { "epoch": 17.752, "grad_norm": 23.397300720214844, "learning_rate": 4.572888888888889e-05, "loss": 1.304, "step": 2219 }, { "epoch": 17.76, "grad_norm": 21.093446731567383, "learning_rate": 4.572444444444445e-05, "loss": 1.3382, "step": 2220 }, { "epoch": 17.768, "grad_norm": 22.64166831970215, "learning_rate": 4.572e-05, "loss": 1.1846, "step": 2221 }, { "epoch": 17.776, "grad_norm": 25.265544891357422, "learning_rate": 4.571555555555556e-05, "loss": 1.9576, "step": 2222 }, { "epoch": 17.784, "grad_norm": 26.927000045776367, "learning_rate": 4.571111111111111e-05, "loss": 1.4111, "step": 2223 }, { "epoch": 17.792, "grad_norm": 31.803913116455078, "learning_rate": 4.570666666666667e-05, "loss": 1.052, "step": 2224 }, { "epoch": 17.8, "grad_norm": 30.310882568359375, "learning_rate": 4.570222222222222e-05, "loss": 1.3139, "step": 2225 }, { "epoch": 17.808, "grad_norm": 22.372089385986328, "learning_rate": 4.569777777777778e-05, "loss": 1.3221, "step": 2226 }, { "epoch": 17.816, "grad_norm": 21.15584945678711, "learning_rate": 4.569333333333334e-05, "loss": 1.2444, "step": 2227 }, { "epoch": 17.824, "grad_norm": 103.80522155761719, "learning_rate": 4.5688888888888893e-05, "loss": 1.2789, "step": 2228 }, { "epoch": 17.832, "grad_norm": 23.442270278930664, "learning_rate": 4.568444444444445e-05, "loss": 0.9631, "step": 2229 }, { "epoch": 17.84, "grad_norm": 25.57155990600586, "learning_rate": 4.568e-05, "loss": 0.8897, "step": 2230 }, { "epoch": 17.848, "grad_norm": 
26.777132034301758, "learning_rate": 4.567555555555556e-05, "loss": 1.5029, "step": 2231 }, { "epoch": 17.856, "grad_norm": 15.47154426574707, "learning_rate": 4.567111111111111e-05, "loss": 0.9612, "step": 2232 }, { "epoch": 17.864, "grad_norm": 28.112972259521484, "learning_rate": 4.566666666666667e-05, "loss": 0.8464, "step": 2233 }, { "epoch": 17.872, "grad_norm": 16.33782386779785, "learning_rate": 4.566222222222222e-05, "loss": 1.1428, "step": 2234 }, { "epoch": 17.88, "grad_norm": 26.07354164123535, "learning_rate": 4.5657777777777784e-05, "loss": 1.2998, "step": 2235 }, { "epoch": 17.888, "grad_norm": 47.80546188354492, "learning_rate": 4.565333333333334e-05, "loss": 0.9928, "step": 2236 }, { "epoch": 17.896, "grad_norm": 51.28022003173828, "learning_rate": 4.5648888888888894e-05, "loss": 1.5973, "step": 2237 }, { "epoch": 17.904, "grad_norm": 28.750341415405273, "learning_rate": 4.564444444444444e-05, "loss": 0.8845, "step": 2238 }, { "epoch": 17.912, "grad_norm": 39.9095344543457, "learning_rate": 4.564e-05, "loss": 1.2414, "step": 2239 }, { "epoch": 17.92, "grad_norm": 20.58609390258789, "learning_rate": 4.563555555555556e-05, "loss": 1.4256, "step": 2240 }, { "epoch": 17.928, "grad_norm": 30.30979347229004, "learning_rate": 4.563111111111111e-05, "loss": 1.2865, "step": 2241 }, { "epoch": 17.936, "grad_norm": 90.17414093017578, "learning_rate": 4.562666666666667e-05, "loss": 1.3461, "step": 2242 }, { "epoch": 17.944, "grad_norm": 21.380603790283203, "learning_rate": 4.562222222222222e-05, "loss": 1.3762, "step": 2243 }, { "epoch": 17.951999999999998, "grad_norm": 68.13831329345703, "learning_rate": 4.5617777777777784e-05, "loss": 1.4818, "step": 2244 }, { "epoch": 17.96, "grad_norm": 68.83797454833984, "learning_rate": 4.561333333333333e-05, "loss": 1.7418, "step": 2245 }, { "epoch": 17.968, "grad_norm": 53.25823974609375, "learning_rate": 4.560888888888889e-05, "loss": 2.2703, "step": 2246 }, { "epoch": 17.976, "grad_norm": 57.03624725341797, 
"learning_rate": 4.560444444444444e-05, "loss": 1.3203, "step": 2247 }, { "epoch": 17.984, "grad_norm": 39.614501953125, "learning_rate": 4.5600000000000004e-05, "loss": 1.7033, "step": 2248 }, { "epoch": 17.992, "grad_norm": 37.21839904785156, "learning_rate": 4.559555555555556e-05, "loss": 1.7241, "step": 2249 }, { "epoch": 18.0, "grad_norm": 31.368162155151367, "learning_rate": 4.5591111111111114e-05, "loss": 1.2687, "step": 2250 }, { "epoch": 18.0, "eval_loss": 1.2595864534378052, "eval_map": 0.3081, "eval_map_50": 0.5901, "eval_map_75": 0.2613, "eval_map_Coverall": 0.5807, "eval_map_Face_Shield": 0.258, "eval_map_Gloves": 0.2366, "eval_map_Goggles": 0.0789, "eval_map_Mask": 0.3865, "eval_map_large": 0.3599, "eval_map_medium": 0.1759, "eval_map_small": -1.0, "eval_mar_1": 0.2747, "eval_mar_10": 0.5104, "eval_mar_100": 0.5276, "eval_mar_100_Coverall": 0.7133, "eval_mar_100_Face_Shield": 0.6176, "eval_mar_100_Gloves": 0.4525, "eval_mar_100_Goggles": 0.3969, "eval_mar_100_Mask": 0.4577, "eval_mar_large": 0.5988, "eval_mar_medium": 0.3597, "eval_mar_small": -1.0, "eval_runtime": 5.1653, "eval_samples_per_second": 5.614, "eval_steps_per_second": 0.387, "step": 2250 }, { "epoch": 18.008, "grad_norm": 46.81858825683594, "learning_rate": 4.558666666666667e-05, "loss": 1.0444, "step": 2251 }, { "epoch": 18.016, "grad_norm": 45.40398025512695, "learning_rate": 4.558222222222222e-05, "loss": 2.0678, "step": 2252 }, { "epoch": 18.024, "grad_norm": 19.30278968811035, "learning_rate": 4.557777777777778e-05, "loss": 1.0184, "step": 2253 }, { "epoch": 18.032, "grad_norm": 35.62297821044922, "learning_rate": 4.557333333333333e-05, "loss": 1.3428, "step": 2254 }, { "epoch": 18.04, "grad_norm": 18.43499183654785, "learning_rate": 4.556888888888889e-05, "loss": 1.1595, "step": 2255 }, { "epoch": 18.048, "grad_norm": 18.31495475769043, "learning_rate": 4.556444444444445e-05, "loss": 1.4312, "step": 2256 }, { "epoch": 18.056, "grad_norm": 37.19743347167969, "learning_rate": 
4.5560000000000004e-05, "loss": 1.595, "step": 2257 }, { "epoch": 18.064, "grad_norm": 54.88460922241211, "learning_rate": 4.555555555555556e-05, "loss": 1.6057, "step": 2258 }, { "epoch": 18.072, "grad_norm": 27.84742546081543, "learning_rate": 4.5551111111111114e-05, "loss": 0.981, "step": 2259 }, { "epoch": 18.08, "grad_norm": 40.456932067871094, "learning_rate": 4.554666666666667e-05, "loss": 1.4001, "step": 2260 }, { "epoch": 18.088, "grad_norm": 33.936458587646484, "learning_rate": 4.5542222222222224e-05, "loss": 1.1145, "step": 2261 }, { "epoch": 18.096, "grad_norm": 22.315261840820312, "learning_rate": 4.553777777777778e-05, "loss": 1.1471, "step": 2262 }, { "epoch": 18.104, "grad_norm": 38.14823913574219, "learning_rate": 4.553333333333333e-05, "loss": 1.0637, "step": 2263 }, { "epoch": 18.112, "grad_norm": 18.57054328918457, "learning_rate": 4.5528888888888895e-05, "loss": 1.3972, "step": 2264 }, { "epoch": 18.12, "grad_norm": 27.39850616455078, "learning_rate": 4.552444444444445e-05, "loss": 1.7185, "step": 2265 }, { "epoch": 18.128, "grad_norm": 40.84364700317383, "learning_rate": 4.5520000000000005e-05, "loss": 1.532, "step": 2266 }, { "epoch": 18.136, "grad_norm": 35.849151611328125, "learning_rate": 4.551555555555555e-05, "loss": 1.2001, "step": 2267 }, { "epoch": 18.144, "grad_norm": 44.89984130859375, "learning_rate": 4.5511111111111114e-05, "loss": 1.2094, "step": 2268 }, { "epoch": 18.152, "grad_norm": 32.973567962646484, "learning_rate": 4.550666666666667e-05, "loss": 0.9509, "step": 2269 }, { "epoch": 18.16, "grad_norm": 75.96735382080078, "learning_rate": 4.5502222222222224e-05, "loss": 1.5009, "step": 2270 }, { "epoch": 18.168, "grad_norm": 32.969993591308594, "learning_rate": 4.549777777777778e-05, "loss": 1.4933, "step": 2271 }, { "epoch": 18.176, "grad_norm": 26.42609214782715, "learning_rate": 4.549333333333334e-05, "loss": 1.2113, "step": 2272 }, { "epoch": 18.184, "grad_norm": 29.103111267089844, "learning_rate": 4.5488888888888895e-05, 
"loss": 1.4234, "step": 2273 }, { "epoch": 18.192, "grad_norm": 27.130704879760742, "learning_rate": 4.5484444444444443e-05, "loss": 1.2031, "step": 2274 }, { "epoch": 18.2, "grad_norm": 34.07284927368164, "learning_rate": 4.548e-05, "loss": 1.5311, "step": 2275 }, { "epoch": 18.208, "grad_norm": 23.543113708496094, "learning_rate": 4.547555555555556e-05, "loss": 1.3401, "step": 2276 }, { "epoch": 18.216, "grad_norm": 26.758258819580078, "learning_rate": 4.5471111111111115e-05, "loss": 2.427, "step": 2277 }, { "epoch": 18.224, "grad_norm": 26.944812774658203, "learning_rate": 4.546666666666667e-05, "loss": 1.3092, "step": 2278 }, { "epoch": 18.232, "grad_norm": 37.91249084472656, "learning_rate": 4.5462222222222224e-05, "loss": 2.1598, "step": 2279 }, { "epoch": 18.24, "grad_norm": 39.3010368347168, "learning_rate": 4.545777777777778e-05, "loss": 1.5224, "step": 2280 }, { "epoch": 18.248, "grad_norm": 38.23113250732422, "learning_rate": 4.5453333333333334e-05, "loss": 1.2886, "step": 2281 }, { "epoch": 18.256, "grad_norm": 21.512996673583984, "learning_rate": 4.544888888888889e-05, "loss": 1.4246, "step": 2282 }, { "epoch": 18.264, "grad_norm": 34.43813705444336, "learning_rate": 4.5444444444444444e-05, "loss": 1.3469, "step": 2283 }, { "epoch": 18.272, "grad_norm": 19.416378021240234, "learning_rate": 4.5440000000000005e-05, "loss": 1.4964, "step": 2284 }, { "epoch": 18.28, "grad_norm": 39.96742630004883, "learning_rate": 4.543555555555556e-05, "loss": 1.1734, "step": 2285 }, { "epoch": 18.288, "grad_norm": 46.56365966796875, "learning_rate": 4.5431111111111115e-05, "loss": 1.2498, "step": 2286 }, { "epoch": 18.296, "grad_norm": 27.70553207397461, "learning_rate": 4.542666666666667e-05, "loss": 1.6607, "step": 2287 }, { "epoch": 18.304, "grad_norm": 22.480968475341797, "learning_rate": 4.5422222222222225e-05, "loss": 1.3144, "step": 2288 }, { "epoch": 18.312, "grad_norm": 20.69089698791504, "learning_rate": 4.541777777777778e-05, "loss": 0.9917, "step": 2289 }, { 
"epoch": 18.32, "grad_norm": 19.719558715820312, "learning_rate": 4.5413333333333334e-05, "loss": 1.6112, "step": 2290 }, { "epoch": 18.328, "grad_norm": 23.248666763305664, "learning_rate": 4.540888888888889e-05, "loss": 1.3666, "step": 2291 }, { "epoch": 18.336, "grad_norm": 28.25998878479004, "learning_rate": 4.5404444444444444e-05, "loss": 1.5349, "step": 2292 }, { "epoch": 18.344, "grad_norm": 29.392200469970703, "learning_rate": 4.5400000000000006e-05, "loss": 1.8159, "step": 2293 }, { "epoch": 18.352, "grad_norm": 32.58990478515625, "learning_rate": 4.539555555555556e-05, "loss": 1.4909, "step": 2294 }, { "epoch": 18.36, "grad_norm": 36.34330749511719, "learning_rate": 4.539111111111111e-05, "loss": 1.5903, "step": 2295 }, { "epoch": 18.368, "grad_norm": 22.541278839111328, "learning_rate": 4.5386666666666664e-05, "loss": 1.2269, "step": 2296 }, { "epoch": 18.376, "grad_norm": 60.9660758972168, "learning_rate": 4.5382222222222225e-05, "loss": 1.2665, "step": 2297 }, { "epoch": 18.384, "grad_norm": 28.005226135253906, "learning_rate": 4.537777777777778e-05, "loss": 1.2292, "step": 2298 }, { "epoch": 18.392, "grad_norm": 30.89092445373535, "learning_rate": 4.5373333333333335e-05, "loss": 1.301, "step": 2299 }, { "epoch": 18.4, "grad_norm": 24.385950088500977, "learning_rate": 4.536888888888889e-05, "loss": 1.5131, "step": 2300 }, { "epoch": 18.408, "grad_norm": 36.93797302246094, "learning_rate": 4.536444444444445e-05, "loss": 1.5954, "step": 2301 }, { "epoch": 18.416, "grad_norm": 32.24222183227539, "learning_rate": 4.536e-05, "loss": 1.339, "step": 2302 }, { "epoch": 18.424, "grad_norm": 48.95425033569336, "learning_rate": 4.5355555555555554e-05, "loss": 1.5975, "step": 2303 }, { "epoch": 18.432, "grad_norm": 17.372835159301758, "learning_rate": 4.535111111111111e-05, "loss": 1.0491, "step": 2304 }, { "epoch": 18.44, "grad_norm": 22.690385818481445, "learning_rate": 4.534666666666667e-05, "loss": 1.2562, "step": 2305 }, { "epoch": 18.448, "grad_norm": 
21.96539878845215, "learning_rate": 4.5342222222222226e-05, "loss": 1.3457, "step": 2306 }, { "epoch": 18.456, "grad_norm": 75.54998779296875, "learning_rate": 4.533777777777778e-05, "loss": 1.0676, "step": 2307 }, { "epoch": 18.464, "grad_norm": 96.56771850585938, "learning_rate": 4.5333333333333335e-05, "loss": 1.7624, "step": 2308 }, { "epoch": 18.472, "grad_norm": 21.160539627075195, "learning_rate": 4.532888888888889e-05, "loss": 1.1182, "step": 2309 }, { "epoch": 18.48, "grad_norm": 25.408668518066406, "learning_rate": 4.5324444444444445e-05, "loss": 0.9113, "step": 2310 }, { "epoch": 18.488, "grad_norm": 32.667816162109375, "learning_rate": 4.532e-05, "loss": 1.1492, "step": 2311 }, { "epoch": 18.496, "grad_norm": 30.401330947875977, "learning_rate": 4.5315555555555555e-05, "loss": 1.2371, "step": 2312 }, { "epoch": 18.504, "grad_norm": 24.251062393188477, "learning_rate": 4.5311111111111116e-05, "loss": 1.6814, "step": 2313 }, { "epoch": 18.512, "grad_norm": 25.484010696411133, "learning_rate": 4.530666666666667e-05, "loss": 1.0337, "step": 2314 }, { "epoch": 18.52, "grad_norm": 55.38465118408203, "learning_rate": 4.5302222222222226e-05, "loss": 1.5794, "step": 2315 }, { "epoch": 18.528, "grad_norm": 38.27043533325195, "learning_rate": 4.529777777777778e-05, "loss": 1.0939, "step": 2316 }, { "epoch": 18.536, "grad_norm": 17.277353286743164, "learning_rate": 4.5293333333333336e-05, "loss": 1.1114, "step": 2317 }, { "epoch": 18.544, "grad_norm": 41.85536193847656, "learning_rate": 4.528888888888889e-05, "loss": 1.2835, "step": 2318 }, { "epoch": 18.552, "grad_norm": 58.112632751464844, "learning_rate": 4.5284444444444445e-05, "loss": 1.0684, "step": 2319 }, { "epoch": 18.56, "grad_norm": 28.29610252380371, "learning_rate": 4.528e-05, "loss": 1.6169, "step": 2320 }, { "epoch": 18.568, "grad_norm": 22.14971923828125, "learning_rate": 4.527555555555556e-05, "loss": 1.1944, "step": 2321 }, { "epoch": 18.576, "grad_norm": 18.60849380493164, "learning_rate": 
4.527111111111112e-05, "loss": 1.2024, "step": 2322 }, { "epoch": 18.584, "grad_norm": 32.522705078125, "learning_rate": 4.526666666666667e-05, "loss": 2.5328, "step": 2323 }, { "epoch": 18.592, "grad_norm": 23.521337509155273, "learning_rate": 4.526222222222222e-05, "loss": 1.5192, "step": 2324 }, { "epoch": 18.6, "grad_norm": 30.796342849731445, "learning_rate": 4.525777777777778e-05, "loss": 1.013, "step": 2325 }, { "epoch": 18.608, "grad_norm": 73.54227447509766, "learning_rate": 4.5253333333333336e-05, "loss": 1.3174, "step": 2326 }, { "epoch": 18.616, "grad_norm": 24.7880802154541, "learning_rate": 4.524888888888889e-05, "loss": 1.3423, "step": 2327 }, { "epoch": 18.624, "grad_norm": 33.653167724609375, "learning_rate": 4.5244444444444446e-05, "loss": 2.7156, "step": 2328 }, { "epoch": 18.632, "grad_norm": 18.765047073364258, "learning_rate": 4.524000000000001e-05, "loss": 1.064, "step": 2329 }, { "epoch": 18.64, "grad_norm": 30.397560119628906, "learning_rate": 4.523555555555556e-05, "loss": 1.7568, "step": 2330 }, { "epoch": 18.648, "grad_norm": 65.18970489501953, "learning_rate": 4.523111111111111e-05, "loss": 1.182, "step": 2331 }, { "epoch": 18.656, "grad_norm": 41.72859191894531, "learning_rate": 4.5226666666666665e-05, "loss": 1.0176, "step": 2332 }, { "epoch": 18.664, "grad_norm": 37.14756393432617, "learning_rate": 4.522222222222223e-05, "loss": 1.1403, "step": 2333 }, { "epoch": 18.672, "grad_norm": 36.848609924316406, "learning_rate": 4.521777777777778e-05, "loss": 1.1519, "step": 2334 }, { "epoch": 18.68, "grad_norm": 39.274044036865234, "learning_rate": 4.5213333333333336e-05, "loss": 1.6489, "step": 2335 }, { "epoch": 18.688, "grad_norm": 26.15009880065918, "learning_rate": 4.520888888888889e-05, "loss": 1.0285, "step": 2336 }, { "epoch": 18.696, "grad_norm": 24.286865234375, "learning_rate": 4.5204444444444446e-05, "loss": 1.6639, "step": 2337 }, { "epoch": 18.704, "grad_norm": 24.12560272216797, "learning_rate": 4.52e-05, "loss": 0.8753, 
"step": 2338 }, { "epoch": 18.712, "grad_norm": 29.818822860717773, "learning_rate": 4.5195555555555556e-05, "loss": 1.2299, "step": 2339 }, { "epoch": 18.72, "grad_norm": 25.415082931518555, "learning_rate": 4.519111111111111e-05, "loss": 0.9977, "step": 2340 }, { "epoch": 18.728, "grad_norm": 43.25128173828125, "learning_rate": 4.518666666666667e-05, "loss": 0.9779, "step": 2341 }, { "epoch": 18.736, "grad_norm": 22.8486385345459, "learning_rate": 4.518222222222223e-05, "loss": 1.0621, "step": 2342 }, { "epoch": 18.744, "grad_norm": 24.91759490966797, "learning_rate": 4.517777777777778e-05, "loss": 1.2753, "step": 2343 }, { "epoch": 18.752, "grad_norm": 43.65217971801758, "learning_rate": 4.517333333333334e-05, "loss": 1.4418, "step": 2344 }, { "epoch": 18.76, "grad_norm": 19.82387351989746, "learning_rate": 4.516888888888889e-05, "loss": 0.9303, "step": 2345 }, { "epoch": 18.768, "grad_norm": 26.640602111816406, "learning_rate": 4.5164444444444446e-05, "loss": 1.3405, "step": 2346 }, { "epoch": 18.776, "grad_norm": 22.976333618164062, "learning_rate": 4.516e-05, "loss": 1.4978, "step": 2347 }, { "epoch": 18.784, "grad_norm": 27.17793846130371, "learning_rate": 4.5155555555555556e-05, "loss": 0.9441, "step": 2348 }, { "epoch": 18.792, "grad_norm": 26.22183609008789, "learning_rate": 4.515111111111111e-05, "loss": 1.35, "step": 2349 }, { "epoch": 18.8, "grad_norm": 34.417274475097656, "learning_rate": 4.514666666666667e-05, "loss": 1.2127, "step": 2350 }, { "epoch": 18.808, "grad_norm": 43.34095764160156, "learning_rate": 4.514222222222223e-05, "loss": 1.983, "step": 2351 }, { "epoch": 18.816, "grad_norm": 38.11582565307617, "learning_rate": 4.5137777777777776e-05, "loss": 0.9497, "step": 2352 }, { "epoch": 18.824, "grad_norm": 41.008392333984375, "learning_rate": 4.513333333333333e-05, "loss": 0.9778, "step": 2353 }, { "epoch": 18.832, "grad_norm": 23.194177627563477, "learning_rate": 4.512888888888889e-05, "loss": 1.1152, "step": 2354 }, { "epoch": 18.84, 
"grad_norm": 39.38307189941406, "learning_rate": 4.512444444444445e-05, "loss": 1.4158, "step": 2355 }, { "epoch": 18.848, "grad_norm": 26.874954223632812, "learning_rate": 4.512e-05, "loss": 1.2012, "step": 2356 }, { "epoch": 18.856, "grad_norm": 20.161720275878906, "learning_rate": 4.5115555555555557e-05, "loss": 1.6024, "step": 2357 }, { "epoch": 18.864, "grad_norm": 37.71246337890625, "learning_rate": 4.511111111111112e-05, "loss": 1.1535, "step": 2358 }, { "epoch": 18.872, "grad_norm": 26.916297912597656, "learning_rate": 4.5106666666666666e-05, "loss": 1.2284, "step": 2359 }, { "epoch": 18.88, "grad_norm": 46.776668548583984, "learning_rate": 4.510222222222222e-05, "loss": 1.184, "step": 2360 }, { "epoch": 18.888, "grad_norm": 25.128171920776367, "learning_rate": 4.5097777777777776e-05, "loss": 1.07, "step": 2361 }, { "epoch": 18.896, "grad_norm": 24.371057510375977, "learning_rate": 4.509333333333334e-05, "loss": 0.945, "step": 2362 }, { "epoch": 18.904, "grad_norm": 18.44635581970215, "learning_rate": 4.508888888888889e-05, "loss": 0.8619, "step": 2363 }, { "epoch": 18.912, "grad_norm": 38.09754943847656, "learning_rate": 4.508444444444445e-05, "loss": 1.4007, "step": 2364 }, { "epoch": 18.92, "grad_norm": 21.609699249267578, "learning_rate": 4.508e-05, "loss": 1.3771, "step": 2365 }, { "epoch": 18.928, "grad_norm": 30.98387336730957, "learning_rate": 4.507555555555556e-05, "loss": 1.2354, "step": 2366 }, { "epoch": 18.936, "grad_norm": 51.88723373413086, "learning_rate": 4.507111111111111e-05, "loss": 1.2328, "step": 2367 }, { "epoch": 18.944, "grad_norm": 70.88677215576172, "learning_rate": 4.5066666666666667e-05, "loss": 1.5642, "step": 2368 }, { "epoch": 18.951999999999998, "grad_norm": 70.19834899902344, "learning_rate": 4.506222222222222e-05, "loss": 1.0488, "step": 2369 }, { "epoch": 18.96, "grad_norm": 32.15618896484375, "learning_rate": 4.505777777777778e-05, "loss": 3.0745, "step": 2370 }, { "epoch": 18.968, "grad_norm": 24.648277282714844, 
"learning_rate": 4.505333333333334e-05, "loss": 1.3941, "step": 2371 }, { "epoch": 18.976, "grad_norm": 43.734405517578125, "learning_rate": 4.504888888888889e-05, "loss": 0.9696, "step": 2372 }, { "epoch": 18.984, "grad_norm": 31.716482162475586, "learning_rate": 4.504444444444445e-05, "loss": 1.3076, "step": 2373 }, { "epoch": 18.992, "grad_norm": 35.47687530517578, "learning_rate": 4.504e-05, "loss": 1.6027, "step": 2374 }, { "epoch": 19.0, "grad_norm": 27.31801414489746, "learning_rate": 4.503555555555556e-05, "loss": 1.537, "step": 2375 }, { "epoch": 19.0, "eval_loss": 1.3107187747955322, "eval_map": 0.2747, "eval_map_50": 0.5474, "eval_map_75": 0.2396, "eval_map_Coverall": 0.5665, "eval_map_Face_Shield": 0.1699, "eval_map_Gloves": 0.1857, "eval_map_Goggles": 0.0877, "eval_map_Mask": 0.3635, "eval_map_large": 0.3231, "eval_map_medium": 0.1515, "eval_map_small": -1.0, "eval_mar_1": 0.2488, "eval_mar_10": 0.4956, "eval_mar_100": 0.5119, "eval_mar_100_Coverall": 0.7311, "eval_mar_100_Face_Shield": 0.6059, "eval_mar_100_Gloves": 0.3705, "eval_mar_100_Goggles": 0.4, "eval_mar_100_Mask": 0.4519, "eval_mar_large": 0.5952, "eval_mar_medium": 0.2797, "eval_mar_small": -1.0, "eval_runtime": 3.7126, "eval_samples_per_second": 7.811, "eval_steps_per_second": 0.539, "step": 2375 }, { "epoch": 19.008, "grad_norm": 30.45646095275879, "learning_rate": 4.503111111111111e-05, "loss": 0.9724, "step": 2376 }, { "epoch": 19.016, "grad_norm": 28.890247344970703, "learning_rate": 4.502666666666667e-05, "loss": 1.1499, "step": 2377 }, { "epoch": 19.024, "grad_norm": 48.27378463745117, "learning_rate": 4.502222222222223e-05, "loss": 1.3165, "step": 2378 }, { "epoch": 19.032, "grad_norm": 215.3856964111328, "learning_rate": 4.5017777777777783e-05, "loss": 1.6832, "step": 2379 }, { "epoch": 19.04, "grad_norm": 27.354045867919922, "learning_rate": 4.501333333333334e-05, "loss": 0.893, "step": 2380 }, { "epoch": 19.048, "grad_norm": 41.22970199584961, "learning_rate": 
4.5008888888888886e-05, "loss": 1.3124, "step": 2381 }, { "epoch": 19.056, "grad_norm": 42.724796295166016, "learning_rate": 4.500444444444445e-05, "loss": 1.2107, "step": 2382 }, { "epoch": 19.064, "grad_norm": 33.349666595458984, "learning_rate": 4.5e-05, "loss": 2.1069, "step": 2383 }, { "epoch": 19.072, "grad_norm": 60.42729568481445, "learning_rate": 4.499555555555556e-05, "loss": 1.7121, "step": 2384 }, { "epoch": 19.08, "grad_norm": 34.7129020690918, "learning_rate": 4.499111111111111e-05, "loss": 1.2268, "step": 2385 }, { "epoch": 19.088, "grad_norm": 24.544939041137695, "learning_rate": 4.4986666666666674e-05, "loss": 1.2348, "step": 2386 }, { "epoch": 19.096, "grad_norm": 19.542388916015625, "learning_rate": 4.498222222222222e-05, "loss": 1.5257, "step": 2387 }, { "epoch": 19.104, "grad_norm": 20.254362106323242, "learning_rate": 4.497777777777778e-05, "loss": 1.4503, "step": 2388 }, { "epoch": 19.112, "grad_norm": 19.918123245239258, "learning_rate": 4.497333333333333e-05, "loss": 1.2677, "step": 2389 }, { "epoch": 19.12, "grad_norm": 21.80950927734375, "learning_rate": 4.4968888888888894e-05, "loss": 1.1446, "step": 2390 }, { "epoch": 19.128, "grad_norm": 21.882173538208008, "learning_rate": 4.496444444444445e-05, "loss": 1.89, "step": 2391 }, { "epoch": 19.136, "grad_norm": 22.035367965698242, "learning_rate": 4.496e-05, "loss": 1.389, "step": 2392 }, { "epoch": 19.144, "grad_norm": 39.88568878173828, "learning_rate": 4.495555555555556e-05, "loss": 1.4483, "step": 2393 }, { "epoch": 19.152, "grad_norm": 196.54519653320312, "learning_rate": 4.495111111111111e-05, "loss": 1.0646, "step": 2394 }, { "epoch": 19.16, "grad_norm": 62.47575378417969, "learning_rate": 4.494666666666667e-05, "loss": 0.9308, "step": 2395 }, { "epoch": 19.168, "grad_norm": 36.537906646728516, "learning_rate": 4.494222222222222e-05, "loss": 1.2209, "step": 2396 }, { "epoch": 19.176, "grad_norm": 19.73155403137207, "learning_rate": 4.493777777777778e-05, "loss": 1.2176, "step": 2397 
}, { "epoch": 19.184, "grad_norm": 36.751380920410156, "learning_rate": 4.493333333333333e-05, "loss": 1.2674, "step": 2398 }, { "epoch": 19.192, "grad_norm": 58.021820068359375, "learning_rate": 4.4928888888888894e-05, "loss": 1.1332, "step": 2399 }, { "epoch": 19.2, "grad_norm": 19.843021392822266, "learning_rate": 4.492444444444445e-05, "loss": 1.4336, "step": 2400 }, { "epoch": 19.208, "grad_norm": 24.727033615112305, "learning_rate": 4.4920000000000004e-05, "loss": 0.908, "step": 2401 }, { "epoch": 19.216, "grad_norm": 22.83860969543457, "learning_rate": 4.491555555555556e-05, "loss": 1.1912, "step": 2402 }, { "epoch": 19.224, "grad_norm": 39.857215881347656, "learning_rate": 4.491111111111111e-05, "loss": 1.5188, "step": 2403 }, { "epoch": 19.232, "grad_norm": 34.569984436035156, "learning_rate": 4.490666666666667e-05, "loss": 1.4818, "step": 2404 }, { "epoch": 19.24, "grad_norm": 21.337610244750977, "learning_rate": 4.490222222222222e-05, "loss": 1.549, "step": 2405 }, { "epoch": 19.248, "grad_norm": 169.97068786621094, "learning_rate": 4.489777777777778e-05, "loss": 1.0654, "step": 2406 }, { "epoch": 19.256, "grad_norm": 24.983137130737305, "learning_rate": 4.489333333333334e-05, "loss": 0.9948, "step": 2407 }, { "epoch": 19.264, "grad_norm": 108.1728515625, "learning_rate": 4.4888888888888894e-05, "loss": 1.9998, "step": 2408 }, { "epoch": 19.272, "grad_norm": 30.033485412597656, "learning_rate": 4.488444444444444e-05, "loss": 1.054, "step": 2409 }, { "epoch": 19.28, "grad_norm": 81.40203857421875, "learning_rate": 4.488e-05, "loss": 1.9663, "step": 2410 }, { "epoch": 19.288, "grad_norm": 27.68346405029297, "learning_rate": 4.487555555555556e-05, "loss": 1.1861, "step": 2411 }, { "epoch": 19.296, "grad_norm": 35.50544738769531, "learning_rate": 4.4871111111111114e-05, "loss": 1.6814, "step": 2412 }, { "epoch": 19.304, "grad_norm": 33.13436508178711, "learning_rate": 4.486666666666667e-05, "loss": 1.1693, "step": 2413 }, { "epoch": 19.312, "grad_norm": 
14.273661613464355, "learning_rate": 4.486222222222222e-05, "loss": 2.1383, "step": 2414 }, { "epoch": 19.32, "grad_norm": 25.52344512939453, "learning_rate": 4.4857777777777785e-05, "loss": 1.438, "step": 2415 }, { "epoch": 19.328, "grad_norm": 56.87870788574219, "learning_rate": 4.485333333333333e-05, "loss": 1.045, "step": 2416 }, { "epoch": 19.336, "grad_norm": 73.75410461425781, "learning_rate": 4.484888888888889e-05, "loss": 1.793, "step": 2417 }, { "epoch": 19.344, "grad_norm": 21.444631576538086, "learning_rate": 4.484444444444444e-05, "loss": 1.4651, "step": 2418 }, { "epoch": 19.352, "grad_norm": 27.45106315612793, "learning_rate": 4.4840000000000004e-05, "loss": 0.9173, "step": 2419 }, { "epoch": 19.36, "grad_norm": 17.532949447631836, "learning_rate": 4.483555555555556e-05, "loss": 1.0334, "step": 2420 }, { "epoch": 19.368, "grad_norm": 39.03779983520508, "learning_rate": 4.4831111111111114e-05, "loss": 1.1042, "step": 2421 }, { "epoch": 19.376, "grad_norm": 36.0662956237793, "learning_rate": 4.482666666666667e-05, "loss": 1.0905, "step": 2422 }, { "epoch": 19.384, "grad_norm": 34.777347564697266, "learning_rate": 4.4822222222222224e-05, "loss": 1.2709, "step": 2423 }, { "epoch": 19.392, "grad_norm": 25.93427085876465, "learning_rate": 4.481777777777778e-05, "loss": 1.1122, "step": 2424 }, { "epoch": 19.4, "grad_norm": 25.202497482299805, "learning_rate": 4.4813333333333333e-05, "loss": 1.0424, "step": 2425 }, { "epoch": 19.408, "grad_norm": 34.06864929199219, "learning_rate": 4.480888888888889e-05, "loss": 1.3675, "step": 2426 }, { "epoch": 19.416, "grad_norm": 42.87620544433594, "learning_rate": 4.480444444444445e-05, "loss": 1.1207, "step": 2427 }, { "epoch": 19.424, "grad_norm": 41.48463439941406, "learning_rate": 4.4800000000000005e-05, "loss": 2.874, "step": 2428 }, { "epoch": 19.432, "grad_norm": 26.085084915161133, "learning_rate": 4.479555555555556e-05, "loss": 1.2388, "step": 2429 }, { "epoch": 19.44, "grad_norm": 30.118837356567383, 
"learning_rate": 4.4791111111111114e-05, "loss": 0.9536, "step": 2430 }, { "epoch": 19.448, "grad_norm": 22.555294036865234, "learning_rate": 4.478666666666667e-05, "loss": 0.9351, "step": 2431 }, { "epoch": 19.456, "grad_norm": 28.720361709594727, "learning_rate": 4.4782222222222224e-05, "loss": 1.4868, "step": 2432 }, { "epoch": 19.464, "grad_norm": 19.662757873535156, "learning_rate": 4.477777777777778e-05, "loss": 1.1423, "step": 2433 }, { "epoch": 19.472, "grad_norm": 70.33831787109375, "learning_rate": 4.4773333333333334e-05, "loss": 1.4098, "step": 2434 }, { "epoch": 19.48, "grad_norm": 32.29353713989258, "learning_rate": 4.4768888888888895e-05, "loss": 1.3945, "step": 2435 }, { "epoch": 19.488, "grad_norm": 102.92512512207031, "learning_rate": 4.476444444444445e-05, "loss": 1.3943, "step": 2436 }, { "epoch": 19.496, "grad_norm": 27.830673217773438, "learning_rate": 4.4760000000000005e-05, "loss": 1.4809, "step": 2437 }, { "epoch": 19.504, "grad_norm": 23.91822624206543, "learning_rate": 4.475555555555555e-05, "loss": 1.5013, "step": 2438 }, { "epoch": 19.512, "grad_norm": 43.74681091308594, "learning_rate": 4.4751111111111115e-05, "loss": 1.7486, "step": 2439 }, { "epoch": 19.52, "grad_norm": 30.895082473754883, "learning_rate": 4.474666666666667e-05, "loss": 0.9125, "step": 2440 }, { "epoch": 19.528, "grad_norm": 21.78778839111328, "learning_rate": 4.4742222222222225e-05, "loss": 1.8374, "step": 2441 }, { "epoch": 19.536, "grad_norm": 198.41490173339844, "learning_rate": 4.473777777777778e-05, "loss": 1.2661, "step": 2442 }, { "epoch": 19.544, "grad_norm": 32.5472526550293, "learning_rate": 4.473333333333334e-05, "loss": 1.4718, "step": 2443 }, { "epoch": 19.552, "grad_norm": 16.073022842407227, "learning_rate": 4.472888888888889e-05, "loss": 1.0163, "step": 2444 }, { "epoch": 19.56, "grad_norm": 39.9145622253418, "learning_rate": 4.4724444444444444e-05, "loss": 1.0624, "step": 2445 }, { "epoch": 19.568, "grad_norm": 32.15300369262695, "learning_rate": 
4.472e-05, "loss": 1.3951, "step": 2446 }, { "epoch": 19.576, "grad_norm": 26.65142822265625, "learning_rate": 4.4715555555555554e-05, "loss": 1.3791, "step": 2447 }, { "epoch": 19.584, "grad_norm": 24.703020095825195, "learning_rate": 4.4711111111111115e-05, "loss": 2.6368, "step": 2448 }, { "epoch": 19.592, "grad_norm": 27.56402587890625, "learning_rate": 4.470666666666667e-05, "loss": 1.0557, "step": 2449 }, { "epoch": 19.6, "grad_norm": 31.087549209594727, "learning_rate": 4.4702222222222225e-05, "loss": 1.1006, "step": 2450 }, { "epoch": 19.608, "grad_norm": 24.31462287902832, "learning_rate": 4.469777777777778e-05, "loss": 1.055, "step": 2451 }, { "epoch": 19.616, "grad_norm": 27.169105529785156, "learning_rate": 4.4693333333333335e-05, "loss": 1.132, "step": 2452 }, { "epoch": 19.624, "grad_norm": 38.48044204711914, "learning_rate": 4.468888888888889e-05, "loss": 1.2739, "step": 2453 }, { "epoch": 19.632, "grad_norm": 56.145816802978516, "learning_rate": 4.4684444444444444e-05, "loss": 1.8089, "step": 2454 }, { "epoch": 19.64, "grad_norm": 28.455413818359375, "learning_rate": 4.468e-05, "loss": 1.1998, "step": 2455 }, { "epoch": 19.648, "grad_norm": 25.87156867980957, "learning_rate": 4.467555555555556e-05, "loss": 1.2762, "step": 2456 }, { "epoch": 19.656, "grad_norm": 18.78925895690918, "learning_rate": 4.4671111111111116e-05, "loss": 1.9679, "step": 2457 }, { "epoch": 19.664, "grad_norm": 52.04572296142578, "learning_rate": 4.466666666666667e-05, "loss": 1.163, "step": 2458 }, { "epoch": 19.672, "grad_norm": 20.736547470092773, "learning_rate": 4.4662222222222225e-05, "loss": 1.1379, "step": 2459 }, { "epoch": 19.68, "grad_norm": 21.045364379882812, "learning_rate": 4.465777777777778e-05, "loss": 1.0894, "step": 2460 }, { "epoch": 19.688, "grad_norm": 47.33372497558594, "learning_rate": 4.4653333333333335e-05, "loss": 1.3664, "step": 2461 }, { "epoch": 19.696, "grad_norm": 28.569183349609375, "learning_rate": 4.464888888888889e-05, "loss": 1.4089, "step": 
2462 }, { "epoch": 19.704, "grad_norm": 23.64670753479004, "learning_rate": 4.4644444444444445e-05, "loss": 1.0823, "step": 2463 }, { "epoch": 19.712, "grad_norm": 41.73585891723633, "learning_rate": 4.4640000000000006e-05, "loss": 1.0519, "step": 2464 }, { "epoch": 19.72, "grad_norm": 38.10761260986328, "learning_rate": 4.463555555555556e-05, "loss": 1.1915, "step": 2465 }, { "epoch": 19.728, "grad_norm": 24.234561920166016, "learning_rate": 4.463111111111111e-05, "loss": 1.3524, "step": 2466 }, { "epoch": 19.736, "grad_norm": 17.464303970336914, "learning_rate": 4.4626666666666664e-05, "loss": 0.8629, "step": 2467 }, { "epoch": 19.744, "grad_norm": 37.11747741699219, "learning_rate": 4.4622222222222226e-05, "loss": 2.0548, "step": 2468 }, { "epoch": 19.752, "grad_norm": 40.00223159790039, "learning_rate": 4.461777777777778e-05, "loss": 1.6163, "step": 2469 }, { "epoch": 19.76, "grad_norm": 65.45709228515625, "learning_rate": 4.4613333333333335e-05, "loss": 1.0334, "step": 2470 }, { "epoch": 19.768, "grad_norm": 20.630657196044922, "learning_rate": 4.460888888888889e-05, "loss": 1.9572, "step": 2471 }, { "epoch": 19.776, "grad_norm": 39.273040771484375, "learning_rate": 4.460444444444445e-05, "loss": 1.2832, "step": 2472 }, { "epoch": 19.784, "grad_norm": 26.0974178314209, "learning_rate": 4.46e-05, "loss": 1.5983, "step": 2473 }, { "epoch": 19.792, "grad_norm": 29.864465713500977, "learning_rate": 4.4595555555555555e-05, "loss": 1.2217, "step": 2474 }, { "epoch": 19.8, "grad_norm": 24.446182250976562, "learning_rate": 4.459111111111111e-05, "loss": 1.3826, "step": 2475 }, { "epoch": 19.808, "grad_norm": 28.426816940307617, "learning_rate": 4.458666666666667e-05, "loss": 1.4302, "step": 2476 }, { "epoch": 19.816, "grad_norm": 32.27981185913086, "learning_rate": 4.4582222222222226e-05, "loss": 1.2057, "step": 2477 }, { "epoch": 19.824, "grad_norm": 92.38746643066406, "learning_rate": 4.457777777777778e-05, "loss": 1.4702, "step": 2478 }, { "epoch": 19.832, 
"grad_norm": 32.517822265625, "learning_rate": 4.4573333333333336e-05, "loss": 0.9047, "step": 2479 }, { "epoch": 19.84, "grad_norm": 57.236778259277344, "learning_rate": 4.456888888888889e-05, "loss": 1.3791, "step": 2480 }, { "epoch": 19.848, "grad_norm": 57.47596740722656, "learning_rate": 4.4564444444444445e-05, "loss": 0.8721, "step": 2481 }, { "epoch": 19.856, "grad_norm": 92.35546875, "learning_rate": 4.456e-05, "loss": 1.099, "step": 2482 }, { "epoch": 19.864, "grad_norm": 41.88343811035156, "learning_rate": 4.4555555555555555e-05, "loss": 1.1875, "step": 2483 }, { "epoch": 19.872, "grad_norm": 64.54263305664062, "learning_rate": 4.455111111111112e-05, "loss": 1.1213, "step": 2484 }, { "epoch": 19.88, "grad_norm": 36.08209991455078, "learning_rate": 4.454666666666667e-05, "loss": 1.4088, "step": 2485 }, { "epoch": 19.888, "grad_norm": 28.676925659179688, "learning_rate": 4.4542222222222226e-05, "loss": 1.7078, "step": 2486 }, { "epoch": 19.896, "grad_norm": 23.65768051147461, "learning_rate": 4.453777777777778e-05, "loss": 1.1595, "step": 2487 }, { "epoch": 19.904, "grad_norm": 35.528053283691406, "learning_rate": 4.4533333333333336e-05, "loss": 0.9325, "step": 2488 }, { "epoch": 19.912, "grad_norm": 32.22989273071289, "learning_rate": 4.452888888888889e-05, "loss": 1.2119, "step": 2489 }, { "epoch": 19.92, "grad_norm": 36.1988525390625, "learning_rate": 4.4524444444444446e-05, "loss": 1.3859, "step": 2490 }, { "epoch": 19.928, "grad_norm": 33.79228210449219, "learning_rate": 4.452e-05, "loss": 1.0895, "step": 2491 }, { "epoch": 19.936, "grad_norm": 32.62324905395508, "learning_rate": 4.451555555555556e-05, "loss": 1.2676, "step": 2492 }, { "epoch": 19.944, "grad_norm": 53.38322448730469, "learning_rate": 4.451111111111112e-05, "loss": 0.875, "step": 2493 }, { "epoch": 19.951999999999998, "grad_norm": 40.4191780090332, "learning_rate": 4.450666666666667e-05, "loss": 1.1833, "step": 2494 }, { "epoch": 19.96, "grad_norm": 33.90864944458008, "learning_rate": 
4.450222222222222e-05, "loss": 1.1892, "step": 2495 }, { "epoch": 19.968, "grad_norm": 28.612306594848633, "learning_rate": 4.449777777777778e-05, "loss": 0.8875, "step": 2496 }, { "epoch": 19.976, "grad_norm": 29.005504608154297, "learning_rate": 4.4493333333333337e-05, "loss": 1.1679, "step": 2497 }, { "epoch": 19.984, "grad_norm": 37.63240051269531, "learning_rate": 4.448888888888889e-05, "loss": 1.0705, "step": 2498 }, { "epoch": 19.992, "grad_norm": 48.59159469604492, "learning_rate": 4.4484444444444446e-05, "loss": 1.3839, "step": 2499 }, { "epoch": 20.0, "grad_norm": 30.48167610168457, "learning_rate": 4.448e-05, "loss": 1.4175, "step": 2500 }, { "epoch": 20.0, "eval_loss": 1.3480916023254395, "eval_map": 0.2835, "eval_map_50": 0.5472, "eval_map_75": 0.2536, "eval_map_Coverall": 0.525, "eval_map_Face_Shield": 0.1778, "eval_map_Gloves": 0.288, "eval_map_Goggles": 0.1256, "eval_map_Mask": 0.3011, "eval_map_large": 0.3262, "eval_map_medium": 0.1623, "eval_map_small": -1.0, "eval_mar_1": 0.2764, "eval_mar_10": 0.4692, "eval_mar_100": 0.4798, "eval_mar_100_Coverall": 0.6844, "eval_mar_100_Face_Shield": 0.5059, "eval_mar_100_Gloves": 0.4443, "eval_mar_100_Goggles": 0.3625, "eval_mar_100_Mask": 0.4019, "eval_mar_large": 0.5588, "eval_mar_medium": 0.2557, "eval_mar_small": -1.0, "eval_runtime": 4.3024, "eval_samples_per_second": 6.74, "eval_steps_per_second": 0.465, "step": 2500 }, { "epoch": 20.008, "grad_norm": 58.64241409301758, "learning_rate": 4.4475555555555556e-05, "loss": 1.507, "step": 2501 }, { "epoch": 20.016, "grad_norm": 19.886436462402344, "learning_rate": 4.447111111111111e-05, "loss": 1.2527, "step": 2502 }, { "epoch": 20.024, "grad_norm": 29.716257095336914, "learning_rate": 4.4466666666666666e-05, "loss": 1.1165, "step": 2503 }, { "epoch": 20.032, "grad_norm": 28.852554321289062, "learning_rate": 4.446222222222222e-05, "loss": 1.3443, "step": 2504 }, { "epoch": 20.04, "grad_norm": 17.68695831298828, "learning_rate": 4.445777777777778e-05, "loss": 
1.7535, "step": 2505 }, { "epoch": 20.048, "grad_norm": 27.980073928833008, "learning_rate": 4.445333333333334e-05, "loss": 1.2882, "step": 2506 }, { "epoch": 20.056, "grad_norm": 32.43731689453125, "learning_rate": 4.444888888888889e-05, "loss": 1.1733, "step": 2507 }, { "epoch": 20.064, "grad_norm": 19.339679718017578, "learning_rate": 4.4444444444444447e-05, "loss": 2.0633, "step": 2508 }, { "epoch": 20.072, "grad_norm": 30.841127395629883, "learning_rate": 4.444e-05, "loss": 1.524, "step": 2509 }, { "epoch": 20.08, "grad_norm": 34.46686935424805, "learning_rate": 4.4435555555555556e-05, "loss": 1.7035, "step": 2510 }, { "epoch": 20.088, "grad_norm": 30.25748062133789, "learning_rate": 4.443111111111111e-05, "loss": 1.1379, "step": 2511 }, { "epoch": 20.096, "grad_norm": 20.02550506591797, "learning_rate": 4.4426666666666666e-05, "loss": 1.1079, "step": 2512 }, { "epoch": 20.104, "grad_norm": 19.716814041137695, "learning_rate": 4.442222222222223e-05, "loss": 1.5449, "step": 2513 }, { "epoch": 20.112, "grad_norm": 40.64385223388672, "learning_rate": 4.441777777777778e-05, "loss": 1.2393, "step": 2514 }, { "epoch": 20.12, "grad_norm": 24.965774536132812, "learning_rate": 4.441333333333334e-05, "loss": 1.5253, "step": 2515 }, { "epoch": 20.128, "grad_norm": 24.888093948364258, "learning_rate": 4.440888888888889e-05, "loss": 1.2517, "step": 2516 }, { "epoch": 20.136, "grad_norm": 27.64808464050293, "learning_rate": 4.440444444444445e-05, "loss": 1.1382, "step": 2517 }, { "epoch": 20.144, "grad_norm": 21.420984268188477, "learning_rate": 4.44e-05, "loss": 1.3786, "step": 2518 }, { "epoch": 20.152, "grad_norm": 61.64635467529297, "learning_rate": 4.439555555555556e-05, "loss": 1.0772, "step": 2519 }, { "epoch": 20.16, "grad_norm": 55.89219665527344, "learning_rate": 4.439111111111111e-05, "loss": 1.2753, "step": 2520 }, { "epoch": 20.168, "grad_norm": 20.569904327392578, "learning_rate": 4.438666666666667e-05, "loss": 1.0398, "step": 2521 }, { "epoch": 20.176, 
"grad_norm": 24.045185089111328, "learning_rate": 4.438222222222223e-05, "loss": 1.3968, "step": 2522 }, { "epoch": 20.184, "grad_norm": 19.27184295654297, "learning_rate": 4.4377777777777776e-05, "loss": 2.3534, "step": 2523 }, { "epoch": 20.192, "grad_norm": 36.97625732421875, "learning_rate": 4.437333333333333e-05, "loss": 1.3079, "step": 2524 }, { "epoch": 20.2, "grad_norm": 41.286312103271484, "learning_rate": 4.436888888888889e-05, "loss": 1.6388, "step": 2525 }, { "epoch": 20.208, "grad_norm": 206.7964630126953, "learning_rate": 4.436444444444445e-05, "loss": 1.6765, "step": 2526 }, { "epoch": 20.216, "grad_norm": 31.782983779907227, "learning_rate": 4.436e-05, "loss": 1.2777, "step": 2527 }, { "epoch": 20.224, "grad_norm": 22.015941619873047, "learning_rate": 4.435555555555556e-05, "loss": 0.9389, "step": 2528 }, { "epoch": 20.232, "grad_norm": 40.7990608215332, "learning_rate": 4.435111111111112e-05, "loss": 1.4488, "step": 2529 }, { "epoch": 20.24, "grad_norm": 30.900178909301758, "learning_rate": 4.434666666666667e-05, "loss": 1.3235, "step": 2530 }, { "epoch": 20.248, "grad_norm": 24.41156768798828, "learning_rate": 4.434222222222222e-05, "loss": 1.4906, "step": 2531 }, { "epoch": 20.256, "grad_norm": 70.47521209716797, "learning_rate": 4.4337777777777776e-05, "loss": 1.1687, "step": 2532 }, { "epoch": 20.264, "grad_norm": 26.68194007873535, "learning_rate": 4.433333333333334e-05, "loss": 0.9338, "step": 2533 }, { "epoch": 20.272, "grad_norm": 112.33180236816406, "learning_rate": 4.432888888888889e-05, "loss": 1.332, "step": 2534 }, { "epoch": 20.28, "grad_norm": 38.60374069213867, "learning_rate": 4.432444444444445e-05, "loss": 0.9202, "step": 2535 }, { "epoch": 20.288, "grad_norm": 75.19774627685547, "learning_rate": 4.432e-05, "loss": 0.8853, "step": 2536 }, { "epoch": 20.296, "grad_norm": 31.909793853759766, "learning_rate": 4.431555555555556e-05, "loss": 1.0598, "step": 2537 }, { "epoch": 20.304, "grad_norm": 47.494911193847656, "learning_rate": 
4.431111111111111e-05, "loss": 1.389, "step": 2538 }, { "epoch": 20.312, "grad_norm": 28.46302604675293, "learning_rate": 4.430666666666667e-05, "loss": 1.4784, "step": 2539 }, { "epoch": 20.32, "grad_norm": 26.064878463745117, "learning_rate": 4.430222222222222e-05, "loss": 1.2267, "step": 2540 }, { "epoch": 20.328, "grad_norm": 27.430561065673828, "learning_rate": 4.4297777777777784e-05, "loss": 1.4647, "step": 2541 }, { "epoch": 20.336, "grad_norm": 60.192955017089844, "learning_rate": 4.429333333333334e-05, "loss": 2.2303, "step": 2542 }, { "epoch": 20.344, "grad_norm": 24.004867553710938, "learning_rate": 4.428888888888889e-05, "loss": 1.0264, "step": 2543 }, { "epoch": 20.352, "grad_norm": 56.73411560058594, "learning_rate": 4.428444444444445e-05, "loss": 1.3653, "step": 2544 }, { "epoch": 20.36, "grad_norm": 102.4898452758789, "learning_rate": 4.428e-05, "loss": 1.5058, "step": 2545 }, { "epoch": 20.368, "grad_norm": 39.750274658203125, "learning_rate": 4.427555555555556e-05, "loss": 1.4636, "step": 2546 }, { "epoch": 20.376, "grad_norm": 30.57598876953125, "learning_rate": 4.427111111111111e-05, "loss": 1.3887, "step": 2547 }, { "epoch": 20.384, "grad_norm": 39.98623275756836, "learning_rate": 4.426666666666667e-05, "loss": 1.7386, "step": 2548 }, { "epoch": 20.392, "grad_norm": 24.67047119140625, "learning_rate": 4.426222222222222e-05, "loss": 1.0531, "step": 2549 }, { "epoch": 20.4, "grad_norm": 27.281143188476562, "learning_rate": 4.4257777777777784e-05, "loss": 1.5728, "step": 2550 }, { "epoch": 20.408, "grad_norm": 38.398719787597656, "learning_rate": 4.425333333333334e-05, "loss": 1.2439, "step": 2551 }, { "epoch": 20.416, "grad_norm": 42.39053726196289, "learning_rate": 4.424888888888889e-05, "loss": 1.7077, "step": 2552 }, { "epoch": 20.424, "grad_norm": 38.80982971191406, "learning_rate": 4.424444444444444e-05, "loss": 1.433, "step": 2553 }, { "epoch": 20.432, "grad_norm": 23.058773040771484, "learning_rate": 4.424e-05, "loss": 1.7343, "step": 2554 
}, { "epoch": 20.44, "grad_norm": 19.04547882080078, "learning_rate": 4.423555555555556e-05, "loss": 1.0685, "step": 2555 }, { "epoch": 20.448, "grad_norm": 18.936311721801758, "learning_rate": 4.423111111111111e-05, "loss": 1.0514, "step": 2556 }, { "epoch": 20.456, "grad_norm": 37.579612731933594, "learning_rate": 4.422666666666667e-05, "loss": 1.71, "step": 2557 }, { "epoch": 20.464, "grad_norm": 46.242523193359375, "learning_rate": 4.422222222222222e-05, "loss": 1.0098, "step": 2558 }, { "epoch": 20.472, "grad_norm": 51.0615119934082, "learning_rate": 4.421777777777778e-05, "loss": 1.037, "step": 2559 }, { "epoch": 20.48, "grad_norm": 21.464540481567383, "learning_rate": 4.421333333333333e-05, "loss": 0.8328, "step": 2560 }, { "epoch": 20.488, "grad_norm": 40.735557556152344, "learning_rate": 4.420888888888889e-05, "loss": 1.1115, "step": 2561 }, { "epoch": 20.496, "grad_norm": 40.67584991455078, "learning_rate": 4.420444444444445e-05, "loss": 1.3766, "step": 2562 }, { "epoch": 20.504, "grad_norm": 24.975786209106445, "learning_rate": 4.4200000000000004e-05, "loss": 1.2504, "step": 2563 }, { "epoch": 20.512, "grad_norm": 147.02101135253906, "learning_rate": 4.419555555555556e-05, "loss": 3.0895, "step": 2564 }, { "epoch": 20.52, "grad_norm": 25.17919921875, "learning_rate": 4.4191111111111113e-05, "loss": 1.2953, "step": 2565 }, { "epoch": 20.528, "grad_norm": 22.145282745361328, "learning_rate": 4.418666666666667e-05, "loss": 1.1651, "step": 2566 }, { "epoch": 20.536, "grad_norm": 22.460447311401367, "learning_rate": 4.418222222222222e-05, "loss": 0.8486, "step": 2567 }, { "epoch": 20.544, "grad_norm": 39.42176818847656, "learning_rate": 4.417777777777778e-05, "loss": 1.3459, "step": 2568 }, { "epoch": 20.552, "grad_norm": 23.849946975708008, "learning_rate": 4.417333333333333e-05, "loss": 1.0559, "step": 2569 }, { "epoch": 20.56, "grad_norm": 25.515655517578125, "learning_rate": 4.4168888888888894e-05, "loss": 1.4634, "step": 2570 }, { "epoch": 20.568, 
"grad_norm": 34.02996826171875, "learning_rate": 4.416444444444445e-05, "loss": 1.7676, "step": 2571 }, { "epoch": 20.576, "grad_norm": 24.643617630004883, "learning_rate": 4.4160000000000004e-05, "loss": 1.2287, "step": 2572 }, { "epoch": 20.584, "grad_norm": 38.1386604309082, "learning_rate": 4.415555555555556e-05, "loss": 1.4838, "step": 2573 }, { "epoch": 20.592, "grad_norm": 35.631587982177734, "learning_rate": 4.4151111111111114e-05, "loss": 1.5634, "step": 2574 }, { "epoch": 20.6, "grad_norm": 21.28455352783203, "learning_rate": 4.414666666666667e-05, "loss": 1.3429, "step": 2575 }, { "epoch": 20.608, "grad_norm": 36.9566764831543, "learning_rate": 4.4142222222222223e-05, "loss": 1.3223, "step": 2576 }, { "epoch": 20.616, "grad_norm": 21.76194190979004, "learning_rate": 4.413777777777778e-05, "loss": 0.895, "step": 2577 }, { "epoch": 20.624, "grad_norm": 22.190290451049805, "learning_rate": 4.413333333333334e-05, "loss": 1.4327, "step": 2578 }, { "epoch": 20.632, "grad_norm": 29.22929573059082, "learning_rate": 4.4128888888888895e-05, "loss": 0.8768, "step": 2579 }, { "epoch": 20.64, "grad_norm": 32.66156768798828, "learning_rate": 4.412444444444444e-05, "loss": 0.9816, "step": 2580 }, { "epoch": 20.648, "grad_norm": 39.21051788330078, "learning_rate": 4.412e-05, "loss": 1.015, "step": 2581 }, { "epoch": 20.656, "grad_norm": 58.37334060668945, "learning_rate": 4.411555555555556e-05, "loss": 1.1791, "step": 2582 }, { "epoch": 20.664, "grad_norm": 76.11853790283203, "learning_rate": 4.4111111111111114e-05, "loss": 1.7093, "step": 2583 }, { "epoch": 20.672, "grad_norm": 28.731996536254883, "learning_rate": 4.410666666666667e-05, "loss": 1.2141, "step": 2584 }, { "epoch": 20.68, "grad_norm": 32.728538513183594, "learning_rate": 4.4102222222222224e-05, "loss": 1.4319, "step": 2585 }, { "epoch": 20.688, "grad_norm": 41.50709533691406, "learning_rate": 4.4097777777777785e-05, "loss": 1.3338, "step": 2586 }, { "epoch": 20.696, "grad_norm": 31.24656867980957, 
"learning_rate": 4.4093333333333334e-05, "loss": 1.1215, "step": 2587 }, { "epoch": 20.704, "grad_norm": 19.337675094604492, "learning_rate": 4.408888888888889e-05, "loss": 0.9213, "step": 2588 }, { "epoch": 20.712, "grad_norm": 20.608409881591797, "learning_rate": 4.408444444444444e-05, "loss": 1.0057, "step": 2589 }, { "epoch": 20.72, "grad_norm": 30.415189743041992, "learning_rate": 4.4080000000000005e-05, "loss": 1.1063, "step": 2590 }, { "epoch": 20.728, "grad_norm": 29.12427520751953, "learning_rate": 4.407555555555556e-05, "loss": 1.7249, "step": 2591 }, { "epoch": 20.736, "grad_norm": 317.8456115722656, "learning_rate": 4.4071111111111115e-05, "loss": 1.7809, "step": 2592 }, { "epoch": 20.744, "grad_norm": 38.17705154418945, "learning_rate": 4.406666666666667e-05, "loss": 0.9378, "step": 2593 }, { "epoch": 20.752, "grad_norm": 31.3406982421875, "learning_rate": 4.4062222222222224e-05, "loss": 1.4134, "step": 2594 }, { "epoch": 20.76, "grad_norm": 24.579662322998047, "learning_rate": 4.405777777777778e-05, "loss": 1.3451, "step": 2595 }, { "epoch": 20.768, "grad_norm": 42.53696060180664, "learning_rate": 4.4053333333333334e-05, "loss": 1.4306, "step": 2596 }, { "epoch": 20.776, "grad_norm": 32.98659133911133, "learning_rate": 4.404888888888889e-05, "loss": 1.0179, "step": 2597 }, { "epoch": 20.784, "grad_norm": 31.716228485107422, "learning_rate": 4.404444444444445e-05, "loss": 1.0523, "step": 2598 }, { "epoch": 20.792, "grad_norm": 43.84135437011719, "learning_rate": 4.4040000000000005e-05, "loss": 1.2572, "step": 2599 }, { "epoch": 20.8, "grad_norm": 46.74502944946289, "learning_rate": 4.403555555555556e-05, "loss": 1.2083, "step": 2600 }, { "epoch": 20.808, "grad_norm": 51.81180953979492, "learning_rate": 4.4031111111111115e-05, "loss": 1.28, "step": 2601 }, { "epoch": 20.816, "grad_norm": 32.87006759643555, "learning_rate": 4.402666666666666e-05, "loss": 1.4516, "step": 2602 }, { "epoch": 20.824, "grad_norm": 55.29604721069336, "learning_rate": 
4.4022222222222225e-05, "loss": 1.0344, "step": 2603 }, { "epoch": 20.832, "grad_norm": 39.59788131713867, "learning_rate": 4.401777777777778e-05, "loss": 1.1488, "step": 2604 }, { "epoch": 20.84, "grad_norm": 30.81812286376953, "learning_rate": 4.4013333333333334e-05, "loss": 1.2217, "step": 2605 }, { "epoch": 20.848, "grad_norm": 23.90768051147461, "learning_rate": 4.400888888888889e-05, "loss": 1.1453, "step": 2606 }, { "epoch": 20.856, "grad_norm": 39.58830642700195, "learning_rate": 4.400444444444445e-05, "loss": 1.9846, "step": 2607 }, { "epoch": 20.864, "grad_norm": 31.64342498779297, "learning_rate": 4.4000000000000006e-05, "loss": 1.318, "step": 2608 }, { "epoch": 20.872, "grad_norm": 30.889984130859375, "learning_rate": 4.3995555555555554e-05, "loss": 1.7816, "step": 2609 }, { "epoch": 20.88, "grad_norm": 28.585424423217773, "learning_rate": 4.399111111111111e-05, "loss": 1.3263, "step": 2610 }, { "epoch": 20.888, "grad_norm": 48.970359802246094, "learning_rate": 4.398666666666667e-05, "loss": 0.8284, "step": 2611 }, { "epoch": 20.896, "grad_norm": 70.18790435791016, "learning_rate": 4.3982222222222225e-05, "loss": 1.1381, "step": 2612 }, { "epoch": 20.904, "grad_norm": 33.96465301513672, "learning_rate": 4.397777777777778e-05, "loss": 1.4616, "step": 2613 }, { "epoch": 20.912, "grad_norm": 23.559160232543945, "learning_rate": 4.3973333333333335e-05, "loss": 2.15, "step": 2614 }, { "epoch": 20.92, "grad_norm": 23.37265968322754, "learning_rate": 4.396888888888889e-05, "loss": 1.4565, "step": 2615 }, { "epoch": 20.928, "grad_norm": 52.41451644897461, "learning_rate": 4.3964444444444444e-05, "loss": 1.8163, "step": 2616 }, { "epoch": 20.936, "grad_norm": 60.85514450073242, "learning_rate": 4.396e-05, "loss": 0.997, "step": 2617 }, { "epoch": 20.944, "grad_norm": 18.028566360473633, "learning_rate": 4.3955555555555554e-05, "loss": 1.0783, "step": 2618 }, { "epoch": 20.951999999999998, "grad_norm": 17.437894821166992, "learning_rate": 4.3951111111111116e-05, 
"loss": 1.2273, "step": 2619 }, { "epoch": 20.96, "grad_norm": 255.23553466796875, "learning_rate": 4.394666666666667e-05, "loss": 1.5374, "step": 2620 }, { "epoch": 20.968, "grad_norm": 37.185630798339844, "learning_rate": 4.3942222222222225e-05, "loss": 1.3849, "step": 2621 }, { "epoch": 20.976, "grad_norm": 28.861419677734375, "learning_rate": 4.393777777777778e-05, "loss": 1.2411, "step": 2622 }, { "epoch": 20.984, "grad_norm": 15.492044448852539, "learning_rate": 4.3933333333333335e-05, "loss": 1.5724, "step": 2623 }, { "epoch": 20.992, "grad_norm": 32.598453521728516, "learning_rate": 4.392888888888889e-05, "loss": 1.3377, "step": 2624 }, { "epoch": 21.0, "grad_norm": 36.64162826538086, "learning_rate": 4.3924444444444445e-05, "loss": 1.3382, "step": 2625 }, { "epoch": 21.0, "eval_loss": 1.3298139572143555, "eval_map": 0.2893, "eval_map_50": 0.5529, "eval_map_75": 0.2565, "eval_map_Coverall": 0.5625, "eval_map_Face_Shield": 0.1962, "eval_map_Gloves": 0.2648, "eval_map_Goggles": 0.0594, "eval_map_Mask": 0.3635, "eval_map_large": 0.3348, "eval_map_medium": 0.2387, "eval_map_small": -1.0, "eval_mar_1": 0.2535, "eval_mar_10": 0.528, "eval_mar_100": 0.5497, "eval_mar_100_Coverall": 0.6867, "eval_mar_100_Face_Shield": 0.7, "eval_mar_100_Gloves": 0.4344, "eval_mar_100_Goggles": 0.4563, "eval_mar_100_Mask": 0.4712, "eval_mar_large": 0.6171, "eval_mar_medium": 0.4446, "eval_mar_small": -1.0, "eval_runtime": 3.6782, "eval_samples_per_second": 7.884, "eval_steps_per_second": 0.544, "step": 2625 }, { "epoch": 21.008, "grad_norm": 49.540470123291016, "learning_rate": 4.392e-05, "loss": 1.3453, "step": 2626 }, { "epoch": 21.016, "grad_norm": 46.18379592895508, "learning_rate": 4.391555555555556e-05, "loss": 1.5516, "step": 2627 }, { "epoch": 21.024, "grad_norm": 18.462533950805664, "learning_rate": 4.3911111111111116e-05, "loss": 1.7646, "step": 2628 }, { "epoch": 21.032, "grad_norm": 21.017793655395508, "learning_rate": 4.390666666666667e-05, "loss": 1.193, "step": 2629 
}, { "epoch": 21.04, "grad_norm": 36.558128356933594, "learning_rate": 4.390222222222222e-05, "loss": 1.3152, "step": 2630 }, { "epoch": 21.048, "grad_norm": 80.16055297851562, "learning_rate": 4.389777777777778e-05, "loss": 0.944, "step": 2631 }, { "epoch": 21.056, "grad_norm": 31.782827377319336, "learning_rate": 4.3893333333333335e-05, "loss": 1.3414, "step": 2632 }, { "epoch": 21.064, "grad_norm": 29.060382843017578, "learning_rate": 4.388888888888889e-05, "loss": 2.0786, "step": 2633 }, { "epoch": 21.072, "grad_norm": 31.11857032775879, "learning_rate": 4.3884444444444445e-05, "loss": 0.9791, "step": 2634 }, { "epoch": 21.08, "grad_norm": 29.90601348876953, "learning_rate": 4.388000000000001e-05, "loss": 1.4184, "step": 2635 }, { "epoch": 21.088, "grad_norm": 48.43864822387695, "learning_rate": 4.387555555555556e-05, "loss": 1.2982, "step": 2636 }, { "epoch": 21.096, "grad_norm": 27.320680618286133, "learning_rate": 4.387111111111111e-05, "loss": 1.1478, "step": 2637 }, { "epoch": 21.104, "grad_norm": 71.33726501464844, "learning_rate": 4.3866666666666665e-05, "loss": 1.053, "step": 2638 }, { "epoch": 21.112, "grad_norm": 19.80095672607422, "learning_rate": 4.3862222222222226e-05, "loss": 1.8385, "step": 2639 }, { "epoch": 21.12, "grad_norm": 159.21713256835938, "learning_rate": 4.385777777777778e-05, "loss": 1.3969, "step": 2640 }, { "epoch": 21.128, "grad_norm": 32.97911834716797, "learning_rate": 4.3853333333333336e-05, "loss": 1.4808, "step": 2641 }, { "epoch": 21.136, "grad_norm": 44.09914016723633, "learning_rate": 4.384888888888889e-05, "loss": 1.0083, "step": 2642 }, { "epoch": 21.144, "grad_norm": 45.74631118774414, "learning_rate": 4.384444444444445e-05, "loss": 1.2219, "step": 2643 }, { "epoch": 21.152, "grad_norm": 25.79800033569336, "learning_rate": 4.384e-05, "loss": 1.4535, "step": 2644 }, { "epoch": 21.16, "grad_norm": 49.96670913696289, "learning_rate": 4.3835555555555555e-05, "loss": 1.0077, "step": 2645 }, { "epoch": 21.168, "grad_norm": 
56.08883285522461, "learning_rate": 4.383111111111111e-05, "loss": 1.1974, "step": 2646 }, { "epoch": 21.176, "grad_norm": 16.097440719604492, "learning_rate": 4.382666666666667e-05, "loss": 1.0263, "step": 2647 }, { "epoch": 21.184, "grad_norm": 38.923770904541016, "learning_rate": 4.3822222222222227e-05, "loss": 2.3621, "step": 2648 }, { "epoch": 21.192, "grad_norm": 22.720199584960938, "learning_rate": 4.381777777777778e-05, "loss": 1.0852, "step": 2649 }, { "epoch": 21.2, "grad_norm": 21.193851470947266, "learning_rate": 4.3813333333333336e-05, "loss": 1.1846, "step": 2650 }, { "epoch": 21.208, "grad_norm": 25.944591522216797, "learning_rate": 4.380888888888889e-05, "loss": 1.0313, "step": 2651 }, { "epoch": 21.216, "grad_norm": 45.34355545043945, "learning_rate": 4.3804444444444446e-05, "loss": 1.2657, "step": 2652 }, { "epoch": 21.224, "grad_norm": 69.53607177734375, "learning_rate": 4.38e-05, "loss": 1.6452, "step": 2653 }, { "epoch": 21.232, "grad_norm": 33.89225769042969, "learning_rate": 4.3795555555555556e-05, "loss": 1.4936, "step": 2654 }, { "epoch": 21.24, "grad_norm": 31.167743682861328, "learning_rate": 4.379111111111111e-05, "loss": 1.2305, "step": 2655 }, { "epoch": 21.248, "grad_norm": 32.81672286987305, "learning_rate": 4.378666666666667e-05, "loss": 1.2629, "step": 2656 }, { "epoch": 21.256, "grad_norm": 37.84603500366211, "learning_rate": 4.378222222222223e-05, "loss": 1.2391, "step": 2657 }, { "epoch": 21.264, "grad_norm": 24.51150131225586, "learning_rate": 4.377777777777778e-05, "loss": 1.1073, "step": 2658 }, { "epoch": 21.272, "grad_norm": 29.057018280029297, "learning_rate": 4.377333333333333e-05, "loss": 1.193, "step": 2659 }, { "epoch": 21.28, "grad_norm": 22.766498565673828, "learning_rate": 4.376888888888889e-05, "loss": 1.6092, "step": 2660 }, { "epoch": 21.288, "grad_norm": 43.0967903137207, "learning_rate": 4.3764444444444446e-05, "loss": 1.114, "step": 2661 }, { "epoch": 21.296, "grad_norm": 32.25749206542969, "learning_rate": 
4.376e-05, "loss": 1.4039, "step": 2662 }, { "epoch": 21.304, "grad_norm": 29.956157684326172, "learning_rate": 4.3755555555555556e-05, "loss": 1.5777, "step": 2663 }, { "epoch": 21.312, "grad_norm": 19.105459213256836, "learning_rate": 4.375111111111112e-05, "loss": 1.2464, "step": 2664 }, { "epoch": 21.32, "grad_norm": 39.72114944458008, "learning_rate": 4.374666666666667e-05, "loss": 1.087, "step": 2665 }, { "epoch": 21.328, "grad_norm": 78.39178466796875, "learning_rate": 4.374222222222222e-05, "loss": 1.3839, "step": 2666 }, { "epoch": 21.336, "grad_norm": 33.58287811279297, "learning_rate": 4.3737777777777775e-05, "loss": 2.0443, "step": 2667 }, { "epoch": 21.344, "grad_norm": 22.179262161254883, "learning_rate": 4.373333333333334e-05, "loss": 1.396, "step": 2668 }, { "epoch": 21.352, "grad_norm": 33.78181838989258, "learning_rate": 4.372888888888889e-05, "loss": 1.2807, "step": 2669 }, { "epoch": 21.36, "grad_norm": 37.31203079223633, "learning_rate": 4.372444444444445e-05, "loss": 1.2384, "step": 2670 }, { "epoch": 21.368, "grad_norm": 32.30540466308594, "learning_rate": 4.372e-05, "loss": 1.3308, "step": 2671 }, { "epoch": 21.376, "grad_norm": 41.32898712158203, "learning_rate": 4.3715555555555556e-05, "loss": 1.2163, "step": 2672 }, { "epoch": 21.384, "grad_norm": 56.98036575317383, "learning_rate": 4.371111111111111e-05, "loss": 1.2975, "step": 2673 }, { "epoch": 21.392, "grad_norm": 22.735855102539062, "learning_rate": 4.3706666666666666e-05, "loss": 1.1378, "step": 2674 }, { "epoch": 21.4, "grad_norm": 17.750818252563477, "learning_rate": 4.370222222222222e-05, "loss": 1.2198, "step": 2675 }, { "epoch": 21.408, "grad_norm": 19.69985008239746, "learning_rate": 4.369777777777778e-05, "loss": 1.4171, "step": 2676 }, { "epoch": 21.416, "grad_norm": 21.256389617919922, "learning_rate": 4.369333333333334e-05, "loss": 1.4136, "step": 2677 }, { "epoch": 21.424, "grad_norm": 58.65774154663086, "learning_rate": 4.368888888888889e-05, "loss": 1.0434, "step": 2678 
}, { "epoch": 21.432, "grad_norm": 29.45351791381836, "learning_rate": 4.368444444444445e-05, "loss": 1.3695, "step": 2679 }, { "epoch": 21.44, "grad_norm": 25.89289665222168, "learning_rate": 4.368e-05, "loss": 1.321, "step": 2680 }, { "epoch": 21.448, "grad_norm": 33.9380989074707, "learning_rate": 4.367555555555556e-05, "loss": 1.2034, "step": 2681 }, { "epoch": 21.456, "grad_norm": 55.605098724365234, "learning_rate": 4.367111111111111e-05, "loss": 1.1568, "step": 2682 }, { "epoch": 21.464, "grad_norm": 31.956172943115234, "learning_rate": 4.3666666666666666e-05, "loss": 1.0201, "step": 2683 }, { "epoch": 21.472, "grad_norm": 28.76049041748047, "learning_rate": 4.366222222222223e-05, "loss": 1.4002, "step": 2684 }, { "epoch": 21.48, "grad_norm": 26.131803512573242, "learning_rate": 4.365777777777778e-05, "loss": 1.0493, "step": 2685 }, { "epoch": 21.488, "grad_norm": 40.398155212402344, "learning_rate": 4.365333333333334e-05, "loss": 3.3441, "step": 2686 }, { "epoch": 21.496, "grad_norm": 29.404207229614258, "learning_rate": 4.3648888888888886e-05, "loss": 1.3739, "step": 2687 }, { "epoch": 21.504, "grad_norm": 23.941503524780273, "learning_rate": 4.364444444444445e-05, "loss": 2.5687, "step": 2688 }, { "epoch": 21.512, "grad_norm": 36.355682373046875, "learning_rate": 4.364e-05, "loss": 1.0604, "step": 2689 }, { "epoch": 21.52, "grad_norm": 50.76600646972656, "learning_rate": 4.363555555555556e-05, "loss": 1.1013, "step": 2690 }, { "epoch": 21.528, "grad_norm": 26.547828674316406, "learning_rate": 4.363111111111111e-05, "loss": 1.4009, "step": 2691 }, { "epoch": 21.536, "grad_norm": 17.530920028686523, "learning_rate": 4.3626666666666674e-05, "loss": 1.216, "step": 2692 }, { "epoch": 21.544, "grad_norm": 31.812063217163086, "learning_rate": 4.362222222222223e-05, "loss": 1.2988, "step": 2693 }, { "epoch": 21.552, "grad_norm": 39.93449783325195, "learning_rate": 4.3617777777777777e-05, "loss": 1.543, "step": 2694 }, { "epoch": 21.56, "grad_norm": 
28.788990020751953, "learning_rate": 4.361333333333333e-05, "loss": 1.3415, "step": 2695 }, { "epoch": 21.568, "grad_norm": 20.809450149536133, "learning_rate": 4.360888888888889e-05, "loss": 1.4816, "step": 2696 }, { "epoch": 21.576, "grad_norm": 66.54503631591797, "learning_rate": 4.360444444444445e-05, "loss": 1.08, "step": 2697 }, { "epoch": 21.584, "grad_norm": 29.221935272216797, "learning_rate": 4.36e-05, "loss": 1.5513, "step": 2698 }, { "epoch": 21.592, "grad_norm": 35.61470413208008, "learning_rate": 4.359555555555556e-05, "loss": 1.1375, "step": 2699 }, { "epoch": 21.6, "grad_norm": 22.00135040283203, "learning_rate": 4.359111111111112e-05, "loss": 1.1814, "step": 2700 }, { "epoch": 21.608, "grad_norm": 38.814456939697266, "learning_rate": 4.358666666666667e-05, "loss": 1.3299, "step": 2701 }, { "epoch": 21.616, "grad_norm": 22.775230407714844, "learning_rate": 4.358222222222222e-05, "loss": 1.2181, "step": 2702 }, { "epoch": 21.624, "grad_norm": 23.627933502197266, "learning_rate": 4.357777777777778e-05, "loss": 1.2671, "step": 2703 }, { "epoch": 21.632, "grad_norm": 24.332563400268555, "learning_rate": 4.357333333333333e-05, "loss": 1.613, "step": 2704 }, { "epoch": 21.64, "grad_norm": 24.3319149017334, "learning_rate": 4.356888888888889e-05, "loss": 0.8921, "step": 2705 }, { "epoch": 21.648, "grad_norm": 35.84003829956055, "learning_rate": 4.356444444444445e-05, "loss": 1.278, "step": 2706 }, { "epoch": 21.656, "grad_norm": 41.35940170288086, "learning_rate": 4.356e-05, "loss": 1.4634, "step": 2707 }, { "epoch": 21.664, "grad_norm": 27.835464477539062, "learning_rate": 4.355555555555556e-05, "loss": 1.5571, "step": 2708 }, { "epoch": 21.672, "grad_norm": 22.184473037719727, "learning_rate": 4.355111111111111e-05, "loss": 2.4164, "step": 2709 }, { "epoch": 21.68, "grad_norm": 30.2418212890625, "learning_rate": 4.354666666666667e-05, "loss": 1.0716, "step": 2710 }, { "epoch": 21.688, "grad_norm": 46.845401763916016, "learning_rate": 
4.354222222222222e-05, "loss": 1.3856, "step": 2711 }, { "epoch": 21.696, "grad_norm": 24.46016502380371, "learning_rate": 4.353777777777778e-05, "loss": 1.2995, "step": 2712 }, { "epoch": 21.704, "grad_norm": 32.59542465209961, "learning_rate": 4.353333333333334e-05, "loss": 1.3653, "step": 2713 }, { "epoch": 21.712, "grad_norm": 25.859088897705078, "learning_rate": 4.3528888888888894e-05, "loss": 1.9232, "step": 2714 }, { "epoch": 21.72, "grad_norm": 26.718994140625, "learning_rate": 4.352444444444445e-05, "loss": 1.3982, "step": 2715 }, { "epoch": 21.728, "grad_norm": 32.85890579223633, "learning_rate": 4.352e-05, "loss": 0.9959, "step": 2716 }, { "epoch": 21.736, "grad_norm": 24.476150512695312, "learning_rate": 4.351555555555556e-05, "loss": 1.5794, "step": 2717 }, { "epoch": 21.744, "grad_norm": 97.77742004394531, "learning_rate": 4.351111111111111e-05, "loss": 1.2243, "step": 2718 }, { "epoch": 21.752, "grad_norm": 19.91697120666504, "learning_rate": 4.350666666666667e-05, "loss": 1.3605, "step": 2719 }, { "epoch": 21.76, "grad_norm": 24.9511775970459, "learning_rate": 4.350222222222222e-05, "loss": 1.1955, "step": 2720 }, { "epoch": 21.768, "grad_norm": 26.434492111206055, "learning_rate": 4.3497777777777784e-05, "loss": 1.337, "step": 2721 }, { "epoch": 21.776, "grad_norm": 37.015419006347656, "learning_rate": 4.349333333333334e-05, "loss": 1.2497, "step": 2722 }, { "epoch": 21.784, "grad_norm": 24.892248153686523, "learning_rate": 4.348888888888889e-05, "loss": 1.6784, "step": 2723 }, { "epoch": 21.792, "grad_norm": 15.861111640930176, "learning_rate": 4.348444444444444e-05, "loss": 0.8268, "step": 2724 }, { "epoch": 21.8, "grad_norm": 29.82301139831543, "learning_rate": 4.3480000000000004e-05, "loss": 1.3273, "step": 2725 }, { "epoch": 21.808, "grad_norm": 62.4602165222168, "learning_rate": 4.347555555555556e-05, "loss": 1.0545, "step": 2726 }, { "epoch": 21.816, "grad_norm": 27.883440017700195, "learning_rate": 4.3471111111111114e-05, "loss": 0.9671, 
"step": 2727 }, { "epoch": 21.824, "grad_norm": 27.581655502319336, "learning_rate": 4.346666666666667e-05, "loss": 1.0697, "step": 2728 }, { "epoch": 21.832, "grad_norm": 27.286724090576172, "learning_rate": 4.346222222222222e-05, "loss": 1.2637, "step": 2729 }, { "epoch": 21.84, "grad_norm": 37.971099853515625, "learning_rate": 4.345777777777778e-05, "loss": 1.1986, "step": 2730 }, { "epoch": 21.848, "grad_norm": 24.952781677246094, "learning_rate": 4.345333333333333e-05, "loss": 0.9637, "step": 2731 }, { "epoch": 21.856, "grad_norm": 42.228492736816406, "learning_rate": 4.344888888888889e-05, "loss": 1.1495, "step": 2732 }, { "epoch": 21.864, "grad_norm": 27.326040267944336, "learning_rate": 4.344444444444445e-05, "loss": 1.0325, "step": 2733 }, { "epoch": 21.872, "grad_norm": 22.378393173217773, "learning_rate": 4.3440000000000004e-05, "loss": 1.0507, "step": 2734 }, { "epoch": 21.88, "grad_norm": 28.99925422668457, "learning_rate": 4.343555555555556e-05, "loss": 1.2985, "step": 2735 }, { "epoch": 21.888, "grad_norm": 20.68667984008789, "learning_rate": 4.3431111111111114e-05, "loss": 0.7716, "step": 2736 }, { "epoch": 21.896, "grad_norm": 34.24416732788086, "learning_rate": 4.342666666666667e-05, "loss": 0.9703, "step": 2737 }, { "epoch": 21.904, "grad_norm": 43.09847640991211, "learning_rate": 4.3422222222222224e-05, "loss": 1.0941, "step": 2738 }, { "epoch": 21.912, "grad_norm": 22.39882469177246, "learning_rate": 4.341777777777778e-05, "loss": 1.0961, "step": 2739 }, { "epoch": 21.92, "grad_norm": 16.663618087768555, "learning_rate": 4.341333333333333e-05, "loss": 0.9483, "step": 2740 }, { "epoch": 21.928, "grad_norm": 21.754684448242188, "learning_rate": 4.3408888888888895e-05, "loss": 1.2348, "step": 2741 }, { "epoch": 21.936, "grad_norm": 17.754261016845703, "learning_rate": 4.340444444444445e-05, "loss": 1.276, "step": 2742 }, { "epoch": 21.944, "grad_norm": 36.997467041015625, "learning_rate": 4.3400000000000005e-05, "loss": 1.0608, "step": 2743 }, { 
"epoch": 21.951999999999998, "grad_norm": 58.43959426879883, "learning_rate": 4.339555555555555e-05, "loss": 1.8558, "step": 2744 }, { "epoch": 21.96, "grad_norm": 425.7507629394531, "learning_rate": 4.3391111111111114e-05, "loss": 1.155, "step": 2745 }, { "epoch": 21.968, "grad_norm": 57.1981201171875, "learning_rate": 4.338666666666667e-05, "loss": 1.481, "step": 2746 }, { "epoch": 21.976, "grad_norm": 21.139497756958008, "learning_rate": 4.3382222222222224e-05, "loss": 1.1267, "step": 2747 }, { "epoch": 21.984, "grad_norm": 24.719711303710938, "learning_rate": 4.337777777777778e-05, "loss": 1.4854, "step": 2748 }, { "epoch": 21.992, "grad_norm": 15.869653701782227, "learning_rate": 4.337333333333334e-05, "loss": 0.9746, "step": 2749 }, { "epoch": 22.0, "grad_norm": 23.14392852783203, "learning_rate": 4.3368888888888895e-05, "loss": 0.656, "step": 2750 }, { "epoch": 22.0, "eval_loss": 1.2556428909301758, "eval_map": 0.3161, "eval_map_50": 0.6078, "eval_map_75": 0.3123, "eval_map_Coverall": 0.5449, "eval_map_Face_Shield": 0.3133, "eval_map_Gloves": 0.2745, "eval_map_Goggles": 0.1306, "eval_map_Mask": 0.3175, "eval_map_large": 0.3806, "eval_map_medium": 0.1599, "eval_map_small": -1.0, "eval_mar_1": 0.2938, "eval_mar_10": 0.4808, "eval_mar_100": 0.4931, "eval_mar_100_Coverall": 0.7067, "eval_mar_100_Face_Shield": 0.5588, "eval_mar_100_Gloves": 0.4115, "eval_mar_100_Goggles": 0.375, "eval_mar_100_Mask": 0.4135, "eval_mar_large": 0.6007, "eval_mar_medium": 0.2256, "eval_mar_small": -1.0, "eval_runtime": 3.6367, "eval_samples_per_second": 7.974, "eval_steps_per_second": 0.55, "step": 2750 }, { "epoch": 22.008, "grad_norm": 27.64745330810547, "learning_rate": 4.336444444444444e-05, "loss": 1.2585, "step": 2751 }, { "epoch": 22.016, "grad_norm": 20.2198486328125, "learning_rate": 4.336e-05, "loss": 1.2074, "step": 2752 }, { "epoch": 22.024, "grad_norm": 38.58494186401367, "learning_rate": 4.335555555555556e-05, "loss": 0.9982, "step": 2753 }, { "epoch": 22.032, 
"grad_norm": 18.329753875732422, "learning_rate": 4.3351111111111115e-05, "loss": 1.438, "step": 2754 }, { "epoch": 22.04, "grad_norm": 15.37198543548584, "learning_rate": 4.334666666666667e-05, "loss": 1.2642, "step": 2755 }, { "epoch": 22.048, "grad_norm": 22.866044998168945, "learning_rate": 4.3342222222222224e-05, "loss": 1.4382, "step": 2756 }, { "epoch": 22.056, "grad_norm": 19.080474853515625, "learning_rate": 4.333777777777778e-05, "loss": 0.8758, "step": 2757 }, { "epoch": 22.064, "grad_norm": 19.657289505004883, "learning_rate": 4.3333333333333334e-05, "loss": 1.4255, "step": 2758 }, { "epoch": 22.072, "grad_norm": 27.9865779876709, "learning_rate": 4.332888888888889e-05, "loss": 1.3271, "step": 2759 }, { "epoch": 22.08, "grad_norm": 27.133901596069336, "learning_rate": 4.3324444444444444e-05, "loss": 0.9373, "step": 2760 }, { "epoch": 22.088, "grad_norm": 20.166982650756836, "learning_rate": 4.332e-05, "loss": 1.0816, "step": 2761 }, { "epoch": 22.096, "grad_norm": 27.750614166259766, "learning_rate": 4.331555555555556e-05, "loss": 0.8717, "step": 2762 }, { "epoch": 22.104, "grad_norm": 28.72871208190918, "learning_rate": 4.3311111111111115e-05, "loss": 0.9788, "step": 2763 }, { "epoch": 22.112, "grad_norm": 32.370174407958984, "learning_rate": 4.330666666666667e-05, "loss": 1.3266, "step": 2764 }, { "epoch": 22.12, "grad_norm": 14.309309005737305, "learning_rate": 4.3302222222222225e-05, "loss": 1.0692, "step": 2765 }, { "epoch": 22.128, "grad_norm": 31.999927520751953, "learning_rate": 4.329777777777778e-05, "loss": 1.3408, "step": 2766 }, { "epoch": 22.136, "grad_norm": 38.06651306152344, "learning_rate": 4.3293333333333334e-05, "loss": 1.4419, "step": 2767 }, { "epoch": 22.144, "grad_norm": 26.01654052734375, "learning_rate": 4.328888888888889e-05, "loss": 1.2019, "step": 2768 }, { "epoch": 22.152, "grad_norm": 23.76739501953125, "learning_rate": 4.3284444444444444e-05, "loss": 0.9901, "step": 2769 }, { "epoch": 22.16, "grad_norm": 22.67184066772461, 
"learning_rate": 4.3280000000000006e-05, "loss": 1.1831, "step": 2770 }, { "epoch": 22.168, "grad_norm": 14.888052940368652, "learning_rate": 4.327555555555556e-05, "loss": 0.9274, "step": 2771 }, { "epoch": 22.176, "grad_norm": 37.790096282958984, "learning_rate": 4.3271111111111115e-05, "loss": 1.1771, "step": 2772 }, { "epoch": 22.184, "grad_norm": 64.69725036621094, "learning_rate": 4.3266666666666664e-05, "loss": 1.3001, "step": 2773 }, { "epoch": 22.192, "grad_norm": 38.904300689697266, "learning_rate": 4.3262222222222225e-05, "loss": 1.3629, "step": 2774 }, { "epoch": 22.2, "grad_norm": 25.057226181030273, "learning_rate": 4.325777777777778e-05, "loss": 1.0976, "step": 2775 }, { "epoch": 22.208, "grad_norm": 30.751144409179688, "learning_rate": 4.3253333333333335e-05, "loss": 1.1752, "step": 2776 }, { "epoch": 22.216, "grad_norm": 24.891267776489258, "learning_rate": 4.324888888888889e-05, "loss": 0.976, "step": 2777 }, { "epoch": 22.224, "grad_norm": 22.181324005126953, "learning_rate": 4.324444444444445e-05, "loss": 0.7642, "step": 2778 }, { "epoch": 22.232, "grad_norm": 35.58783721923828, "learning_rate": 4.324e-05, "loss": 1.016, "step": 2779 }, { "epoch": 22.24, "grad_norm": 78.28234100341797, "learning_rate": 4.3235555555555554e-05, "loss": 1.0555, "step": 2780 }, { "epoch": 22.248, "grad_norm": 20.41179656982422, "learning_rate": 4.323111111111111e-05, "loss": 1.3422, "step": 2781 }, { "epoch": 22.256, "grad_norm": 24.378276824951172, "learning_rate": 4.322666666666667e-05, "loss": 1.3429, "step": 2782 }, { "epoch": 22.264, "grad_norm": 30.694204330444336, "learning_rate": 4.3222222222222226e-05, "loss": 1.1401, "step": 2783 }, { "epoch": 22.272, "grad_norm": 43.588722229003906, "learning_rate": 4.321777777777778e-05, "loss": 1.3817, "step": 2784 }, { "epoch": 22.28, "grad_norm": 29.185453414916992, "learning_rate": 4.3213333333333335e-05, "loss": 1.0331, "step": 2785 }, { "epoch": 22.288, "grad_norm": 19.90559959411621, "learning_rate": 
4.320888888888889e-05, "loss": 1.0602, "step": 2786 }, { "epoch": 22.296, "grad_norm": 33.431034088134766, "learning_rate": 4.3204444444444445e-05, "loss": 2.6584, "step": 2787 }, { "epoch": 22.304, "grad_norm": 38.0379753112793, "learning_rate": 4.32e-05, "loss": 1.2395, "step": 2788 }, { "epoch": 22.312, "grad_norm": 37.20198059082031, "learning_rate": 4.3195555555555555e-05, "loss": 1.3004, "step": 2789 }, { "epoch": 22.32, "grad_norm": 208.11688232421875, "learning_rate": 4.3191111111111116e-05, "loss": 1.0698, "step": 2790 }, { "epoch": 22.328, "grad_norm": 54.62007141113281, "learning_rate": 4.318666666666667e-05, "loss": 1.6239, "step": 2791 }, { "epoch": 22.336, "grad_norm": 38.82936096191406, "learning_rate": 4.3182222222222226e-05, "loss": 2.9024, "step": 2792 }, { "epoch": 22.344, "grad_norm": 32.617027282714844, "learning_rate": 4.317777777777778e-05, "loss": 1.2523, "step": 2793 }, { "epoch": 22.352, "grad_norm": 19.380216598510742, "learning_rate": 4.3173333333333336e-05, "loss": 1.271, "step": 2794 }, { "epoch": 22.36, "grad_norm": 39.615379333496094, "learning_rate": 4.316888888888889e-05, "loss": 1.1553, "step": 2795 }, { "epoch": 22.368, "grad_norm": 26.595386505126953, "learning_rate": 4.3164444444444445e-05, "loss": 1.1072, "step": 2796 }, { "epoch": 22.376, "grad_norm": 17.291852951049805, "learning_rate": 4.316e-05, "loss": 1.0344, "step": 2797 }, { "epoch": 22.384, "grad_norm": 34.94704055786133, "learning_rate": 4.315555555555556e-05, "loss": 1.2966, "step": 2798 }, { "epoch": 22.392, "grad_norm": 20.19791603088379, "learning_rate": 4.3151111111111117e-05, "loss": 1.3256, "step": 2799 }, { "epoch": 22.4, "grad_norm": 55.774898529052734, "learning_rate": 4.314666666666667e-05, "loss": 1.651, "step": 2800 }, { "epoch": 22.408, "grad_norm": 45.00945281982422, "learning_rate": 4.314222222222222e-05, "loss": 2.3388, "step": 2801 }, { "epoch": 22.416, "grad_norm": 23.96236228942871, "learning_rate": 4.313777777777778e-05, "loss": 1.1624, "step": 
2802 }, { "epoch": 22.424, "grad_norm": 29.339405059814453, "learning_rate": 4.3133333333333336e-05, "loss": 1.0551, "step": 2803 }, { "epoch": 22.432, "grad_norm": 33.45759582519531, "learning_rate": 4.312888888888889e-05, "loss": 0.8314, "step": 2804 }, { "epoch": 22.44, "grad_norm": 14.942334175109863, "learning_rate": 4.3124444444444446e-05, "loss": 1.1599, "step": 2805 }, { "epoch": 22.448, "grad_norm": 31.034372329711914, "learning_rate": 4.312000000000001e-05, "loss": 0.9506, "step": 2806 }, { "epoch": 22.456, "grad_norm": 29.849491119384766, "learning_rate": 4.311555555555556e-05, "loss": 0.9719, "step": 2807 }, { "epoch": 22.464, "grad_norm": 21.76592445373535, "learning_rate": 4.311111111111111e-05, "loss": 1.2542, "step": 2808 }, { "epoch": 22.472, "grad_norm": 46.00763702392578, "learning_rate": 4.3106666666666665e-05, "loss": 1.2465, "step": 2809 }, { "epoch": 22.48, "grad_norm": 39.42862319946289, "learning_rate": 4.310222222222222e-05, "loss": 1.2254, "step": 2810 }, { "epoch": 22.488, "grad_norm": 43.63135528564453, "learning_rate": 4.309777777777778e-05, "loss": 1.4427, "step": 2811 }, { "epoch": 22.496, "grad_norm": 24.313812255859375, "learning_rate": 4.3093333333333336e-05, "loss": 1.0492, "step": 2812 }, { "epoch": 22.504, "grad_norm": 64.2651596069336, "learning_rate": 4.308888888888889e-05, "loss": 1.1278, "step": 2813 }, { "epoch": 22.512, "grad_norm": 31.57308006286621, "learning_rate": 4.3084444444444446e-05, "loss": 1.1691, "step": 2814 }, { "epoch": 22.52, "grad_norm": 36.637393951416016, "learning_rate": 4.308e-05, "loss": 1.2399, "step": 2815 }, { "epoch": 22.528, "grad_norm": 32.49770736694336, "learning_rate": 4.3075555555555556e-05, "loss": 1.4488, "step": 2816 }, { "epoch": 22.536, "grad_norm": 35.520328521728516, "learning_rate": 4.307111111111111e-05, "loss": 1.3554, "step": 2817 }, { "epoch": 22.544, "grad_norm": 16.95859718322754, "learning_rate": 4.3066666666666665e-05, "loss": 1.3328, "step": 2818 }, { "epoch": 22.552, 
"grad_norm": 24.56216812133789, "learning_rate": 4.306222222222223e-05, "loss": 1.3498, "step": 2819 }, { "epoch": 22.56, "grad_norm": 22.83340072631836, "learning_rate": 4.305777777777778e-05, "loss": 0.8645, "step": 2820 }, { "epoch": 22.568, "grad_norm": 21.02724838256836, "learning_rate": 4.305333333333334e-05, "loss": 1.0941, "step": 2821 }, { "epoch": 22.576, "grad_norm": 50.008872985839844, "learning_rate": 4.304888888888889e-05, "loss": 1.4975, "step": 2822 }, { "epoch": 22.584, "grad_norm": 28.180686950683594, "learning_rate": 4.3044444444444446e-05, "loss": 1.2533, "step": 2823 }, { "epoch": 22.592, "grad_norm": 16.057144165039062, "learning_rate": 4.304e-05, "loss": 1.1406, "step": 2824 }, { "epoch": 22.6, "grad_norm": 64.83454895019531, "learning_rate": 4.3035555555555556e-05, "loss": 1.3899, "step": 2825 }, { "epoch": 22.608, "grad_norm": 28.054628372192383, "learning_rate": 4.303111111111111e-05, "loss": 1.7064, "step": 2826 }, { "epoch": 22.616, "grad_norm": 41.883758544921875, "learning_rate": 4.302666666666667e-05, "loss": 1.5933, "step": 2827 }, { "epoch": 22.624, "grad_norm": 15.923346519470215, "learning_rate": 4.302222222222223e-05, "loss": 1.1584, "step": 2828 }, { "epoch": 22.632, "grad_norm": 17.24540138244629, "learning_rate": 4.301777777777778e-05, "loss": 1.5997, "step": 2829 }, { "epoch": 22.64, "grad_norm": 28.81597328186035, "learning_rate": 4.301333333333333e-05, "loss": 1.249, "step": 2830 }, { "epoch": 22.648, "grad_norm": 23.187076568603516, "learning_rate": 4.300888888888889e-05, "loss": 0.9218, "step": 2831 }, { "epoch": 22.656, "grad_norm": 27.16583251953125, "learning_rate": 4.300444444444445e-05, "loss": 2.0552, "step": 2832 }, { "epoch": 22.664, "grad_norm": 35.94168472290039, "learning_rate": 4.3e-05, "loss": 1.807, "step": 2833 }, { "epoch": 22.672, "grad_norm": 21.637832641601562, "learning_rate": 4.2995555555555556e-05, "loss": 1.9406, "step": 2834 }, { "epoch": 22.68, "grad_norm": 45.1744384765625, "learning_rate": 
4.299111111111112e-05, "loss": 1.1584, "step": 2835 }, { "epoch": 22.688, "grad_norm": 36.59823989868164, "learning_rate": 4.2986666666666666e-05, "loss": 1.2806, "step": 2836 }, { "epoch": 22.696, "grad_norm": 65.49803161621094, "learning_rate": 4.298222222222222e-05, "loss": 1.1339, "step": 2837 }, { "epoch": 22.704, "grad_norm": 14.242673873901367, "learning_rate": 4.2977777777777776e-05, "loss": 1.3407, "step": 2838 }, { "epoch": 22.712, "grad_norm": 67.513671875, "learning_rate": 4.297333333333334e-05, "loss": 1.2154, "step": 2839 }, { "epoch": 22.72, "grad_norm": 27.451271057128906, "learning_rate": 4.296888888888889e-05, "loss": 1.3749, "step": 2840 }, { "epoch": 22.728, "grad_norm": 22.53129768371582, "learning_rate": 4.296444444444445e-05, "loss": 1.0326, "step": 2841 }, { "epoch": 22.736, "grad_norm": 51.98845672607422, "learning_rate": 4.296e-05, "loss": 1.0703, "step": 2842 }, { "epoch": 22.744, "grad_norm": 22.601783752441406, "learning_rate": 4.295555555555556e-05, "loss": 1.2541, "step": 2843 }, { "epoch": 22.752, "grad_norm": 32.42585754394531, "learning_rate": 4.295111111111111e-05, "loss": 1.3946, "step": 2844 }, { "epoch": 22.76, "grad_norm": 71.70478820800781, "learning_rate": 4.2946666666666667e-05, "loss": 1.1787, "step": 2845 }, { "epoch": 22.768, "grad_norm": 26.072431564331055, "learning_rate": 4.294222222222222e-05, "loss": 0.9609, "step": 2846 }, { "epoch": 22.776, "grad_norm": 17.970792770385742, "learning_rate": 4.293777777777778e-05, "loss": 1.321, "step": 2847 }, { "epoch": 22.784, "grad_norm": 43.80386734008789, "learning_rate": 4.293333333333334e-05, "loss": 1.0654, "step": 2848 }, { "epoch": 22.792, "grad_norm": 38.412776947021484, "learning_rate": 4.292888888888889e-05, "loss": 1.0642, "step": 2849 }, { "epoch": 22.8, "grad_norm": 27.91657829284668, "learning_rate": 4.292444444444445e-05, "loss": 1.0987, "step": 2850 }, { "epoch": 22.808, "grad_norm": 22.398279190063477, "learning_rate": 4.292e-05, "loss": 1.1351, "step": 2851 }, 
{ "epoch": 22.816, "grad_norm": 22.750444412231445, "learning_rate": 4.291555555555556e-05, "loss": 1.0169, "step": 2852 }, { "epoch": 22.824, "grad_norm": 14.25595760345459, "learning_rate": 4.291111111111111e-05, "loss": 1.4368, "step": 2853 }, { "epoch": 22.832, "grad_norm": 30.56637191772461, "learning_rate": 4.290666666666667e-05, "loss": 1.3407, "step": 2854 }, { "epoch": 22.84, "grad_norm": 21.493127822875977, "learning_rate": 4.290222222222223e-05, "loss": 1.1618, "step": 2855 }, { "epoch": 22.848, "grad_norm": 30.705211639404297, "learning_rate": 4.2897777777777783e-05, "loss": 0.912, "step": 2856 }, { "epoch": 22.856, "grad_norm": 30.538455963134766, "learning_rate": 4.289333333333334e-05, "loss": 1.299, "step": 2857 }, { "epoch": 22.864, "grad_norm": 17.59905242919922, "learning_rate": 4.2888888888888886e-05, "loss": 1.363, "step": 2858 }, { "epoch": 22.872, "grad_norm": 34.88896942138672, "learning_rate": 4.288444444444444e-05, "loss": 1.0146, "step": 2859 }, { "epoch": 22.88, "grad_norm": 27.63620948791504, "learning_rate": 4.288e-05, "loss": 1.0125, "step": 2860 }, { "epoch": 22.888, "grad_norm": 17.13077735900879, "learning_rate": 4.287555555555556e-05, "loss": 1.4083, "step": 2861 }, { "epoch": 22.896, "grad_norm": 35.702301025390625, "learning_rate": 4.287111111111111e-05, "loss": 1.1358, "step": 2862 }, { "epoch": 22.904, "grad_norm": 23.214519500732422, "learning_rate": 4.286666666666667e-05, "loss": 0.7864, "step": 2863 }, { "epoch": 22.912, "grad_norm": 31.624868392944336, "learning_rate": 4.286222222222223e-05, "loss": 1.2303, "step": 2864 }, { "epoch": 22.92, "grad_norm": 53.58061599731445, "learning_rate": 4.285777777777778e-05, "loss": 1.9367, "step": 2865 }, { "epoch": 22.928, "grad_norm": 25.114471435546875, "learning_rate": 4.285333333333333e-05, "loss": 1.1705, "step": 2866 }, { "epoch": 22.936, "grad_norm": 80.70735931396484, "learning_rate": 4.284888888888889e-05, "loss": 1.4331, "step": 2867 }, { "epoch": 22.944, "grad_norm": 
22.268966674804688, "learning_rate": 4.284444444444445e-05, "loss": 1.435, "step": 2868 }, { "epoch": 22.951999999999998, "grad_norm": 27.453033447265625, "learning_rate": 4.284e-05, "loss": 0.9991, "step": 2869 }, { "epoch": 22.96, "grad_norm": 28.087617874145508, "learning_rate": 4.283555555555556e-05, "loss": 1.4234, "step": 2870 }, { "epoch": 22.968, "grad_norm": 19.682254791259766, "learning_rate": 4.283111111111111e-05, "loss": 1.3057, "step": 2871 }, { "epoch": 22.976, "grad_norm": 50.15097427368164, "learning_rate": 4.282666666666667e-05, "loss": 0.7093, "step": 2872 }, { "epoch": 22.984, "grad_norm": 30.215364456176758, "learning_rate": 4.282222222222222e-05, "loss": 1.3134, "step": 2873 }, { "epoch": 22.992, "grad_norm": 29.00379753112793, "learning_rate": 4.281777777777778e-05, "loss": 1.4673, "step": 2874 }, { "epoch": 23.0, "grad_norm": 20.834583282470703, "learning_rate": 4.281333333333333e-05, "loss": 0.8686, "step": 2875 }, { "epoch": 23.0, "eval_loss": 1.2195816040039062, "eval_map": 0.3119, "eval_map_50": 0.5877, "eval_map_75": 0.2934, "eval_map_Coverall": 0.5548, "eval_map_Face_Shield": 0.2634, "eval_map_Gloves": 0.3086, "eval_map_Goggles": 0.1168, "eval_map_Mask": 0.3159, "eval_map_large": 0.3677, "eval_map_medium": 0.1572, "eval_map_small": -1.0, "eval_mar_1": 0.277, "eval_mar_10": 0.5019, "eval_mar_100": 0.5116, "eval_mar_100_Coverall": 0.7, "eval_mar_100_Face_Shield": 0.6176, "eval_mar_100_Gloves": 0.4492, "eval_mar_100_Goggles": 0.3625, "eval_mar_100_Mask": 0.4288, "eval_mar_large": 0.6099, "eval_mar_medium": 0.225, "eval_mar_small": -1.0, "eval_runtime": 3.8342, "eval_samples_per_second": 7.563, "eval_steps_per_second": 0.522, "step": 2875 }, { "epoch": 23.008, "grad_norm": 16.691301345825195, "learning_rate": 4.2808888888888894e-05, "loss": 1.5504, "step": 2876 }, { "epoch": 23.016, "grad_norm": 16.61989402770996, "learning_rate": 4.280444444444445e-05, "loss": 1.071, "step": 2877 }, { "epoch": 23.024, "grad_norm": 27.884326934814453, 
"learning_rate": 4.2800000000000004e-05, "loss": 1.2413, "step": 2878 }, { "epoch": 23.032, "grad_norm": 31.300312042236328, "learning_rate": 4.279555555555556e-05, "loss": 1.4599, "step": 2879 }, { "epoch": 23.04, "grad_norm": 68.05166625976562, "learning_rate": 4.279111111111111e-05, "loss": 1.2313, "step": 2880 }, { "epoch": 23.048, "grad_norm": 39.56521987915039, "learning_rate": 4.278666666666667e-05, "loss": 1.1084, "step": 2881 }, { "epoch": 23.056, "grad_norm": 29.28224754333496, "learning_rate": 4.278222222222222e-05, "loss": 0.9474, "step": 2882 }, { "epoch": 23.064, "grad_norm": 17.398475646972656, "learning_rate": 4.277777777777778e-05, "loss": 0.9621, "step": 2883 }, { "epoch": 23.072, "grad_norm": 21.90506362915039, "learning_rate": 4.277333333333334e-05, "loss": 1.1068, "step": 2884 }, { "epoch": 23.08, "grad_norm": 33.0342903137207, "learning_rate": 4.2768888888888894e-05, "loss": 1.0763, "step": 2885 }, { "epoch": 23.088, "grad_norm": 40.851993560791016, "learning_rate": 4.276444444444445e-05, "loss": 1.3187, "step": 2886 }, { "epoch": 23.096, "grad_norm": 12.244890213012695, "learning_rate": 4.276e-05, "loss": 0.932, "step": 2887 }, { "epoch": 23.104, "grad_norm": 37.06093978881836, "learning_rate": 4.275555555555556e-05, "loss": 1.336, "step": 2888 }, { "epoch": 23.112, "grad_norm": 26.46609878540039, "learning_rate": 4.2751111111111114e-05, "loss": 1.3879, "step": 2889 }, { "epoch": 23.12, "grad_norm": 17.216794967651367, "learning_rate": 4.274666666666667e-05, "loss": 1.0419, "step": 2890 }, { "epoch": 23.128, "grad_norm": 30.16519546508789, "learning_rate": 4.274222222222222e-05, "loss": 1.1925, "step": 2891 }, { "epoch": 23.136, "grad_norm": 24.521730422973633, "learning_rate": 4.2737777777777785e-05, "loss": 1.2875, "step": 2892 }, { "epoch": 23.144, "grad_norm": 23.4704532623291, "learning_rate": 4.273333333333333e-05, "loss": 0.8427, "step": 2893 }, { "epoch": 23.152, "grad_norm": 32.75620651245117, "learning_rate": 4.272888888888889e-05, 
"loss": 1.2717, "step": 2894 }, { "epoch": 23.16, "grad_norm": 21.739643096923828, "learning_rate": 4.272444444444444e-05, "loss": 1.198, "step": 2895 }, { "epoch": 23.168, "grad_norm": 17.220216751098633, "learning_rate": 4.2720000000000004e-05, "loss": 1.4094, "step": 2896 }, { "epoch": 23.176, "grad_norm": 25.687707901000977, "learning_rate": 4.271555555555556e-05, "loss": 1.317, "step": 2897 }, { "epoch": 23.184, "grad_norm": 26.653018951416016, "learning_rate": 4.2711111111111114e-05, "loss": 1.1392, "step": 2898 }, { "epoch": 23.192, "grad_norm": 19.41118049621582, "learning_rate": 4.270666666666667e-05, "loss": 1.2869, "step": 2899 }, { "epoch": 23.2, "grad_norm": 17.924182891845703, "learning_rate": 4.2702222222222224e-05, "loss": 1.1309, "step": 2900 }, { "epoch": 23.208, "grad_norm": 24.01381492614746, "learning_rate": 4.269777777777778e-05, "loss": 1.0549, "step": 2901 }, { "epoch": 23.216, "grad_norm": 23.080411911010742, "learning_rate": 4.2693333333333333e-05, "loss": 1.2986, "step": 2902 }, { "epoch": 23.224, "grad_norm": 21.06586265563965, "learning_rate": 4.268888888888889e-05, "loss": 1.0925, "step": 2903 }, { "epoch": 23.232, "grad_norm": 31.560319900512695, "learning_rate": 4.268444444444445e-05, "loss": 1.1275, "step": 2904 }, { "epoch": 23.24, "grad_norm": 22.210731506347656, "learning_rate": 4.2680000000000005e-05, "loss": 1.1259, "step": 2905 }, { "epoch": 23.248, "grad_norm": 19.8430233001709, "learning_rate": 4.267555555555556e-05, "loss": 1.3772, "step": 2906 }, { "epoch": 23.256, "grad_norm": 50.241111755371094, "learning_rate": 4.2671111111111114e-05, "loss": 1.2745, "step": 2907 }, { "epoch": 23.264, "grad_norm": 28.02466583251953, "learning_rate": 4.266666666666667e-05, "loss": 1.1798, "step": 2908 }, { "epoch": 23.272, "grad_norm": 27.626953125, "learning_rate": 4.2662222222222224e-05, "loss": 1.0155, "step": 2909 }, { "epoch": 23.28, "grad_norm": 34.09748077392578, "learning_rate": 4.265777777777778e-05, "loss": 1.1699, "step": 2910 
}, { "epoch": 23.288, "grad_norm": 51.15103530883789, "learning_rate": 4.2653333333333334e-05, "loss": 3.219, "step": 2911 }, { "epoch": 23.296, "grad_norm": 53.13898849487305, "learning_rate": 4.264888888888889e-05, "loss": 1.522, "step": 2912 }, { "epoch": 23.304, "grad_norm": 31.559690475463867, "learning_rate": 4.264444444444445e-05, "loss": 1.6795, "step": 2913 }, { "epoch": 23.312, "grad_norm": 56.20608901977539, "learning_rate": 4.2640000000000005e-05, "loss": 1.3538, "step": 2914 }, { "epoch": 23.32, "grad_norm": 16.282318115234375, "learning_rate": 4.263555555555555e-05, "loss": 1.2033, "step": 2915 }, { "epoch": 23.328, "grad_norm": 36.28321838378906, "learning_rate": 4.263111111111111e-05, "loss": 1.0589, "step": 2916 }, { "epoch": 23.336, "grad_norm": 29.30141258239746, "learning_rate": 4.262666666666667e-05, "loss": 1.4056, "step": 2917 }, { "epoch": 23.344, "grad_norm": 32.67239761352539, "learning_rate": 4.2622222222222224e-05, "loss": 1.4648, "step": 2918 }, { "epoch": 23.352, "grad_norm": 26.956239700317383, "learning_rate": 4.261777777777778e-05, "loss": 1.0815, "step": 2919 }, { "epoch": 23.36, "grad_norm": 28.341768264770508, "learning_rate": 4.2613333333333334e-05, "loss": 0.9417, "step": 2920 }, { "epoch": 23.368, "grad_norm": 32.184417724609375, "learning_rate": 4.2608888888888896e-05, "loss": 1.312, "step": 2921 }, { "epoch": 23.376, "grad_norm": 25.823551177978516, "learning_rate": 4.2604444444444444e-05, "loss": 1.5029, "step": 2922 }, { "epoch": 23.384, "grad_norm": 45.40620040893555, "learning_rate": 4.26e-05, "loss": 1.555, "step": 2923 }, { "epoch": 23.392, "grad_norm": 36.27566146850586, "learning_rate": 4.2595555555555554e-05, "loss": 1.4029, "step": 2924 }, { "epoch": 23.4, "grad_norm": 18.569778442382812, "learning_rate": 4.2591111111111115e-05, "loss": 0.671, "step": 2925 }, { "epoch": 23.408, "grad_norm": 17.985797882080078, "learning_rate": 4.258666666666667e-05, "loss": 1.4041, "step": 2926 }, { "epoch": 23.416, "grad_norm": 
17.111282348632812, "learning_rate": 4.2582222222222225e-05, "loss": 1.4141, "step": 2927 }, { "epoch": 23.424, "grad_norm": 43.32246780395508, "learning_rate": 4.257777777777778e-05, "loss": 1.175, "step": 2928 }, { "epoch": 23.432, "grad_norm": 29.097219467163086, "learning_rate": 4.2573333333333335e-05, "loss": 1.2987, "step": 2929 }, { "epoch": 23.44, "grad_norm": 20.824745178222656, "learning_rate": 4.256888888888889e-05, "loss": 1.2995, "step": 2930 }, { "epoch": 23.448, "grad_norm": 42.0792121887207, "learning_rate": 4.2564444444444444e-05, "loss": 1.3797, "step": 2931 }, { "epoch": 23.456, "grad_norm": 59.76313400268555, "learning_rate": 4.256e-05, "loss": 1.0887, "step": 2932 }, { "epoch": 23.464, "grad_norm": 31.603548049926758, "learning_rate": 4.255555555555556e-05, "loss": 1.263, "step": 2933 }, { "epoch": 23.472, "grad_norm": 104.50334167480469, "learning_rate": 4.2551111111111116e-05, "loss": 1.1882, "step": 2934 }, { "epoch": 23.48, "grad_norm": 31.651412963867188, "learning_rate": 4.254666666666667e-05, "loss": 1.2067, "step": 2935 }, { "epoch": 23.488, "grad_norm": 32.014892578125, "learning_rate": 4.2542222222222225e-05, "loss": 1.3427, "step": 2936 }, { "epoch": 23.496, "grad_norm": 20.444490432739258, "learning_rate": 4.253777777777778e-05, "loss": 1.4194, "step": 2937 }, { "epoch": 23.504, "grad_norm": 20.601533889770508, "learning_rate": 4.2533333333333335e-05, "loss": 1.1095, "step": 2938 }, { "epoch": 23.512, "grad_norm": 23.126325607299805, "learning_rate": 4.252888888888889e-05, "loss": 1.0633, "step": 2939 }, { "epoch": 23.52, "grad_norm": 30.63460922241211, "learning_rate": 4.2524444444444445e-05, "loss": 1.3062, "step": 2940 }, { "epoch": 23.528, "grad_norm": 79.1376724243164, "learning_rate": 4.2520000000000006e-05, "loss": 1.0449, "step": 2941 }, { "epoch": 23.536, "grad_norm": 24.64475440979004, "learning_rate": 4.251555555555556e-05, "loss": 1.3448, "step": 2942 }, { "epoch": 23.544, "grad_norm": 120.89686584472656, 
"learning_rate": 4.2511111111111116e-05, "loss": 2.137, "step": 2943 }, { "epoch": 23.552, "grad_norm": 37.72684860229492, "learning_rate": 4.2506666666666664e-05, "loss": 1.269, "step": 2944 }, { "epoch": 23.56, "grad_norm": 33.292884826660156, "learning_rate": 4.2502222222222226e-05, "loss": 1.0939, "step": 2945 }, { "epoch": 23.568, "grad_norm": 47.79861068725586, "learning_rate": 4.249777777777778e-05, "loss": 1.3008, "step": 2946 }, { "epoch": 23.576, "grad_norm": 26.2646484375, "learning_rate": 4.2493333333333335e-05, "loss": 1.0021, "step": 2947 }, { "epoch": 23.584, "grad_norm": 50.070556640625, "learning_rate": 4.248888888888889e-05, "loss": 1.3055, "step": 2948 }, { "epoch": 23.592, "grad_norm": 26.603178024291992, "learning_rate": 4.248444444444445e-05, "loss": 1.3744, "step": 2949 }, { "epoch": 23.6, "grad_norm": 31.090618133544922, "learning_rate": 4.248e-05, "loss": 1.3293, "step": 2950 }, { "epoch": 23.608, "grad_norm": 46.94121551513672, "learning_rate": 4.2475555555555555e-05, "loss": 1.0037, "step": 2951 }, { "epoch": 23.616, "grad_norm": 27.161056518554688, "learning_rate": 4.247111111111111e-05, "loss": 1.0671, "step": 2952 }, { "epoch": 23.624, "grad_norm": 51.40691375732422, "learning_rate": 4.246666666666667e-05, "loss": 1.1173, "step": 2953 }, { "epoch": 23.632, "grad_norm": 18.006778717041016, "learning_rate": 4.2462222222222226e-05, "loss": 1.0238, "step": 2954 }, { "epoch": 23.64, "grad_norm": 28.02982521057129, "learning_rate": 4.245777777777778e-05, "loss": 1.4636, "step": 2955 }, { "epoch": 23.648, "grad_norm": 27.136474609375, "learning_rate": 4.2453333333333336e-05, "loss": 1.8098, "step": 2956 }, { "epoch": 23.656, "grad_norm": 26.505481719970703, "learning_rate": 4.244888888888889e-05, "loss": 1.6867, "step": 2957 }, { "epoch": 23.664, "grad_norm": 40.751277923583984, "learning_rate": 4.2444444444444445e-05, "loss": 0.9219, "step": 2958 }, { "epoch": 23.672, "grad_norm": 22.545866012573242, "learning_rate": 4.244e-05, "loss": 
1.0361, "step": 2959 }, { "epoch": 23.68, "grad_norm": 40.854835510253906, "learning_rate": 4.2435555555555555e-05, "loss": 1.1293, "step": 2960 }, { "epoch": 23.688, "grad_norm": 26.18552017211914, "learning_rate": 4.243111111111111e-05, "loss": 1.0563, "step": 2961 }, { "epoch": 23.696, "grad_norm": 33.67617416381836, "learning_rate": 4.242666666666667e-05, "loss": 1.4536, "step": 2962 }, { "epoch": 23.704, "grad_norm": 60.81672668457031, "learning_rate": 4.2422222222222226e-05, "loss": 2.4848, "step": 2963 }, { "epoch": 23.712, "grad_norm": 23.171680450439453, "learning_rate": 4.241777777777778e-05, "loss": 1.335, "step": 2964 }, { "epoch": 23.72, "grad_norm": 30.18621063232422, "learning_rate": 4.241333333333333e-05, "loss": 1.3463, "step": 2965 }, { "epoch": 23.728, "grad_norm": 30.968229293823242, "learning_rate": 4.240888888888889e-05, "loss": 1.4183, "step": 2966 }, { "epoch": 23.736, "grad_norm": 26.47138023376465, "learning_rate": 4.2404444444444446e-05, "loss": 1.2328, "step": 2967 }, { "epoch": 23.744, "grad_norm": 30.365781784057617, "learning_rate": 4.24e-05, "loss": 1.3826, "step": 2968 }, { "epoch": 23.752, "grad_norm": 28.758460998535156, "learning_rate": 4.2395555555555555e-05, "loss": 1.2509, "step": 2969 }, { "epoch": 23.76, "grad_norm": 30.585037231445312, "learning_rate": 4.239111111111112e-05, "loss": 2.4161, "step": 2970 }, { "epoch": 23.768, "grad_norm": 44.85927963256836, "learning_rate": 4.238666666666667e-05, "loss": 1.0319, "step": 2971 }, { "epoch": 23.776, "grad_norm": 24.119548797607422, "learning_rate": 4.238222222222222e-05, "loss": 0.9896, "step": 2972 }, { "epoch": 23.784, "grad_norm": 25.116535186767578, "learning_rate": 4.2377777777777775e-05, "loss": 1.3382, "step": 2973 }, { "epoch": 23.792, "grad_norm": 22.08856964111328, "learning_rate": 4.2373333333333336e-05, "loss": 1.0513, "step": 2974 }, { "epoch": 23.8, "grad_norm": 21.150089263916016, "learning_rate": 4.236888888888889e-05, "loss": 1.4892, "step": 2975 }, { "epoch": 
23.808, "grad_norm": 32.829036712646484, "learning_rate": 4.2364444444444446e-05, "loss": 1.2969, "step": 2976 }, { "epoch": 23.816, "grad_norm": 26.661691665649414, "learning_rate": 4.236e-05, "loss": 1.1129, "step": 2977 }, { "epoch": 23.824, "grad_norm": 44.40062713623047, "learning_rate": 4.235555555555556e-05, "loss": 0.9881, "step": 2978 }, { "epoch": 23.832, "grad_norm": 657.956298828125, "learning_rate": 4.235111111111111e-05, "loss": 1.1079, "step": 2979 }, { "epoch": 23.84, "grad_norm": 18.203126907348633, "learning_rate": 4.2346666666666666e-05, "loss": 1.1745, "step": 2980 }, { "epoch": 23.848, "grad_norm": 19.72690200805664, "learning_rate": 4.234222222222222e-05, "loss": 0.9657, "step": 2981 }, { "epoch": 23.856, "grad_norm": 45.17610549926758, "learning_rate": 4.233777777777778e-05, "loss": 2.1649, "step": 2982 }, { "epoch": 23.864, "grad_norm": 17.02524757385254, "learning_rate": 4.233333333333334e-05, "loss": 1.1439, "step": 2983 }, { "epoch": 23.872, "grad_norm": 29.43332290649414, "learning_rate": 4.232888888888889e-05, "loss": 0.7559, "step": 2984 }, { "epoch": 23.88, "grad_norm": 15.051115989685059, "learning_rate": 4.2324444444444447e-05, "loss": 1.1807, "step": 2985 }, { "epoch": 23.888, "grad_norm": 24.502769470214844, "learning_rate": 4.232e-05, "loss": 1.1813, "step": 2986 }, { "epoch": 23.896, "grad_norm": 24.570261001586914, "learning_rate": 4.2315555555555556e-05, "loss": 1.0261, "step": 2987 }, { "epoch": 23.904, "grad_norm": 20.978300094604492, "learning_rate": 4.231111111111111e-05, "loss": 1.3329, "step": 2988 }, { "epoch": 23.912, "grad_norm": 25.386388778686523, "learning_rate": 4.2306666666666666e-05, "loss": 1.0457, "step": 2989 }, { "epoch": 23.92, "grad_norm": 36.72083282470703, "learning_rate": 4.230222222222223e-05, "loss": 1.1788, "step": 2990 }, { "epoch": 23.928, "grad_norm": 25.84003448486328, "learning_rate": 4.229777777777778e-05, "loss": 1.0224, "step": 2991 }, { "epoch": 23.936, "grad_norm": 20.81496810913086, 
"learning_rate": 4.229333333333334e-05, "loss": 1.2145, "step": 2992 }, { "epoch": 23.944, "grad_norm": 102.87274932861328, "learning_rate": 4.228888888888889e-05, "loss": 0.9309, "step": 2993 }, { "epoch": 23.951999999999998, "grad_norm": 31.601425170898438, "learning_rate": 4.228444444444445e-05, "loss": 1.3253, "step": 2994 }, { "epoch": 23.96, "grad_norm": 24.008529663085938, "learning_rate": 4.228e-05, "loss": 1.4018, "step": 2995 }, { "epoch": 23.968, "grad_norm": 30.088172912597656, "learning_rate": 4.227555555555556e-05, "loss": 1.2808, "step": 2996 }, { "epoch": 23.976, "grad_norm": 32.10993576049805, "learning_rate": 4.227111111111111e-05, "loss": 1.0094, "step": 2997 }, { "epoch": 23.984, "grad_norm": 33.862327575683594, "learning_rate": 4.226666666666667e-05, "loss": 1.6915, "step": 2998 }, { "epoch": 23.992, "grad_norm": 34.36629104614258, "learning_rate": 4.226222222222223e-05, "loss": 1.4084, "step": 2999 }, { "epoch": 24.0, "grad_norm": 25.368206024169922, "learning_rate": 4.225777777777778e-05, "loss": 1.1951, "step": 3000 }, { "epoch": 24.0, "eval_loss": 1.1726927757263184, "eval_map": 0.3237, "eval_map_50": 0.641, "eval_map_75": 0.2936, "eval_map_Coverall": 0.5523, "eval_map_Face_Shield": 0.2711, "eval_map_Gloves": 0.2618, "eval_map_Goggles": 0.1127, "eval_map_Mask": 0.4203, "eval_map_large": 0.3658, "eval_map_medium": 0.1917, "eval_map_small": -1.0, "eval_mar_1": 0.2732, "eval_mar_10": 0.4951, "eval_mar_100": 0.5235, "eval_mar_100_Coverall": 0.7333, "eval_mar_100_Face_Shield": 0.5, "eval_mar_100_Gloves": 0.4426, "eval_mar_100_Goggles": 0.4281, "eval_mar_100_Mask": 0.5135, "eval_mar_large": 0.6076, "eval_mar_medium": 0.2723, "eval_mar_small": -1.0, "eval_runtime": 3.6411, "eval_samples_per_second": 7.965, "eval_steps_per_second": 0.549, "step": 3000 }, { "epoch": 24.008, "grad_norm": 36.29368591308594, "learning_rate": 4.225333333333333e-05, "loss": 1.1103, "step": 3001 }, { "epoch": 24.016, "grad_norm": 28.32022476196289, "learning_rate": 
4.224888888888889e-05, "loss": 0.9848, "step": 3002 }, { "epoch": 24.024, "grad_norm": 14.894607543945312, "learning_rate": 4.224444444444445e-05, "loss": 1.2494, "step": 3003 }, { "epoch": 24.032, "grad_norm": 15.4511137008667, "learning_rate": 4.224e-05, "loss": 0.9681, "step": 3004 }, { "epoch": 24.04, "grad_norm": 28.494388580322266, "learning_rate": 4.223555555555556e-05, "loss": 0.6288, "step": 3005 }, { "epoch": 24.048, "grad_norm": 29.76190948486328, "learning_rate": 4.223111111111112e-05, "loss": 1.9438, "step": 3006 }, { "epoch": 24.056, "grad_norm": 31.53886604309082, "learning_rate": 4.222666666666667e-05, "loss": 0.7515, "step": 3007 }, { "epoch": 24.064, "grad_norm": 41.91164016723633, "learning_rate": 4.222222222222222e-05, "loss": 0.9063, "step": 3008 }, { "epoch": 24.072, "grad_norm": 27.963550567626953, "learning_rate": 4.2217777777777776e-05, "loss": 1.1058, "step": 3009 }, { "epoch": 24.08, "grad_norm": 21.867101669311523, "learning_rate": 4.221333333333334e-05, "loss": 1.1588, "step": 3010 }, { "epoch": 24.088, "grad_norm": 21.641620635986328, "learning_rate": 4.220888888888889e-05, "loss": 1.3, "step": 3011 }, { "epoch": 24.096, "grad_norm": 28.517780303955078, "learning_rate": 4.220444444444445e-05, "loss": 1.4181, "step": 3012 }, { "epoch": 24.104, "grad_norm": 52.99163818359375, "learning_rate": 4.22e-05, "loss": 0.9426, "step": 3013 }, { "epoch": 24.112, "grad_norm": 36.787742614746094, "learning_rate": 4.219555555555556e-05, "loss": 1.3443, "step": 3014 }, { "epoch": 24.12, "grad_norm": 26.2950496673584, "learning_rate": 4.219111111111111e-05, "loss": 0.9685, "step": 3015 }, { "epoch": 24.128, "grad_norm": 29.4748477935791, "learning_rate": 4.218666666666667e-05, "loss": 1.2076, "step": 3016 }, { "epoch": 24.136, "grad_norm": 39.77871322631836, "learning_rate": 4.218222222222222e-05, "loss": 1.9208, "step": 3017 }, { "epoch": 24.144, "grad_norm": 40.55914306640625, "learning_rate": 4.217777777777778e-05, "loss": 1.0225, "step": 3018 }, { 
"epoch": 24.152, "grad_norm": 22.479520797729492, "learning_rate": 4.217333333333334e-05, "loss": 1.1059, "step": 3019 }, { "epoch": 24.16, "grad_norm": 33.509403228759766, "learning_rate": 4.216888888888889e-05, "loss": 1.3264, "step": 3020 }, { "epoch": 24.168, "grad_norm": 123.8053207397461, "learning_rate": 4.216444444444445e-05, "loss": 1.3192, "step": 3021 }, { "epoch": 24.176, "grad_norm": 65.37147521972656, "learning_rate": 4.2159999999999996e-05, "loss": 1.3985, "step": 3022 }, { "epoch": 24.184, "grad_norm": 24.226486206054688, "learning_rate": 4.215555555555556e-05, "loss": 1.0209, "step": 3023 }, { "epoch": 24.192, "grad_norm": 26.80531883239746, "learning_rate": 4.215111111111111e-05, "loss": 2.3771, "step": 3024 }, { "epoch": 24.2, "grad_norm": 36.26300048828125, "learning_rate": 4.214666666666667e-05, "loss": 0.7695, "step": 3025 }, { "epoch": 24.208, "grad_norm": 27.943979263305664, "learning_rate": 4.214222222222222e-05, "loss": 0.9897, "step": 3026 }, { "epoch": 24.216, "grad_norm": 16.611169815063477, "learning_rate": 4.2137777777777784e-05, "loss": 0.9243, "step": 3027 }, { "epoch": 24.224, "grad_norm": 22.532855987548828, "learning_rate": 4.213333333333334e-05, "loss": 1.2164, "step": 3028 }, { "epoch": 24.232, "grad_norm": 17.325695037841797, "learning_rate": 4.212888888888889e-05, "loss": 1.1584, "step": 3029 }, { "epoch": 24.24, "grad_norm": 44.24565887451172, "learning_rate": 4.212444444444444e-05, "loss": 1.2768, "step": 3030 }, { "epoch": 24.248, "grad_norm": 41.85745620727539, "learning_rate": 4.212e-05, "loss": 1.3682, "step": 3031 }, { "epoch": 24.256, "grad_norm": 25.393264770507812, "learning_rate": 4.211555555555556e-05, "loss": 0.9577, "step": 3032 }, { "epoch": 24.264, "grad_norm": 28.713871002197266, "learning_rate": 4.211111111111111e-05, "loss": 1.2228, "step": 3033 }, { "epoch": 24.272, "grad_norm": 24.85226821899414, "learning_rate": 4.210666666666667e-05, "loss": 1.1456, "step": 3034 }, { "epoch": 24.28, "grad_norm": 
30.547609329223633, "learning_rate": 4.210222222222223e-05, "loss": 0.7828, "step": 3035 }, { "epoch": 24.288, "grad_norm": 21.056941986083984, "learning_rate": 4.209777777777778e-05, "loss": 1.2346, "step": 3036 }, { "epoch": 24.296, "grad_norm": 22.37769889831543, "learning_rate": 4.209333333333333e-05, "loss": 0.78, "step": 3037 }, { "epoch": 24.304, "grad_norm": 17.69169807434082, "learning_rate": 4.208888888888889e-05, "loss": 1.329, "step": 3038 }, { "epoch": 24.312, "grad_norm": 33.34103775024414, "learning_rate": 4.208444444444445e-05, "loss": 1.6052, "step": 3039 }, { "epoch": 24.32, "grad_norm": 32.86015319824219, "learning_rate": 4.2080000000000004e-05, "loss": 1.0227, "step": 3040 }, { "epoch": 24.328, "grad_norm": 18.54184341430664, "learning_rate": 4.207555555555556e-05, "loss": 1.0991, "step": 3041 }, { "epoch": 24.336, "grad_norm": 25.61751937866211, "learning_rate": 4.207111111111111e-05, "loss": 1.4665, "step": 3042 }, { "epoch": 24.344, "grad_norm": 32.20993423461914, "learning_rate": 4.206666666666667e-05, "loss": 0.8991, "step": 3043 }, { "epoch": 24.352, "grad_norm": 24.154342651367188, "learning_rate": 4.206222222222222e-05, "loss": 0.9582, "step": 3044 }, { "epoch": 24.36, "grad_norm": 18.989212036132812, "learning_rate": 4.205777777777778e-05, "loss": 1.2617, "step": 3045 }, { "epoch": 24.368, "grad_norm": 21.232826232910156, "learning_rate": 4.205333333333333e-05, "loss": 1.384, "step": 3046 }, { "epoch": 24.376, "grad_norm": 13.203964233398438, "learning_rate": 4.2048888888888894e-05, "loss": 1.1438, "step": 3047 }, { "epoch": 24.384, "grad_norm": 20.839685440063477, "learning_rate": 4.204444444444445e-05, "loss": 1.0799, "step": 3048 }, { "epoch": 24.392, "grad_norm": 45.20412826538086, "learning_rate": 4.2040000000000004e-05, "loss": 1.7416, "step": 3049 }, { "epoch": 24.4, "grad_norm": 18.776769638061523, "learning_rate": 4.203555555555556e-05, "loss": 1.3913, "step": 3050 }, { "epoch": 24.408, "grad_norm": 23.40610694885254, 
"learning_rate": 4.2031111111111114e-05, "loss": 1.0424, "step": 3051 }, { "epoch": 24.416, "grad_norm": 72.58740997314453, "learning_rate": 4.202666666666667e-05, "loss": 1.0067, "step": 3052 }, { "epoch": 24.424, "grad_norm": 19.408233642578125, "learning_rate": 4.2022222222222223e-05, "loss": 1.2313, "step": 3053 }, { "epoch": 24.432, "grad_norm": 15.89229965209961, "learning_rate": 4.201777777777778e-05, "loss": 1.199, "step": 3054 }, { "epoch": 24.44, "grad_norm": 24.484434127807617, "learning_rate": 4.201333333333334e-05, "loss": 1.193, "step": 3055 }, { "epoch": 24.448, "grad_norm": 28.75279426574707, "learning_rate": 4.2008888888888895e-05, "loss": 1.0585, "step": 3056 }, { "epoch": 24.456, "grad_norm": 167.30499267578125, "learning_rate": 4.200444444444445e-05, "loss": 1.0783, "step": 3057 }, { "epoch": 24.464, "grad_norm": 27.437028884887695, "learning_rate": 4.2e-05, "loss": 1.041, "step": 3058 }, { "epoch": 24.472, "grad_norm": 12.469194412231445, "learning_rate": 4.199555555555556e-05, "loss": 1.1793, "step": 3059 }, { "epoch": 24.48, "grad_norm": 19.462501525878906, "learning_rate": 4.1991111111111114e-05, "loss": 0.9849, "step": 3060 }, { "epoch": 24.488, "grad_norm": 30.905420303344727, "learning_rate": 4.198666666666667e-05, "loss": 1.442, "step": 3061 }, { "epoch": 24.496, "grad_norm": 27.64324188232422, "learning_rate": 4.1982222222222224e-05, "loss": 1.467, "step": 3062 }, { "epoch": 24.504, "grad_norm": 35.371028900146484, "learning_rate": 4.1977777777777785e-05, "loss": 1.066, "step": 3063 }, { "epoch": 24.512, "grad_norm": 43.075462341308594, "learning_rate": 4.1973333333333334e-05, "loss": 1.5229, "step": 3064 }, { "epoch": 24.52, "grad_norm": 45.30946350097656, "learning_rate": 4.196888888888889e-05, "loss": 1.2491, "step": 3065 }, { "epoch": 24.528, "grad_norm": 35.23749923706055, "learning_rate": 4.196444444444444e-05, "loss": 0.8869, "step": 3066 }, { "epoch": 24.536, "grad_norm": 21.719240188598633, "learning_rate": 4.196e-05, "loss": 
0.9934, "step": 3067 }, { "epoch": 24.544, "grad_norm": 19.31175422668457, "learning_rate": 4.195555555555556e-05, "loss": 1.023, "step": 3068 }, { "epoch": 24.552, "grad_norm": 48.305362701416016, "learning_rate": 4.1951111111111115e-05, "loss": 1.1724, "step": 3069 }, { "epoch": 24.56, "grad_norm": 15.152873992919922, "learning_rate": 4.194666666666667e-05, "loss": 1.4232, "step": 3070 }, { "epoch": 24.568, "grad_norm": 30.62759780883789, "learning_rate": 4.1942222222222224e-05, "loss": 1.7033, "step": 3071 }, { "epoch": 24.576, "grad_norm": 15.561539649963379, "learning_rate": 4.193777777777778e-05, "loss": 1.1024, "step": 3072 }, { "epoch": 24.584, "grad_norm": 21.72650146484375, "learning_rate": 4.1933333333333334e-05, "loss": 2.2325, "step": 3073 }, { "epoch": 24.592, "grad_norm": 20.229488372802734, "learning_rate": 4.192888888888889e-05, "loss": 1.0496, "step": 3074 }, { "epoch": 24.6, "grad_norm": 27.3934326171875, "learning_rate": 4.1924444444444444e-05, "loss": 0.9387, "step": 3075 }, { "epoch": 24.608, "grad_norm": 30.974557876586914, "learning_rate": 4.1920000000000005e-05, "loss": 1.3907, "step": 3076 }, { "epoch": 24.616, "grad_norm": 15.867173194885254, "learning_rate": 4.191555555555556e-05, "loss": 1.3342, "step": 3077 }, { "epoch": 24.624, "grad_norm": 26.321304321289062, "learning_rate": 4.1911111111111115e-05, "loss": 0.8626, "step": 3078 }, { "epoch": 24.632, "grad_norm": 33.8427619934082, "learning_rate": 4.190666666666666e-05, "loss": 1.7732, "step": 3079 }, { "epoch": 24.64, "grad_norm": 34.71257019042969, "learning_rate": 4.1902222222222225e-05, "loss": 1.0607, "step": 3080 }, { "epoch": 24.648, "grad_norm": 19.484020233154297, "learning_rate": 4.189777777777778e-05, "loss": 1.4698, "step": 3081 }, { "epoch": 24.656, "grad_norm": 26.509681701660156, "learning_rate": 4.1893333333333334e-05, "loss": 0.999, "step": 3082 }, { "epoch": 24.664, "grad_norm": 19.55962562561035, "learning_rate": 4.188888888888889e-05, "loss": 0.9957, "step": 3083 
}, { "epoch": 24.672, "grad_norm": 50.62028503417969, "learning_rate": 4.188444444444445e-05, "loss": 1.3819, "step": 3084 }, { "epoch": 24.68, "grad_norm": 39.364322662353516, "learning_rate": 4.1880000000000006e-05, "loss": 1.1064, "step": 3085 }, { "epoch": 24.688, "grad_norm": 51.093502044677734, "learning_rate": 4.1875555555555554e-05, "loss": 1.0618, "step": 3086 }, { "epoch": 24.696, "grad_norm": 27.782520294189453, "learning_rate": 4.187111111111111e-05, "loss": 1.2906, "step": 3087 }, { "epoch": 24.704, "grad_norm": 28.951162338256836, "learning_rate": 4.186666666666667e-05, "loss": 0.8346, "step": 3088 }, { "epoch": 24.712, "grad_norm": 23.50880241394043, "learning_rate": 4.1862222222222225e-05, "loss": 1.1765, "step": 3089 }, { "epoch": 24.72, "grad_norm": 23.732751846313477, "learning_rate": 4.185777777777778e-05, "loss": 1.2174, "step": 3090 }, { "epoch": 24.728, "grad_norm": 15.36304759979248, "learning_rate": 4.1853333333333335e-05, "loss": 1.0423, "step": 3091 }, { "epoch": 24.736, "grad_norm": 20.595861434936523, "learning_rate": 4.1848888888888896e-05, "loss": 1.4525, "step": 3092 }, { "epoch": 24.744, "grad_norm": 35.658355712890625, "learning_rate": 4.1844444444444444e-05, "loss": 1.0868, "step": 3093 }, { "epoch": 24.752, "grad_norm": 23.002485275268555, "learning_rate": 4.184e-05, "loss": 0.7765, "step": 3094 }, { "epoch": 24.76, "grad_norm": 15.052186012268066, "learning_rate": 4.1835555555555554e-05, "loss": 1.1588, "step": 3095 }, { "epoch": 24.768, "grad_norm": 16.63985252380371, "learning_rate": 4.1831111111111116e-05, "loss": 1.4225, "step": 3096 }, { "epoch": 24.776, "grad_norm": 17.307056427001953, "learning_rate": 4.182666666666667e-05, "loss": 0.665, "step": 3097 }, { "epoch": 24.784, "grad_norm": 44.009002685546875, "learning_rate": 4.1822222222222225e-05, "loss": 1.2569, "step": 3098 }, { "epoch": 24.792, "grad_norm": 20.541908264160156, "learning_rate": 4.181777777777778e-05, "loss": 1.836, "step": 3099 }, { "epoch": 24.8, 
"grad_norm": 28.476360321044922, "learning_rate": 4.1813333333333335e-05, "loss": 1.2329, "step": 3100 }, { "epoch": 24.808, "grad_norm": 35.461856842041016, "learning_rate": 4.180888888888889e-05, "loss": 0.8964, "step": 3101 }, { "epoch": 24.816, "grad_norm": 47.570735931396484, "learning_rate": 4.1804444444444445e-05, "loss": 1.0915, "step": 3102 }, { "epoch": 24.824, "grad_norm": 17.195125579833984, "learning_rate": 4.18e-05, "loss": 1.4528, "step": 3103 }, { "epoch": 24.832, "grad_norm": 41.21711349487305, "learning_rate": 4.179555555555556e-05, "loss": 1.08, "step": 3104 }, { "epoch": 24.84, "grad_norm": 28.777624130249023, "learning_rate": 4.1791111111111116e-05, "loss": 1.3353, "step": 3105 }, { "epoch": 24.848, "grad_norm": 60.18862533569336, "learning_rate": 4.178666666666667e-05, "loss": 0.9259, "step": 3106 }, { "epoch": 24.856, "grad_norm": 29.939056396484375, "learning_rate": 4.1782222222222226e-05, "loss": 1.0513, "step": 3107 }, { "epoch": 24.864, "grad_norm": 35.475006103515625, "learning_rate": 4.177777777777778e-05, "loss": 1.1446, "step": 3108 }, { "epoch": 24.872, "grad_norm": 43.06428146362305, "learning_rate": 4.1773333333333335e-05, "loss": 1.5093, "step": 3109 }, { "epoch": 24.88, "grad_norm": 41.934471130371094, "learning_rate": 4.176888888888889e-05, "loss": 1.6521, "step": 3110 }, { "epoch": 24.888, "grad_norm": 19.150863647460938, "learning_rate": 4.1764444444444445e-05, "loss": 1.2844, "step": 3111 }, { "epoch": 24.896, "grad_norm": 35.73917007446289, "learning_rate": 4.176000000000001e-05, "loss": 1.1712, "step": 3112 }, { "epoch": 24.904, "grad_norm": 27.900096893310547, "learning_rate": 4.175555555555556e-05, "loss": 1.2565, "step": 3113 }, { "epoch": 24.912, "grad_norm": 18.206741333007812, "learning_rate": 4.1751111111111116e-05, "loss": 1.2586, "step": 3114 }, { "epoch": 24.92, "grad_norm": 25.61991310119629, "learning_rate": 4.1746666666666665e-05, "loss": 1.4862, "step": 3115 }, { "epoch": 24.928, "grad_norm": 
21.756526947021484, "learning_rate": 4.174222222222222e-05, "loss": 1.0268, "step": 3116 }, { "epoch": 24.936, "grad_norm": 17.01272964477539, "learning_rate": 4.173777777777778e-05, "loss": 1.107, "step": 3117 }, { "epoch": 24.944, "grad_norm": 56.21459197998047, "learning_rate": 4.1733333333333336e-05, "loss": 1.0926, "step": 3118 }, { "epoch": 24.951999999999998, "grad_norm": 30.08202362060547, "learning_rate": 4.172888888888889e-05, "loss": 0.8926, "step": 3119 }, { "epoch": 24.96, "grad_norm": 37.38795471191406, "learning_rate": 4.1724444444444446e-05, "loss": 0.9969, "step": 3120 }, { "epoch": 24.968, "grad_norm": 23.34217643737793, "learning_rate": 4.172e-05, "loss": 1.0846, "step": 3121 }, { "epoch": 24.976, "grad_norm": 70.2861557006836, "learning_rate": 4.1715555555555555e-05, "loss": 1.1697, "step": 3122 }, { "epoch": 24.984, "grad_norm": 42.089176177978516, "learning_rate": 4.171111111111111e-05, "loss": 1.0009, "step": 3123 }, { "epoch": 24.992, "grad_norm": 27.25581169128418, "learning_rate": 4.1706666666666665e-05, "loss": 0.8553, "step": 3124 }, { "epoch": 25.0, "grad_norm": 18.091575622558594, "learning_rate": 4.1702222222222227e-05, "loss": 2.9686, "step": 3125 }, { "epoch": 25.0, "eval_loss": 1.1659414768218994, "eval_map": 0.3333, "eval_map_50": 0.6452, "eval_map_75": 0.2991, "eval_map_Coverall": 0.5976, "eval_map_Face_Shield": 0.2826, "eval_map_Gloves": 0.2766, "eval_map_Goggles": 0.1493, "eval_map_Mask": 0.3603, "eval_map_large": 0.3821, "eval_map_medium": 0.232, "eval_map_small": -1.0, "eval_mar_1": 0.2745, "eval_mar_10": 0.5241, "eval_mar_100": 0.5376, "eval_mar_100_Coverall": 0.7844, "eval_mar_100_Face_Shield": 0.5588, "eval_mar_100_Gloves": 0.4213, "eval_mar_100_Goggles": 0.4313, "eval_mar_100_Mask": 0.4923, "eval_mar_large": 0.5972, "eval_mar_medium": 0.412, "eval_mar_small": -1.0, "eval_runtime": 3.7329, "eval_samples_per_second": 7.769, "eval_steps_per_second": 0.536, "step": 3125 }, { "epoch": 25.008, "grad_norm": 19.152828216552734, 
"learning_rate": 4.169777777777778e-05, "loss": 1.0801, "step": 3126 }, { "epoch": 25.016, "grad_norm": 20.376527786254883, "learning_rate": 4.1693333333333336e-05, "loss": 1.0725, "step": 3127 }, { "epoch": 25.024, "grad_norm": 27.60177993774414, "learning_rate": 4.168888888888889e-05, "loss": 1.1847, "step": 3128 }, { "epoch": 25.032, "grad_norm": 20.307584762573242, "learning_rate": 4.1684444444444446e-05, "loss": 1.1394, "step": 3129 }, { "epoch": 25.04, "grad_norm": 26.795001983642578, "learning_rate": 4.168e-05, "loss": 1.1748, "step": 3130 }, { "epoch": 25.048, "grad_norm": 19.7105770111084, "learning_rate": 4.1675555555555556e-05, "loss": 1.427, "step": 3131 }, { "epoch": 25.056, "grad_norm": 36.674808502197266, "learning_rate": 4.167111111111111e-05, "loss": 1.1157, "step": 3132 }, { "epoch": 25.064, "grad_norm": 26.6773681640625, "learning_rate": 4.166666666666667e-05, "loss": 1.3807, "step": 3133 }, { "epoch": 25.072, "grad_norm": 130.29641723632812, "learning_rate": 4.166222222222223e-05, "loss": 1.4522, "step": 3134 }, { "epoch": 25.08, "grad_norm": 73.8954849243164, "learning_rate": 4.165777777777778e-05, "loss": 1.2454, "step": 3135 }, { "epoch": 25.088, "grad_norm": 23.571189880371094, "learning_rate": 4.165333333333333e-05, "loss": 1.3138, "step": 3136 }, { "epoch": 25.096, "grad_norm": 30.07436752319336, "learning_rate": 4.164888888888889e-05, "loss": 1.0924, "step": 3137 }, { "epoch": 25.104, "grad_norm": 21.70915985107422, "learning_rate": 4.1644444444444446e-05, "loss": 1.0431, "step": 3138 }, { "epoch": 25.112, "grad_norm": 26.696147918701172, "learning_rate": 4.164e-05, "loss": 1.2845, "step": 3139 }, { "epoch": 25.12, "grad_norm": 29.88347625732422, "learning_rate": 4.1635555555555556e-05, "loss": 1.1807, "step": 3140 }, { "epoch": 25.128, "grad_norm": 365.3180236816406, "learning_rate": 4.163111111111112e-05, "loss": 1.4369, "step": 3141 }, { "epoch": 25.136, "grad_norm": 21.690393447875977, "learning_rate": 4.162666666666667e-05, "loss": 
1.0128, "step": 3142 }, { "epoch": 25.144, "grad_norm": 30.872297286987305, "learning_rate": 4.162222222222222e-05, "loss": 0.922, "step": 3143 }, { "epoch": 25.152, "grad_norm": 15.634756088256836, "learning_rate": 4.1617777777777775e-05, "loss": 1.0261, "step": 3144 }, { "epoch": 25.16, "grad_norm": 37.654014587402344, "learning_rate": 4.161333333333334e-05, "loss": 3.087, "step": 3145 }, { "epoch": 25.168, "grad_norm": 28.820310592651367, "learning_rate": 4.160888888888889e-05, "loss": 1.3037, "step": 3146 }, { "epoch": 25.176, "grad_norm": 19.72269058227539, "learning_rate": 4.160444444444445e-05, "loss": 1.2624, "step": 3147 }, { "epoch": 25.184, "grad_norm": 32.468074798583984, "learning_rate": 4.16e-05, "loss": 1.0757, "step": 3148 }, { "epoch": 25.192, "grad_norm": 19.627267837524414, "learning_rate": 4.159555555555556e-05, "loss": 1.2917, "step": 3149 }, { "epoch": 25.2, "grad_norm": 65.0608139038086, "learning_rate": 4.159111111111111e-05, "loss": 1.2738, "step": 3150 }, { "epoch": 25.208, "grad_norm": 19.299665451049805, "learning_rate": 4.1586666666666666e-05, "loss": 1.262, "step": 3151 }, { "epoch": 25.216, "grad_norm": 28.960039138793945, "learning_rate": 4.158222222222222e-05, "loss": 1.0798, "step": 3152 }, { "epoch": 25.224, "grad_norm": 22.006418228149414, "learning_rate": 4.157777777777778e-05, "loss": 0.9868, "step": 3153 }, { "epoch": 25.232, "grad_norm": 41.82169723510742, "learning_rate": 4.157333333333334e-05, "loss": 0.8721, "step": 3154 }, { "epoch": 25.24, "grad_norm": 37.329490661621094, "learning_rate": 4.156888888888889e-05, "loss": 1.0546, "step": 3155 }, { "epoch": 25.248, "grad_norm": 48.62051010131836, "learning_rate": 4.156444444444445e-05, "loss": 1.0262, "step": 3156 }, { "epoch": 25.256, "grad_norm": 26.226463317871094, "learning_rate": 4.156e-05, "loss": 1.0437, "step": 3157 }, { "epoch": 25.264, "grad_norm": 18.63745880126953, "learning_rate": 4.155555555555556e-05, "loss": 1.1061, "step": 3158 }, { "epoch": 25.272, 
"grad_norm": 27.06569480895996, "learning_rate": 4.155111111111111e-05, "loss": 1.5178, "step": 3159 }, { "epoch": 25.28, "grad_norm": 14.491301536560059, "learning_rate": 4.1546666666666666e-05, "loss": 1.1672, "step": 3160 }, { "epoch": 25.288, "grad_norm": 21.26207160949707, "learning_rate": 4.154222222222223e-05, "loss": 1.0331, "step": 3161 }, { "epoch": 25.296, "grad_norm": 31.375120162963867, "learning_rate": 4.153777777777778e-05, "loss": 1.1991, "step": 3162 }, { "epoch": 25.304, "grad_norm": 21.42015266418457, "learning_rate": 4.153333333333334e-05, "loss": 1.5248, "step": 3163 }, { "epoch": 25.312, "grad_norm": 17.712970733642578, "learning_rate": 4.152888888888889e-05, "loss": 1.1405, "step": 3164 }, { "epoch": 25.32, "grad_norm": 23.51114273071289, "learning_rate": 4.152444444444445e-05, "loss": 1.6553, "step": 3165 }, { "epoch": 25.328, "grad_norm": 15.6224365234375, "learning_rate": 4.152e-05, "loss": 1.1591, "step": 3166 }, { "epoch": 25.336, "grad_norm": 20.27779769897461, "learning_rate": 4.151555555555556e-05, "loss": 1.2537, "step": 3167 }, { "epoch": 25.344, "grad_norm": 34.29461669921875, "learning_rate": 4.151111111111111e-05, "loss": 0.9767, "step": 3168 }, { "epoch": 25.352, "grad_norm": 28.125009536743164, "learning_rate": 4.150666666666667e-05, "loss": 1.1336, "step": 3169 }, { "epoch": 25.36, "grad_norm": 38.56742858886719, "learning_rate": 4.150222222222223e-05, "loss": 1.0152, "step": 3170 }, { "epoch": 25.368, "grad_norm": 117.37774658203125, "learning_rate": 4.1497777777777776e-05, "loss": 0.8829, "step": 3171 }, { "epoch": 25.376, "grad_norm": 19.928821563720703, "learning_rate": 4.149333333333333e-05, "loss": 1.0855, "step": 3172 }, { "epoch": 25.384, "grad_norm": 24.021120071411133, "learning_rate": 4.1488888888888886e-05, "loss": 1.1057, "step": 3173 }, { "epoch": 25.392, "grad_norm": 44.27885055541992, "learning_rate": 4.148444444444445e-05, "loss": 1.9303, "step": 3174 }, { "epoch": 25.4, "grad_norm": 56.10298538208008, 
"learning_rate": 4.148e-05, "loss": 1.4141, "step": 3175 }, { "epoch": 25.408, "grad_norm": 18.708520889282227, "learning_rate": 4.147555555555556e-05, "loss": 2.1031, "step": 3176 }, { "epoch": 25.416, "grad_norm": 26.631649017333984, "learning_rate": 4.147111111111111e-05, "loss": 1.0882, "step": 3177 }, { "epoch": 25.424, "grad_norm": 25.47998046875, "learning_rate": 4.146666666666667e-05, "loss": 1.0977, "step": 3178 }, { "epoch": 25.432, "grad_norm": 19.99565887451172, "learning_rate": 4.146222222222222e-05, "loss": 1.2597, "step": 3179 }, { "epoch": 25.44, "grad_norm": 34.205223083496094, "learning_rate": 4.145777777777778e-05, "loss": 1.0573, "step": 3180 }, { "epoch": 25.448, "grad_norm": 30.68588638305664, "learning_rate": 4.145333333333333e-05, "loss": 0.9461, "step": 3181 }, { "epoch": 25.456, "grad_norm": 21.811992645263672, "learning_rate": 4.144888888888889e-05, "loss": 1.2641, "step": 3182 }, { "epoch": 25.464, "grad_norm": 20.970415115356445, "learning_rate": 4.144444444444445e-05, "loss": 1.1026, "step": 3183 }, { "epoch": 25.472, "grad_norm": 67.60324096679688, "learning_rate": 4.144e-05, "loss": 0.8767, "step": 3184 }, { "epoch": 25.48, "grad_norm": 46.868412017822266, "learning_rate": 4.143555555555556e-05, "loss": 1.0511, "step": 3185 }, { "epoch": 25.488, "grad_norm": 36.92475128173828, "learning_rate": 4.143111111111111e-05, "loss": 0.859, "step": 3186 }, { "epoch": 25.496, "grad_norm": 28.287704467773438, "learning_rate": 4.142666666666667e-05, "loss": 1.177, "step": 3187 }, { "epoch": 25.504, "grad_norm": 48.12260818481445, "learning_rate": 4.142222222222222e-05, "loss": 1.0839, "step": 3188 }, { "epoch": 25.512, "grad_norm": 33.195796966552734, "learning_rate": 4.141777777777778e-05, "loss": 0.9265, "step": 3189 }, { "epoch": 25.52, "grad_norm": 28.32135581970215, "learning_rate": 4.141333333333334e-05, "loss": 1.0616, "step": 3190 }, { "epoch": 25.528, "grad_norm": 113.87850189208984, "learning_rate": 4.1408888888888894e-05, "loss": 
1.3346, "step": 3191 }, { "epoch": 25.536, "grad_norm": 22.50424575805664, "learning_rate": 4.140444444444445e-05, "loss": 1.1281, "step": 3192 }, { "epoch": 25.544, "grad_norm": 19.882238388061523, "learning_rate": 4.14e-05, "loss": 1.6272, "step": 3193 }, { "epoch": 25.552, "grad_norm": 18.8083553314209, "learning_rate": 4.139555555555556e-05, "loss": 1.0393, "step": 3194 }, { "epoch": 25.56, "grad_norm": 787.7474365234375, "learning_rate": 4.139111111111111e-05, "loss": 1.2991, "step": 3195 }, { "epoch": 25.568, "grad_norm": 31.137130737304688, "learning_rate": 4.138666666666667e-05, "loss": 1.1786, "step": 3196 }, { "epoch": 25.576, "grad_norm": 21.31641387939453, "learning_rate": 4.138222222222222e-05, "loss": 1.1567, "step": 3197 }, { "epoch": 25.584, "grad_norm": 56.60578536987305, "learning_rate": 4.1377777777777784e-05, "loss": 1.5296, "step": 3198 }, { "epoch": 25.592, "grad_norm": 20.525304794311523, "learning_rate": 4.137333333333334e-05, "loss": 1.4416, "step": 3199 }, { "epoch": 25.6, "grad_norm": 27.796842575073242, "learning_rate": 4.136888888888889e-05, "loss": 0.8887, "step": 3200 }, { "epoch": 25.608, "grad_norm": 47.70473861694336, "learning_rate": 4.136444444444444e-05, "loss": 1.0968, "step": 3201 }, { "epoch": 25.616, "grad_norm": 65.56297302246094, "learning_rate": 4.1360000000000004e-05, "loss": 1.0271, "step": 3202 }, { "epoch": 25.624, "grad_norm": 25.607683181762695, "learning_rate": 4.135555555555556e-05, "loss": 1.2941, "step": 3203 }, { "epoch": 25.632, "grad_norm": 27.51287078857422, "learning_rate": 4.1351111111111113e-05, "loss": 1.2588, "step": 3204 }, { "epoch": 25.64, "grad_norm": 83.43413543701172, "learning_rate": 4.134666666666667e-05, "loss": 1.6946, "step": 3205 }, { "epoch": 25.648, "grad_norm": 18.454246520996094, "learning_rate": 4.134222222222223e-05, "loss": 0.8876, "step": 3206 }, { "epoch": 25.656, "grad_norm": 21.04839324951172, "learning_rate": 4.133777777777778e-05, "loss": 1.1088, "step": 3207 }, { "epoch": 
25.664, "grad_norm": 33.840152740478516, "learning_rate": 4.133333333333333e-05, "loss": 0.8445, "step": 3208 }, { "epoch": 25.672, "grad_norm": 23.18466567993164, "learning_rate": 4.132888888888889e-05, "loss": 1.1281, "step": 3209 }, { "epoch": 25.68, "grad_norm": 46.110984802246094, "learning_rate": 4.132444444444445e-05, "loss": 1.342, "step": 3210 }, { "epoch": 25.688, "grad_norm": 16.739038467407227, "learning_rate": 4.1320000000000004e-05, "loss": 1.8141, "step": 3211 }, { "epoch": 25.696, "grad_norm": 22.24211883544922, "learning_rate": 4.131555555555556e-05, "loss": 1.2803, "step": 3212 }, { "epoch": 25.704, "grad_norm": 32.20793533325195, "learning_rate": 4.1311111111111114e-05, "loss": 1.4656, "step": 3213 }, { "epoch": 25.712, "grad_norm": 28.752452850341797, "learning_rate": 4.130666666666667e-05, "loss": 1.2633, "step": 3214 }, { "epoch": 25.72, "grad_norm": 29.312480926513672, "learning_rate": 4.1302222222222224e-05, "loss": 1.0565, "step": 3215 }, { "epoch": 25.728, "grad_norm": 39.52705764770508, "learning_rate": 4.129777777777778e-05, "loss": 1.5478, "step": 3216 }, { "epoch": 25.736, "grad_norm": 21.568410873413086, "learning_rate": 4.129333333333333e-05, "loss": 1.0137, "step": 3217 }, { "epoch": 25.744, "grad_norm": 41.025604248046875, "learning_rate": 4.1288888888888895e-05, "loss": 1.5524, "step": 3218 }, { "epoch": 25.752, "grad_norm": 33.869449615478516, "learning_rate": 4.128444444444445e-05, "loss": 1.3319, "step": 3219 }, { "epoch": 25.76, "grad_norm": 41.90703201293945, "learning_rate": 4.1280000000000005e-05, "loss": 0.9586, "step": 3220 }, { "epoch": 25.768, "grad_norm": 30.15308952331543, "learning_rate": 4.127555555555556e-05, "loss": 1.3619, "step": 3221 }, { "epoch": 25.776, "grad_norm": 36.147796630859375, "learning_rate": 4.127111111111111e-05, "loss": 1.1514, "step": 3222 }, { "epoch": 25.784, "grad_norm": 17.45172691345215, "learning_rate": 4.126666666666667e-05, "loss": 1.1932, "step": 3223 }, { "epoch": 25.792, "grad_norm": 
25.874847412109375, "learning_rate": 4.1262222222222224e-05, "loss": 0.941, "step": 3224 }, { "epoch": 25.8, "grad_norm": 26.351709365844727, "learning_rate": 4.125777777777778e-05, "loss": 1.3018, "step": 3225 }, { "epoch": 25.808, "grad_norm": 30.332185745239258, "learning_rate": 4.1253333333333334e-05, "loss": 1.2142, "step": 3226 }, { "epoch": 25.816, "grad_norm": 21.60276985168457, "learning_rate": 4.1248888888888895e-05, "loss": 1.3826, "step": 3227 }, { "epoch": 25.824, "grad_norm": 24.846269607543945, "learning_rate": 4.124444444444444e-05, "loss": 1.3678, "step": 3228 }, { "epoch": 25.832, "grad_norm": 26.109575271606445, "learning_rate": 4.124e-05, "loss": 1.3685, "step": 3229 }, { "epoch": 25.84, "grad_norm": 23.2872257232666, "learning_rate": 4.123555555555555e-05, "loss": 1.1426, "step": 3230 }, { "epoch": 25.848, "grad_norm": 35.57892990112305, "learning_rate": 4.1231111111111115e-05, "loss": 1.3255, "step": 3231 }, { "epoch": 25.856, "grad_norm": 37.271358489990234, "learning_rate": 4.122666666666667e-05, "loss": 1.0645, "step": 3232 }, { "epoch": 25.864, "grad_norm": 26.64946746826172, "learning_rate": 4.1222222222222224e-05, "loss": 1.7051, "step": 3233 }, { "epoch": 25.872, "grad_norm": 41.502044677734375, "learning_rate": 4.121777777777778e-05, "loss": 1.27, "step": 3234 }, { "epoch": 25.88, "grad_norm": 27.875402450561523, "learning_rate": 4.1213333333333334e-05, "loss": 1.3415, "step": 3235 }, { "epoch": 25.888, "grad_norm": 19.264102935791016, "learning_rate": 4.120888888888889e-05, "loss": 1.2101, "step": 3236 }, { "epoch": 25.896, "grad_norm": 22.67976951599121, "learning_rate": 4.1204444444444444e-05, "loss": 1.5011, "step": 3237 }, { "epoch": 25.904, "grad_norm": 60.88962936401367, "learning_rate": 4.12e-05, "loss": 1.1437, "step": 3238 }, { "epoch": 25.912, "grad_norm": 15.304656028747559, "learning_rate": 4.119555555555556e-05, "loss": 1.0859, "step": 3239 }, { "epoch": 25.92, "grad_norm": 19.506013870239258, "learning_rate": 
4.1191111111111115e-05, "loss": 1.1362, "step": 3240 }, { "epoch": 25.928, "grad_norm": 88.9142074584961, "learning_rate": 4.118666666666667e-05, "loss": 1.0562, "step": 3241 }, { "epoch": 25.936, "grad_norm": 81.29895782470703, "learning_rate": 4.1182222222222225e-05, "loss": 1.3961, "step": 3242 }, { "epoch": 25.944, "grad_norm": 19.555267333984375, "learning_rate": 4.117777777777778e-05, "loss": 1.2888, "step": 3243 }, { "epoch": 25.951999999999998, "grad_norm": 28.399126052856445, "learning_rate": 4.1173333333333334e-05, "loss": 1.0129, "step": 3244 }, { "epoch": 25.96, "grad_norm": 54.1552734375, "learning_rate": 4.116888888888889e-05, "loss": 1.0786, "step": 3245 }, { "epoch": 25.968, "grad_norm": 45.333072662353516, "learning_rate": 4.1164444444444444e-05, "loss": 1.6181, "step": 3246 }, { "epoch": 25.976, "grad_norm": 22.467506408691406, "learning_rate": 4.1160000000000006e-05, "loss": 1.2782, "step": 3247 }, { "epoch": 25.984, "grad_norm": 20.76904296875, "learning_rate": 4.115555555555556e-05, "loss": 2.1444, "step": 3248 }, { "epoch": 25.992, "grad_norm": 22.30204963684082, "learning_rate": 4.1151111111111115e-05, "loss": 0.9878, "step": 3249 }, { "epoch": 26.0, "grad_norm": 28.163244247436523, "learning_rate": 4.1146666666666663e-05, "loss": 1.227, "step": 3250 }, { "epoch": 26.0, "eval_loss": 1.1257553100585938, "eval_map": 0.3529, "eval_map_50": 0.6478, "eval_map_75": 0.2894, "eval_map_Coverall": 0.5815, "eval_map_Face_Shield": 0.2639, "eval_map_Gloves": 0.2989, "eval_map_Goggles": 0.1511, "eval_map_Mask": 0.4692, "eval_map_large": 0.3941, "eval_map_medium": 0.2356, "eval_map_small": -1.0, "eval_mar_1": 0.3219, "eval_mar_10": 0.5543, "eval_mar_100": 0.5647, "eval_mar_100_Coverall": 0.6778, "eval_mar_100_Face_Shield": 0.6588, "eval_mar_100_Gloves": 0.4902, "eval_mar_100_Goggles": 0.4469, "eval_mar_100_Mask": 0.55, "eval_mar_large": 0.6377, "eval_mar_medium": 0.4032, "eval_mar_small": -1.0, "eval_runtime": 4.9663, "eval_samples_per_second": 5.839, 
"eval_steps_per_second": 0.403, "step": 3250 }, { "epoch": 26.008, "grad_norm": 19.43587875366211, "learning_rate": 4.1142222222222225e-05, "loss": 1.0095, "step": 3251 }, { "epoch": 26.016, "grad_norm": 16.672622680664062, "learning_rate": 4.113777777777778e-05, "loss": 0.9879, "step": 3252 }, { "epoch": 26.024, "grad_norm": 34.73092269897461, "learning_rate": 4.1133333333333335e-05, "loss": 1.3032, "step": 3253 }, { "epoch": 26.032, "grad_norm": 34.48142623901367, "learning_rate": 4.112888888888889e-05, "loss": 0.854, "step": 3254 }, { "epoch": 26.04, "grad_norm": 27.316333770751953, "learning_rate": 4.112444444444445e-05, "loss": 1.0538, "step": 3255 }, { "epoch": 26.048, "grad_norm": 48.63473892211914, "learning_rate": 4.1120000000000006e-05, "loss": 1.3163, "step": 3256 }, { "epoch": 26.056, "grad_norm": 31.727876663208008, "learning_rate": 4.1115555555555554e-05, "loss": 1.008, "step": 3257 }, { "epoch": 26.064, "grad_norm": 47.415260314941406, "learning_rate": 4.111111111111111e-05, "loss": 1.719, "step": 3258 }, { "epoch": 26.072, "grad_norm": 28.55284881591797, "learning_rate": 4.110666666666667e-05, "loss": 1.2274, "step": 3259 }, { "epoch": 26.08, "grad_norm": 57.392391204833984, "learning_rate": 4.1102222222222225e-05, "loss": 1.0447, "step": 3260 }, { "epoch": 26.088, "grad_norm": 33.20504379272461, "learning_rate": 4.109777777777778e-05, "loss": 1.0063, "step": 3261 }, { "epoch": 26.096, "grad_norm": 25.883621215820312, "learning_rate": 4.1093333333333335e-05, "loss": 1.2628, "step": 3262 }, { "epoch": 26.104, "grad_norm": 32.58131790161133, "learning_rate": 4.10888888888889e-05, "loss": 1.2877, "step": 3263 }, { "epoch": 26.112, "grad_norm": 24.05667495727539, "learning_rate": 4.1084444444444445e-05, "loss": 0.8467, "step": 3264 }, { "epoch": 26.12, "grad_norm": 43.472023010253906, "learning_rate": 4.108e-05, "loss": 1.2334, "step": 3265 }, { "epoch": 26.128, "grad_norm": 63.91143035888672, "learning_rate": 4.1075555555555555e-05, "loss": 1.2631, 
"step": 3266 }, { "epoch": 26.136, "grad_norm": 27.40350341796875, "learning_rate": 4.1071111111111116e-05, "loss": 1.5866, "step": 3267 }, { "epoch": 26.144, "grad_norm": 30.831756591796875, "learning_rate": 4.106666666666667e-05, "loss": 1.1009, "step": 3268 }, { "epoch": 26.152, "grad_norm": 22.23996925354004, "learning_rate": 4.1062222222222226e-05, "loss": 1.2468, "step": 3269 }, { "epoch": 26.16, "grad_norm": 32.82743835449219, "learning_rate": 4.105777777777778e-05, "loss": 3.4988, "step": 3270 }, { "epoch": 26.168, "grad_norm": 22.00217628479004, "learning_rate": 4.1053333333333336e-05, "loss": 1.0872, "step": 3271 }, { "epoch": 26.176, "grad_norm": 22.23670768737793, "learning_rate": 4.104888888888889e-05, "loss": 2.404, "step": 3272 }, { "epoch": 26.184, "grad_norm": 23.033472061157227, "learning_rate": 4.1044444444444445e-05, "loss": 1.106, "step": 3273 }, { "epoch": 26.192, "grad_norm": 29.851606369018555, "learning_rate": 4.104e-05, "loss": 2.3609, "step": 3274 }, { "epoch": 26.2, "grad_norm": 30.21463394165039, "learning_rate": 4.1035555555555555e-05, "loss": 0.8897, "step": 3275 }, { "epoch": 26.208, "grad_norm": 47.22298049926758, "learning_rate": 4.1031111111111117e-05, "loss": 1.1054, "step": 3276 }, { "epoch": 26.216, "grad_norm": 32.9202995300293, "learning_rate": 4.102666666666667e-05, "loss": 0.8019, "step": 3277 }, { "epoch": 26.224, "grad_norm": 18.53605079650879, "learning_rate": 4.1022222222222226e-05, "loss": 1.0412, "step": 3278 }, { "epoch": 26.232, "grad_norm": 40.21073913574219, "learning_rate": 4.1017777777777774e-05, "loss": 1.3433, "step": 3279 }, { "epoch": 26.24, "grad_norm": 44.05936050415039, "learning_rate": 4.1013333333333336e-05, "loss": 1.3449, "step": 3280 }, { "epoch": 26.248, "grad_norm": 22.974496841430664, "learning_rate": 4.100888888888889e-05, "loss": 1.0417, "step": 3281 }, { "epoch": 26.256, "grad_norm": 36.65011978149414, "learning_rate": 4.1004444444444446e-05, "loss": 1.2232, "step": 3282 }, { "epoch": 26.264, 
"grad_norm": 46.99016189575195, "learning_rate": 4.1e-05, "loss": 1.3838, "step": 3283 }, { "epoch": 26.272, "grad_norm": 46.195579528808594, "learning_rate": 4.099555555555556e-05, "loss": 1.147, "step": 3284 }, { "epoch": 26.28, "grad_norm": 36.8903694152832, "learning_rate": 4.099111111111111e-05, "loss": 1.0268, "step": 3285 }, { "epoch": 26.288, "grad_norm": 17.772207260131836, "learning_rate": 4.0986666666666665e-05, "loss": 1.0823, "step": 3286 }, { "epoch": 26.296, "grad_norm": 19.45478630065918, "learning_rate": 4.098222222222222e-05, "loss": 1.2338, "step": 3287 }, { "epoch": 26.304, "grad_norm": 22.744083404541016, "learning_rate": 4.097777777777778e-05, "loss": 0.7607, "step": 3288 }, { "epoch": 26.312, "grad_norm": 21.424457550048828, "learning_rate": 4.0973333333333336e-05, "loss": 1.3318, "step": 3289 }, { "epoch": 26.32, "grad_norm": 34.79893112182617, "learning_rate": 4.096888888888889e-05, "loss": 1.2037, "step": 3290 }, { "epoch": 26.328, "grad_norm": 28.30164909362793, "learning_rate": 4.0964444444444446e-05, "loss": 1.4631, "step": 3291 }, { "epoch": 26.336, "grad_norm": 33.78545379638672, "learning_rate": 4.096e-05, "loss": 1.1081, "step": 3292 }, { "epoch": 26.344, "grad_norm": 28.676952362060547, "learning_rate": 4.0955555555555556e-05, "loss": 0.9244, "step": 3293 }, { "epoch": 26.352, "grad_norm": 33.67453384399414, "learning_rate": 4.095111111111111e-05, "loss": 1.2039, "step": 3294 }, { "epoch": 26.36, "grad_norm": 40.234214782714844, "learning_rate": 4.0946666666666665e-05, "loss": 1.4247, "step": 3295 }, { "epoch": 26.368, "grad_norm": 31.44230079650879, "learning_rate": 4.094222222222223e-05, "loss": 1.4495, "step": 3296 }, { "epoch": 26.376, "grad_norm": 35.13457107543945, "learning_rate": 4.093777777777778e-05, "loss": 1.0049, "step": 3297 }, { "epoch": 26.384, "grad_norm": 29.44091796875, "learning_rate": 4.093333333333334e-05, "loss": 1.0441, "step": 3298 }, { "epoch": 26.392, "grad_norm": 28.5892333984375, "learning_rate": 
4.092888888888889e-05, "loss": 1.0506, "step": 3299 }, { "epoch": 26.4, "grad_norm": 27.09096336364746, "learning_rate": 4.0924444444444446e-05, "loss": 0.9767, "step": 3300 }, { "epoch": 26.408, "grad_norm": 20.668474197387695, "learning_rate": 4.092e-05, "loss": 0.9308, "step": 3301 }, { "epoch": 26.416, "grad_norm": 29.40928077697754, "learning_rate": 4.0915555555555556e-05, "loss": 1.5607, "step": 3302 }, { "epoch": 26.424, "grad_norm": 25.24091148376465, "learning_rate": 4.091111111111111e-05, "loss": 0.7835, "step": 3303 }, { "epoch": 26.432, "grad_norm": 21.834821701049805, "learning_rate": 4.090666666666667e-05, "loss": 1.3326, "step": 3304 }, { "epoch": 26.44, "grad_norm": 40.70970153808594, "learning_rate": 4.090222222222223e-05, "loss": 1.0441, "step": 3305 }, { "epoch": 26.448, "grad_norm": 28.879535675048828, "learning_rate": 4.089777777777778e-05, "loss": 0.853, "step": 3306 }, { "epoch": 26.456, "grad_norm": 233.7320556640625, "learning_rate": 4.089333333333333e-05, "loss": 1.0921, "step": 3307 }, { "epoch": 26.464, "grad_norm": 28.390907287597656, "learning_rate": 4.088888888888889e-05, "loss": 1.1253, "step": 3308 }, { "epoch": 26.472, "grad_norm": 33.936012268066406, "learning_rate": 4.088444444444445e-05, "loss": 1.3085, "step": 3309 }, { "epoch": 26.48, "grad_norm": 21.231279373168945, "learning_rate": 4.088e-05, "loss": 1.3066, "step": 3310 }, { "epoch": 26.488, "grad_norm": 24.310617446899414, "learning_rate": 4.0875555555555556e-05, "loss": 1.7739, "step": 3311 }, { "epoch": 26.496, "grad_norm": 32.12537384033203, "learning_rate": 4.087111111111112e-05, "loss": 1.5306, "step": 3312 }, { "epoch": 26.504, "grad_norm": 23.37247085571289, "learning_rate": 4.086666666666667e-05, "loss": 1.0243, "step": 3313 }, { "epoch": 26.512, "grad_norm": 15.906960487365723, "learning_rate": 4.086222222222222e-05, "loss": 0.8575, "step": 3314 }, { "epoch": 26.52, "grad_norm": 20.5546875, "learning_rate": 4.0857777777777776e-05, "loss": 0.8797, "step": 3315 }, { 
"epoch": 26.528, "grad_norm": 25.896669387817383, "learning_rate": 4.085333333333334e-05, "loss": 1.3727, "step": 3316 }, { "epoch": 26.536, "grad_norm": 87.23601531982422, "learning_rate": 4.084888888888889e-05, "loss": 1.2235, "step": 3317 }, { "epoch": 26.544, "grad_norm": 27.828134536743164, "learning_rate": 4.084444444444445e-05, "loss": 1.4265, "step": 3318 }, { "epoch": 26.552, "grad_norm": 56.1827392578125, "learning_rate": 4.084e-05, "loss": 1.364, "step": 3319 }, { "epoch": 26.56, "grad_norm": 65.57494354248047, "learning_rate": 4.083555555555556e-05, "loss": 1.0711, "step": 3320 }, { "epoch": 26.568, "grad_norm": 47.36205291748047, "learning_rate": 4.083111111111111e-05, "loss": 1.3013, "step": 3321 }, { "epoch": 26.576, "grad_norm": 23.222000122070312, "learning_rate": 4.0826666666666667e-05, "loss": 1.0508, "step": 3322 }, { "epoch": 26.584, "grad_norm": 15.508691787719727, "learning_rate": 4.082222222222222e-05, "loss": 0.9272, "step": 3323 }, { "epoch": 26.592, "grad_norm": 20.94869041442871, "learning_rate": 4.0817777777777776e-05, "loss": 1.7074, "step": 3324 }, { "epoch": 26.6, "grad_norm": 25.968528747558594, "learning_rate": 4.081333333333334e-05, "loss": 1.1926, "step": 3325 }, { "epoch": 26.608, "grad_norm": 35.87032699584961, "learning_rate": 4.080888888888889e-05, "loss": 1.5928, "step": 3326 }, { "epoch": 26.616, "grad_norm": 37.427242279052734, "learning_rate": 4.080444444444445e-05, "loss": 1.2197, "step": 3327 }, { "epoch": 26.624, "grad_norm": 21.703399658203125, "learning_rate": 4.08e-05, "loss": 0.764, "step": 3328 }, { "epoch": 26.632, "grad_norm": 23.74730110168457, "learning_rate": 4.079555555555556e-05, "loss": 1.5858, "step": 3329 }, { "epoch": 26.64, "grad_norm": 14.393590927124023, "learning_rate": 4.079111111111111e-05, "loss": 1.345, "step": 3330 }, { "epoch": 26.648, "grad_norm": 22.402118682861328, "learning_rate": 4.078666666666667e-05, "loss": 1.0501, "step": 3331 }, { "epoch": 26.656, "grad_norm": 21.155921936035156, 
"learning_rate": 4.078222222222222e-05, "loss": 1.4686, "step": 3332 }, { "epoch": 26.664, "grad_norm": 35.7061882019043, "learning_rate": 4.0777777777777783e-05, "loss": 1.2995, "step": 3333 }, { "epoch": 26.672, "grad_norm": 52.29239273071289, "learning_rate": 4.077333333333334e-05, "loss": 0.9222, "step": 3334 }, { "epoch": 26.68, "grad_norm": 20.150476455688477, "learning_rate": 4.076888888888889e-05, "loss": 1.3446, "step": 3335 }, { "epoch": 26.688, "grad_norm": 32.0345344543457, "learning_rate": 4.076444444444444e-05, "loss": 1.5713, "step": 3336 }, { "epoch": 26.696, "grad_norm": 51.12588882446289, "learning_rate": 4.076e-05, "loss": 1.136, "step": 3337 }, { "epoch": 26.704, "grad_norm": 16.243789672851562, "learning_rate": 4.075555555555556e-05, "loss": 1.2199, "step": 3338 }, { "epoch": 26.712, "grad_norm": 18.081295013427734, "learning_rate": 4.075111111111111e-05, "loss": 1.0081, "step": 3339 }, { "epoch": 26.72, "grad_norm": 49.4538688659668, "learning_rate": 4.074666666666667e-05, "loss": 1.1559, "step": 3340 }, { "epoch": 26.728, "grad_norm": 990.0347900390625, "learning_rate": 4.074222222222223e-05, "loss": 1.1228, "step": 3341 }, { "epoch": 26.736, "grad_norm": 16.92673110961914, "learning_rate": 4.073777777777778e-05, "loss": 1.1277, "step": 3342 }, { "epoch": 26.744, "grad_norm": 38.83031463623047, "learning_rate": 4.073333333333333e-05, "loss": 0.8926, "step": 3343 }, { "epoch": 26.752, "grad_norm": 16.650569915771484, "learning_rate": 4.072888888888889e-05, "loss": 1.2371, "step": 3344 }, { "epoch": 26.76, "grad_norm": 36.00374984741211, "learning_rate": 4.072444444444445e-05, "loss": 0.9482, "step": 3345 }, { "epoch": 26.768, "grad_norm": 44.29996109008789, "learning_rate": 4.072e-05, "loss": 0.747, "step": 3346 }, { "epoch": 26.776, "grad_norm": 30.6263484954834, "learning_rate": 4.071555555555556e-05, "loss": 1.4018, "step": 3347 }, { "epoch": 26.784, "grad_norm": 24.951616287231445, "learning_rate": 4.071111111111111e-05, "loss": 1.3756, 
"step": 3348 }, { "epoch": 26.792, "grad_norm": 21.559635162353516, "learning_rate": 4.070666666666667e-05, "loss": 1.2148, "step": 3349 }, { "epoch": 26.8, "grad_norm": 25.450000762939453, "learning_rate": 4.070222222222222e-05, "loss": 1.0473, "step": 3350 }, { "epoch": 26.808, "grad_norm": 21.45862579345703, "learning_rate": 4.069777777777778e-05, "loss": 1.3935, "step": 3351 }, { "epoch": 26.816, "grad_norm": 59.80723190307617, "learning_rate": 4.069333333333333e-05, "loss": 1.1579, "step": 3352 }, { "epoch": 26.824, "grad_norm": 43.8259162902832, "learning_rate": 4.0688888888888894e-05, "loss": 1.1484, "step": 3353 }, { "epoch": 26.832, "grad_norm": 47.90812683105469, "learning_rate": 4.068444444444445e-05, "loss": 1.081, "step": 3354 }, { "epoch": 26.84, "grad_norm": 21.24610710144043, "learning_rate": 4.0680000000000004e-05, "loss": 1.6204, "step": 3355 }, { "epoch": 26.848, "grad_norm": 29.283964157104492, "learning_rate": 4.067555555555556e-05, "loss": 1.8694, "step": 3356 }, { "epoch": 26.856, "grad_norm": 50.60352325439453, "learning_rate": 4.067111111111111e-05, "loss": 0.9902, "step": 3357 }, { "epoch": 26.864, "grad_norm": 25.535985946655273, "learning_rate": 4.066666666666667e-05, "loss": 1.6303, "step": 3358 }, { "epoch": 26.872, "grad_norm": 34.43397521972656, "learning_rate": 4.066222222222222e-05, "loss": 1.3715, "step": 3359 }, { "epoch": 26.88, "grad_norm": 34.591556549072266, "learning_rate": 4.065777777777778e-05, "loss": 0.8589, "step": 3360 }, { "epoch": 26.888, "grad_norm": 23.746925354003906, "learning_rate": 4.065333333333334e-05, "loss": 1.3222, "step": 3361 }, { "epoch": 26.896, "grad_norm": 24.47118377685547, "learning_rate": 4.0648888888888894e-05, "loss": 1.475, "step": 3362 }, { "epoch": 26.904, "grad_norm": 19.680994033813477, "learning_rate": 4.064444444444445e-05, "loss": 1.2984, "step": 3363 }, { "epoch": 26.912, "grad_norm": 18.529808044433594, "learning_rate": 4.064e-05, "loss": 1.2827, "step": 3364 }, { "epoch": 26.92, 
"grad_norm": 28.8149356842041, "learning_rate": 4.063555555555556e-05, "loss": 0.7746, "step": 3365 }, { "epoch": 26.928, "grad_norm": 56.208892822265625, "learning_rate": 4.0631111111111114e-05, "loss": 0.6608, "step": 3366 }, { "epoch": 26.936, "grad_norm": 37.552513122558594, "learning_rate": 4.062666666666667e-05, "loss": 1.1513, "step": 3367 }, { "epoch": 26.944, "grad_norm": 25.062366485595703, "learning_rate": 4.062222222222222e-05, "loss": 0.9251, "step": 3368 }, { "epoch": 26.951999999999998, "grad_norm": 42.85176086425781, "learning_rate": 4.0617777777777785e-05, "loss": 1.3158, "step": 3369 }, { "epoch": 26.96, "grad_norm": 28.13178825378418, "learning_rate": 4.061333333333334e-05, "loss": 1.2721, "step": 3370 }, { "epoch": 26.968, "grad_norm": 167.16819763183594, "learning_rate": 4.060888888888889e-05, "loss": 1.1974, "step": 3371 }, { "epoch": 26.976, "grad_norm": 26.927783966064453, "learning_rate": 4.060444444444444e-05, "loss": 1.166, "step": 3372 }, { "epoch": 26.984, "grad_norm": 34.37556457519531, "learning_rate": 4.0600000000000004e-05, "loss": 1.1455, "step": 3373 }, { "epoch": 26.992, "grad_norm": 16.033855438232422, "learning_rate": 4.059555555555556e-05, "loss": 0.9768, "step": 3374 }, { "epoch": 27.0, "grad_norm": 24.41372299194336, "learning_rate": 4.0591111111111114e-05, "loss": 2.2222, "step": 3375 }, { "epoch": 27.0, "eval_loss": 1.1777212619781494, "eval_map": 0.3361, "eval_map_50": 0.663, "eval_map_75": 0.2977, "eval_map_Coverall": 0.5439, "eval_map_Face_Shield": 0.3084, "eval_map_Gloves": 0.3377, "eval_map_Goggles": 0.1651, "eval_map_Mask": 0.3253, "eval_map_large": 0.39, "eval_map_medium": 0.2027, "eval_map_small": -1.0, "eval_mar_1": 0.2917, "eval_mar_10": 0.5095, "eval_mar_100": 0.5338, "eval_mar_100_Coverall": 0.7044, "eval_mar_100_Face_Shield": 0.6059, "eval_mar_100_Gloves": 0.4689, "eval_mar_100_Goggles": 0.4531, "eval_mar_100_Mask": 0.4365, "eval_mar_large": 0.6438, "eval_mar_medium": 0.2868, "eval_mar_small": -1.0, 
"eval_runtime": 3.8601, "eval_samples_per_second": 7.513, "eval_steps_per_second": 0.518, "step": 3375 }, { "epoch": 27.008, "grad_norm": 26.427831649780273, "learning_rate": 4.058666666666667e-05, "loss": 1.3902, "step": 3376 }, { "epoch": 27.016, "grad_norm": 29.336477279663086, "learning_rate": 4.0582222222222224e-05, "loss": 0.817, "step": 3377 }, { "epoch": 27.024, "grad_norm": 109.09412384033203, "learning_rate": 4.057777777777778e-05, "loss": 1.0589, "step": 3378 }, { "epoch": 27.032, "grad_norm": 39.298038482666016, "learning_rate": 4.057333333333333e-05, "loss": 0.8922, "step": 3379 }, { "epoch": 27.04, "grad_norm": 96.7221450805664, "learning_rate": 4.056888888888889e-05, "loss": 1.0757, "step": 3380 }, { "epoch": 27.048, "grad_norm": 27.43782615661621, "learning_rate": 4.056444444444444e-05, "loss": 0.8466, "step": 3381 }, { "epoch": 27.056, "grad_norm": 32.80760955810547, "learning_rate": 4.0560000000000005e-05, "loss": 1.2629, "step": 3382 }, { "epoch": 27.064, "grad_norm": 23.86261558532715, "learning_rate": 4.055555555555556e-05, "loss": 0.8471, "step": 3383 }, { "epoch": 27.072, "grad_norm": 53.20918655395508, "learning_rate": 4.0551111111111114e-05, "loss": 1.1683, "step": 3384 }, { "epoch": 27.08, "grad_norm": 27.833362579345703, "learning_rate": 4.054666666666667e-05, "loss": 1.2377, "step": 3385 }, { "epoch": 27.088, "grad_norm": 40.7601432800293, "learning_rate": 4.0542222222222224e-05, "loss": 1.3194, "step": 3386 }, { "epoch": 27.096, "grad_norm": 27.911256790161133, "learning_rate": 4.053777777777778e-05, "loss": 1.0482, "step": 3387 }, { "epoch": 27.104, "grad_norm": 85.6786880493164, "learning_rate": 4.0533333333333334e-05, "loss": 1.0705, "step": 3388 }, { "epoch": 27.112, "grad_norm": 37.7642822265625, "learning_rate": 4.052888888888889e-05, "loss": 0.9121, "step": 3389 }, { "epoch": 27.12, "grad_norm": 33.19715881347656, "learning_rate": 4.052444444444445e-05, "loss": 0.9571, "step": 3390 }, { "epoch": 27.128, "grad_norm": 
21.986757278442383, "learning_rate": 4.0520000000000005e-05, "loss": 1.1527, "step": 3391 }, { "epoch": 27.136, "grad_norm": 53.19879913330078, "learning_rate": 4.051555555555556e-05, "loss": 1.4067, "step": 3392 }, { "epoch": 27.144, "grad_norm": 24.430681228637695, "learning_rate": 4.051111111111111e-05, "loss": 0.9785, "step": 3393 }, { "epoch": 27.152, "grad_norm": 34.71595764160156, "learning_rate": 4.050666666666667e-05, "loss": 1.4342, "step": 3394 }, { "epoch": 27.16, "grad_norm": 29.044492721557617, "learning_rate": 4.0502222222222224e-05, "loss": 1.2053, "step": 3395 }, { "epoch": 27.168, "grad_norm": 25.040678024291992, "learning_rate": 4.049777777777778e-05, "loss": 1.2604, "step": 3396 }, { "epoch": 27.176, "grad_norm": 20.77334976196289, "learning_rate": 4.0493333333333334e-05, "loss": 0.829, "step": 3397 }, { "epoch": 27.184, "grad_norm": 34.80867385864258, "learning_rate": 4.0488888888888896e-05, "loss": 1.1526, "step": 3398 }, { "epoch": 27.192, "grad_norm": 275.37420654296875, "learning_rate": 4.0484444444444444e-05, "loss": 1.4408, "step": 3399 }, { "epoch": 27.2, "grad_norm": 23.993419647216797, "learning_rate": 4.048e-05, "loss": 2.9337, "step": 3400 }, { "epoch": 27.208, "grad_norm": 46.83903121948242, "learning_rate": 4.0475555555555554e-05, "loss": 1.1988, "step": 3401 }, { "epoch": 27.216, "grad_norm": 126.29737854003906, "learning_rate": 4.0471111111111115e-05, "loss": 1.4007, "step": 3402 }, { "epoch": 27.224, "grad_norm": 39.75178146362305, "learning_rate": 4.046666666666667e-05, "loss": 1.2207, "step": 3403 }, { "epoch": 27.232, "grad_norm": 31.600807189941406, "learning_rate": 4.0462222222222225e-05, "loss": 1.3144, "step": 3404 }, { "epoch": 27.24, "grad_norm": 29.118896484375, "learning_rate": 4.045777777777778e-05, "loss": 1.3808, "step": 3405 }, { "epoch": 27.248, "grad_norm": 46.120880126953125, "learning_rate": 4.0453333333333335e-05, "loss": 1.3709, "step": 3406 }, { "epoch": 27.256, "grad_norm": 41.785377502441406, 
"learning_rate": 4.044888888888889e-05, "loss": 1.0698, "step": 3407 }, { "epoch": 27.264, "grad_norm": 44.70484161376953, "learning_rate": 4.0444444444444444e-05, "loss": 1.0197, "step": 3408 }, { "epoch": 27.272, "grad_norm": 17.54303741455078, "learning_rate": 4.044e-05, "loss": 1.0821, "step": 3409 }, { "epoch": 27.28, "grad_norm": 19.910608291625977, "learning_rate": 4.043555555555556e-05, "loss": 1.3766, "step": 3410 }, { "epoch": 27.288, "grad_norm": 17.87439727783203, "learning_rate": 4.0431111111111116e-05, "loss": 0.8256, "step": 3411 }, { "epoch": 27.296, "grad_norm": 47.5792236328125, "learning_rate": 4.042666666666667e-05, "loss": 2.3947, "step": 3412 }, { "epoch": 27.304, "grad_norm": 31.41539192199707, "learning_rate": 4.0422222222222225e-05, "loss": 0.8066, "step": 3413 }, { "epoch": 27.312, "grad_norm": 27.366191864013672, "learning_rate": 4.041777777777778e-05, "loss": 1.1528, "step": 3414 }, { "epoch": 27.32, "grad_norm": 57.31110763549805, "learning_rate": 4.0413333333333335e-05, "loss": 1.3495, "step": 3415 }, { "epoch": 27.328, "grad_norm": 19.67345428466797, "learning_rate": 4.040888888888889e-05, "loss": 1.1092, "step": 3416 }, { "epoch": 27.336, "grad_norm": 122.37451171875, "learning_rate": 4.0404444444444445e-05, "loss": 1.4266, "step": 3417 }, { "epoch": 27.344, "grad_norm": 45.76695251464844, "learning_rate": 4.0400000000000006e-05, "loss": 0.8759, "step": 3418 }, { "epoch": 27.352, "grad_norm": 22.359182357788086, "learning_rate": 4.039555555555556e-05, "loss": 1.2925, "step": 3419 }, { "epoch": 27.36, "grad_norm": 39.113616943359375, "learning_rate": 4.0391111111111116e-05, "loss": 1.5898, "step": 3420 }, { "epoch": 27.368, "grad_norm": 41.80508804321289, "learning_rate": 4.0386666666666664e-05, "loss": 1.0357, "step": 3421 }, { "epoch": 27.376, "grad_norm": 24.609132766723633, "learning_rate": 4.0382222222222226e-05, "loss": 1.3317, "step": 3422 }, { "epoch": 27.384, "grad_norm": 46.1746711730957, "learning_rate": 
4.037777777777778e-05, "loss": 1.3527, "step": 3423 }, { "epoch": 27.392, "grad_norm": 12.184392929077148, "learning_rate": 4.0373333333333335e-05, "loss": 1.1657, "step": 3424 }, { "epoch": 27.4, "grad_norm": 24.21722412109375, "learning_rate": 4.036888888888889e-05, "loss": 1.4629, "step": 3425 }, { "epoch": 27.408, "grad_norm": 60.47889709472656, "learning_rate": 4.0364444444444445e-05, "loss": 1.2141, "step": 3426 }, { "epoch": 27.416, "grad_norm": 22.81849479675293, "learning_rate": 4.0360000000000007e-05, "loss": 1.574, "step": 3427 }, { "epoch": 27.424, "grad_norm": 23.84885597229004, "learning_rate": 4.0355555555555555e-05, "loss": 0.8794, "step": 3428 }, { "epoch": 27.432, "grad_norm": 32.51510238647461, "learning_rate": 4.035111111111111e-05, "loss": 1.6206, "step": 3429 }, { "epoch": 27.44, "grad_norm": 18.657154083251953, "learning_rate": 4.0346666666666664e-05, "loss": 2.3767, "step": 3430 }, { "epoch": 27.448, "grad_norm": 27.75027084350586, "learning_rate": 4.0342222222222226e-05, "loss": 0.969, "step": 3431 }, { "epoch": 27.456, "grad_norm": 19.285804748535156, "learning_rate": 4.033777777777778e-05, "loss": 1.3307, "step": 3432 }, { "epoch": 27.464, "grad_norm": 17.996715545654297, "learning_rate": 4.0333333333333336e-05, "loss": 1.3187, "step": 3433 }, { "epoch": 27.472, "grad_norm": 20.564496994018555, "learning_rate": 4.032888888888889e-05, "loss": 0.7568, "step": 3434 }, { "epoch": 27.48, "grad_norm": 34.8608283996582, "learning_rate": 4.0324444444444445e-05, "loss": 1.1821, "step": 3435 }, { "epoch": 27.488, "grad_norm": 21.070627212524414, "learning_rate": 4.032e-05, "loss": 0.9755, "step": 3436 }, { "epoch": 27.496, "grad_norm": 41.33171463012695, "learning_rate": 4.0315555555555555e-05, "loss": 1.1301, "step": 3437 }, { "epoch": 27.504, "grad_norm": 36.00835418701172, "learning_rate": 4.031111111111111e-05, "loss": 1.3771, "step": 3438 }, { "epoch": 27.512, "grad_norm": 16.765888214111328, "learning_rate": 4.030666666666667e-05, "loss": 
1.0843, "step": 3439 }, { "epoch": 27.52, "grad_norm": 56.634761810302734, "learning_rate": 4.0302222222222226e-05, "loss": 0.9101, "step": 3440 }, { "epoch": 27.528, "grad_norm": 47.962406158447266, "learning_rate": 4.029777777777778e-05, "loss": 1.1578, "step": 3441 }, { "epoch": 27.536, "grad_norm": 36.99934387207031, "learning_rate": 4.0293333333333336e-05, "loss": 1.0244, "step": 3442 }, { "epoch": 27.544, "grad_norm": 18.45305633544922, "learning_rate": 4.028888888888889e-05, "loss": 1.3269, "step": 3443 }, { "epoch": 27.552, "grad_norm": 17.62411880493164, "learning_rate": 4.0284444444444446e-05, "loss": 1.2605, "step": 3444 }, { "epoch": 27.56, "grad_norm": 36.528690338134766, "learning_rate": 4.028e-05, "loss": 1.1238, "step": 3445 }, { "epoch": 27.568, "grad_norm": 14.428620338439941, "learning_rate": 4.0275555555555555e-05, "loss": 1.2702, "step": 3446 }, { "epoch": 27.576, "grad_norm": 44.44088363647461, "learning_rate": 4.027111111111112e-05, "loss": 1.0243, "step": 3447 }, { "epoch": 27.584, "grad_norm": 21.24051856994629, "learning_rate": 4.026666666666667e-05, "loss": 1.358, "step": 3448 }, { "epoch": 27.592, "grad_norm": 28.96320152282715, "learning_rate": 4.026222222222223e-05, "loss": 0.7926, "step": 3449 }, { "epoch": 27.6, "grad_norm": 48.23795700073242, "learning_rate": 4.0257777777777775e-05, "loss": 1.3731, "step": 3450 }, { "epoch": 27.608, "grad_norm": 25.2567138671875, "learning_rate": 4.0253333333333336e-05, "loss": 0.7846, "step": 3451 }, { "epoch": 27.616, "grad_norm": 23.550905227661133, "learning_rate": 4.024888888888889e-05, "loss": 1.1965, "step": 3452 }, { "epoch": 27.624, "grad_norm": 17.634857177734375, "learning_rate": 4.0244444444444446e-05, "loss": 1.119, "step": 3453 }, { "epoch": 27.632, "grad_norm": 20.397768020629883, "learning_rate": 4.024e-05, "loss": 1.0715, "step": 3454 }, { "epoch": 27.64, "grad_norm": 18.953325271606445, "learning_rate": 4.023555555555556e-05, "loss": 1.0504, "step": 3455 }, { "epoch": 27.648, 
"grad_norm": 22.39470672607422, "learning_rate": 4.023111111111111e-05, "loss": 1.0756, "step": 3456 }, { "epoch": 27.656, "grad_norm": 20.73375129699707, "learning_rate": 4.0226666666666666e-05, "loss": 1.7464, "step": 3457 }, { "epoch": 27.664, "grad_norm": 459.8608703613281, "learning_rate": 4.022222222222222e-05, "loss": 1.3223, "step": 3458 }, { "epoch": 27.672, "grad_norm": 21.226491928100586, "learning_rate": 4.021777777777778e-05, "loss": 0.8396, "step": 3459 }, { "epoch": 27.68, "grad_norm": 31.173011779785156, "learning_rate": 4.021333333333334e-05, "loss": 1.2407, "step": 3460 }, { "epoch": 27.688, "grad_norm": 25.531139373779297, "learning_rate": 4.020888888888889e-05, "loss": 1.1013, "step": 3461 }, { "epoch": 27.696, "grad_norm": 32.233909606933594, "learning_rate": 4.0204444444444447e-05, "loss": 0.7891, "step": 3462 }, { "epoch": 27.704, "grad_norm": 30.200538635253906, "learning_rate": 4.02e-05, "loss": 1.174, "step": 3463 }, { "epoch": 27.712, "grad_norm": 23.30518341064453, "learning_rate": 4.0195555555555556e-05, "loss": 1.25, "step": 3464 }, { "epoch": 27.72, "grad_norm": 50.35039520263672, "learning_rate": 4.019111111111111e-05, "loss": 1.1101, "step": 3465 }, { "epoch": 27.728, "grad_norm": 30.374671936035156, "learning_rate": 4.0186666666666666e-05, "loss": 1.0867, "step": 3466 }, { "epoch": 27.736, "grad_norm": 28.35224723815918, "learning_rate": 4.018222222222223e-05, "loss": 1.3138, "step": 3467 }, { "epoch": 27.744, "grad_norm": 81.17507934570312, "learning_rate": 4.017777777777778e-05, "loss": 1.3403, "step": 3468 }, { "epoch": 27.752, "grad_norm": 16.28787612915039, "learning_rate": 4.017333333333334e-05, "loss": 1.0459, "step": 3469 }, { "epoch": 27.76, "grad_norm": 24.689117431640625, "learning_rate": 4.016888888888889e-05, "loss": 1.3761, "step": 3470 }, { "epoch": 27.768, "grad_norm": 65.72600555419922, "learning_rate": 4.016444444444445e-05, "loss": 1.1326, "step": 3471 }, { "epoch": 27.776, "grad_norm": 50.750823974609375, 
"learning_rate": 4.016e-05, "loss": 1.1911, "step": 3472 }, { "epoch": 27.784, "grad_norm": 30.5479793548584, "learning_rate": 4.0155555555555557e-05, "loss": 0.891, "step": 3473 }, { "epoch": 27.792, "grad_norm": 40.25010299682617, "learning_rate": 4.015111111111111e-05, "loss": 0.909, "step": 3474 }, { "epoch": 27.8, "grad_norm": 22.892845153808594, "learning_rate": 4.014666666666667e-05, "loss": 1.6136, "step": 3475 }, { "epoch": 27.808, "grad_norm": 20.803936004638672, "learning_rate": 4.014222222222223e-05, "loss": 0.9611, "step": 3476 }, { "epoch": 27.816, "grad_norm": 21.410367965698242, "learning_rate": 4.013777777777778e-05, "loss": 1.5617, "step": 3477 }, { "epoch": 27.824, "grad_norm": 25.930416107177734, "learning_rate": 4.013333333333333e-05, "loss": 1.269, "step": 3478 }, { "epoch": 27.832, "grad_norm": 46.62156295776367, "learning_rate": 4.0128888888888886e-05, "loss": 2.4462, "step": 3479 }, { "epoch": 27.84, "grad_norm": 25.678939819335938, "learning_rate": 4.012444444444445e-05, "loss": 1.2792, "step": 3480 }, { "epoch": 27.848, "grad_norm": 37.10389709472656, "learning_rate": 4.012e-05, "loss": 1.1147, "step": 3481 }, { "epoch": 27.856, "grad_norm": 36.185367584228516, "learning_rate": 4.011555555555556e-05, "loss": 1.359, "step": 3482 }, { "epoch": 27.864, "grad_norm": 25.410762786865234, "learning_rate": 4.011111111111111e-05, "loss": 0.9831, "step": 3483 }, { "epoch": 27.872, "grad_norm": 20.210289001464844, "learning_rate": 4.0106666666666673e-05, "loss": 1.4747, "step": 3484 }, { "epoch": 27.88, "grad_norm": 31.87897300720215, "learning_rate": 4.010222222222222e-05, "loss": 0.9059, "step": 3485 }, { "epoch": 27.888, "grad_norm": 20.053152084350586, "learning_rate": 4.0097777777777776e-05, "loss": 1.3471, "step": 3486 }, { "epoch": 27.896, "grad_norm": 25.603931427001953, "learning_rate": 4.009333333333333e-05, "loss": 0.9278, "step": 3487 }, { "epoch": 27.904, "grad_norm": 35.31588363647461, "learning_rate": 4.008888888888889e-05, "loss": 
1.9053, "step": 3488 }, { "epoch": 27.912, "grad_norm": 27.036956787109375, "learning_rate": 4.008444444444445e-05, "loss": 1.164, "step": 3489 }, { "epoch": 27.92, "grad_norm": 24.685880661010742, "learning_rate": 4.008e-05, "loss": 1.0098, "step": 3490 }, { "epoch": 27.928, "grad_norm": 58.79537582397461, "learning_rate": 4.007555555555556e-05, "loss": 1.0344, "step": 3491 }, { "epoch": 27.936, "grad_norm": 35.82758331298828, "learning_rate": 4.007111111111111e-05, "loss": 1.2959, "step": 3492 }, { "epoch": 27.944, "grad_norm": 56.36476135253906, "learning_rate": 4.006666666666667e-05, "loss": 1.3152, "step": 3493 }, { "epoch": 27.951999999999998, "grad_norm": 21.987613677978516, "learning_rate": 4.006222222222222e-05, "loss": 1.4514, "step": 3494 }, { "epoch": 27.96, "grad_norm": 40.122501373291016, "learning_rate": 4.005777777777778e-05, "loss": 1.3492, "step": 3495 }, { "epoch": 27.968, "grad_norm": 38.79408264160156, "learning_rate": 4.005333333333334e-05, "loss": 1.7397, "step": 3496 }, { "epoch": 27.976, "grad_norm": 51.69567108154297, "learning_rate": 4.004888888888889e-05, "loss": 1.2413, "step": 3497 }, { "epoch": 27.984, "grad_norm": 24.349634170532227, "learning_rate": 4.004444444444445e-05, "loss": 1.3482, "step": 3498 }, { "epoch": 27.992, "grad_norm": 21.115591049194336, "learning_rate": 4.004e-05, "loss": 0.9049, "step": 3499 }, { "epoch": 28.0, "grad_norm": 35.22541046142578, "learning_rate": 4.003555555555556e-05, "loss": 1.2219, "step": 3500 }, { "epoch": 28.0, "eval_loss": 1.1211919784545898, "eval_map": 0.3566, "eval_map_50": 0.7058, "eval_map_75": 0.3194, "eval_map_Coverall": 0.596, "eval_map_Face_Shield": 0.375, "eval_map_Gloves": 0.3137, "eval_map_Goggles": 0.133, "eval_map_Mask": 0.3652, "eval_map_large": 0.4073, "eval_map_medium": 0.2108, "eval_map_small": -1.0, "eval_mar_1": 0.3081, "eval_mar_10": 0.5415, "eval_mar_100": 0.5725, "eval_mar_100_Coverall": 0.7333, "eval_mar_100_Face_Shield": 0.7, "eval_mar_100_Gloves": 0.477, 
"eval_mar_100_Goggles": 0.4656, "eval_mar_100_Mask": 0.4865, "eval_mar_large": 0.6539, "eval_mar_medium": 0.3701, "eval_mar_small": -1.0, "eval_runtime": 4.0046, "eval_samples_per_second": 7.242, "eval_steps_per_second": 0.499, "step": 3500 } ], "logging_steps": 1, "max_steps": 12500, "num_input_tokens_seen": 0, "num_train_epochs": 100, "save_steps": 500, "stateful_callbacks": { "EarlyStoppingCallback": { "args": { "early_stopping_patience": 5, "early_stopping_threshold": 0.01 }, "attributes": { "early_stopping_patience_counter": 0 } }, "TrainerControl": { "args": { "should_epoch_stop": false, "should_evaluate": false, "should_log": false, "should_save": true, "should_training_stop": false }, "attributes": {} } }, "total_flos": 3.714487173314636e+19, "train_batch_size": 8, "trial_name": null, "trial_params": null }