{ "best_metric": 1.3475022315979004, "best_model_checkpoint": "detr-resnet-50-cppe5/checkpoint-2625", "epoch": 21.0, "eval_steps": 500, "global_step": 2625, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.008, "grad_norm": null, "learning_rate": 0.0, "loss": 3.6274, "step": 1 }, { "epoch": 0.016, "grad_norm": null, "learning_rate": 0.0, "loss": 4.0018, "step": 2 }, { "epoch": 0.024, "grad_norm": null, "learning_rate": 0.0, "loss": 4.1917, "step": 3 }, { "epoch": 0.032, "grad_norm": null, "learning_rate": 0.0, "loss": 3.4708, "step": 4 }, { "epoch": 0.04, "grad_norm": 220.92239379882812, "learning_rate": 4e-08, "loss": 4.1588, "step": 5 }, { "epoch": 0.048, "grad_norm": 164.0594482421875, "learning_rate": 8e-08, "loss": 3.4397, "step": 6 }, { "epoch": 0.056, "grad_norm": 54.94597244262695, "learning_rate": 1.2e-07, "loss": 3.2265, "step": 7 }, { "epoch": 0.064, "grad_norm": 108.73159790039062, "learning_rate": 1.6e-07, "loss": 3.5383, "step": 8 }, { "epoch": 0.072, "grad_norm": 182.83322143554688, "learning_rate": 2.0000000000000002e-07, "loss": 3.8552, "step": 9 }, { "epoch": 0.08, "grad_norm": 112.1514663696289, "learning_rate": 2.4e-07, "loss": 3.7212, "step": 10 }, { "epoch": 0.088, "grad_norm": 66.69995880126953, "learning_rate": 2.8e-07, "loss": 4.6501, "step": 11 }, { "epoch": 0.096, "grad_norm": 92.5389175415039, "learning_rate": 3.2e-07, "loss": 3.8064, "step": 12 }, { "epoch": 0.104, "grad_norm": 216.5779266357422, "learning_rate": 3.6e-07, "loss": 3.9677, "step": 13 }, { "epoch": 0.112, "grad_norm": 442.5048522949219, "learning_rate": 4.0000000000000003e-07, "loss": 3.9874, "step": 14 }, { "epoch": 0.12, "grad_norm": 90.24468994140625, "learning_rate": 4.4e-07, "loss": 4.533, "step": 15 }, { "epoch": 0.128, "grad_norm": 81.57317352294922, "learning_rate": 4.8e-07, "loss": 4.0144, "step": 16 }, { "epoch": 0.136, "grad_norm": 82.48701477050781, "learning_rate": 5.2e-07, "loss": 3.8022, 
"step": 17 }, { "epoch": 0.144, "grad_norm": 130.919677734375, "learning_rate": 5.6e-07, "loss": 4.2463, "step": 18 }, { "epoch": 0.152, "grad_norm": 136.47686767578125, "learning_rate": 6.000000000000001e-07, "loss": 4.4431, "step": 19 }, { "epoch": 0.16, "grad_norm": 360.122314453125, "learning_rate": 6.4e-07, "loss": 3.9122, "step": 20 }, { "epoch": 0.168, "grad_norm": 89.8421630859375, "learning_rate": 6.8e-07, "loss": 3.8939, "step": 21 }, { "epoch": 0.176, "grad_norm": 85.58394622802734, "learning_rate": 7.2e-07, "loss": 3.9268, "step": 22 }, { "epoch": 0.184, "grad_norm": 312.52630615234375, "learning_rate": 7.6e-07, "loss": 4.0465, "step": 23 }, { "epoch": 0.192, "grad_norm": null, "learning_rate": 7.6e-07, "loss": 3.9139, "step": 24 }, { "epoch": 0.2, "grad_norm": 178.92572021484375, "learning_rate": 8.000000000000001e-07, "loss": 3.9408, "step": 25 }, { "epoch": 0.208, "grad_norm": 147.859130859375, "learning_rate": 8.4e-07, "loss": 3.6627, "step": 26 }, { "epoch": 0.216, "grad_norm": 132.38279724121094, "learning_rate": 8.8e-07, "loss": 3.938, "step": 27 }, { "epoch": 0.224, "grad_norm": 355.9041442871094, "learning_rate": 9.2e-07, "loss": 3.368, "step": 28 }, { "epoch": 0.232, "grad_norm": 603.5505981445312, "learning_rate": 9.6e-07, "loss": 4.2603, "step": 29 }, { "epoch": 0.24, "grad_norm": 70.75674438476562, "learning_rate": 1.0000000000000002e-06, "loss": 3.8008, "step": 30 }, { "epoch": 0.248, "grad_norm": 116.27741241455078, "learning_rate": 1.04e-06, "loss": 4.2504, "step": 31 }, { "epoch": 0.256, "grad_norm": 107.80787658691406, "learning_rate": 1.08e-06, "loss": 3.5266, "step": 32 }, { "epoch": 0.264, "grad_norm": 110.53886413574219, "learning_rate": 1.12e-06, "loss": 3.3048, "step": 33 }, { "epoch": 0.272, "grad_norm": 100.43341827392578, "learning_rate": 1.16e-06, "loss": 3.4252, "step": 34 }, { "epoch": 0.28, "grad_norm": 156.83497619628906, "learning_rate": 1.2000000000000002e-06, "loss": 3.4778, "step": 35 }, { "epoch": 0.288, "grad_norm": 
90.80496978759766, "learning_rate": 1.24e-06, "loss": 4.5635, "step": 36 }, { "epoch": 0.296, "grad_norm": 180.2742156982422, "learning_rate": 1.28e-06, "loss": 4.5438, "step": 37 }, { "epoch": 0.304, "grad_norm": 67.54047393798828, "learning_rate": 1.32e-06, "loss": 3.4343, "step": 38 }, { "epoch": 0.312, "grad_norm": 87.6423110961914, "learning_rate": 1.36e-06, "loss": 4.0612, "step": 39 }, { "epoch": 0.32, "grad_norm": 61.50324249267578, "learning_rate": 1.4000000000000001e-06, "loss": 3.7887, "step": 40 }, { "epoch": 0.328, "grad_norm": 54.7778205871582, "learning_rate": 1.44e-06, "loss": 3.915, "step": 41 }, { "epoch": 0.336, "grad_norm": 61.5514030456543, "learning_rate": 1.4800000000000002e-06, "loss": 3.4486, "step": 42 }, { "epoch": 0.344, "grad_norm": 157.11669921875, "learning_rate": 1.52e-06, "loss": 3.4968, "step": 43 }, { "epoch": 0.352, "grad_norm": 238.96539306640625, "learning_rate": 1.56e-06, "loss": 3.6731, "step": 44 }, { "epoch": 0.36, "grad_norm": 63.15571212768555, "learning_rate": 1.6000000000000001e-06, "loss": 4.2218, "step": 45 }, { "epoch": 0.368, "grad_norm": 317.410888671875, "learning_rate": 1.6400000000000002e-06, "loss": 3.3539, "step": 46 }, { "epoch": 0.376, "grad_norm": 166.90859985351562, "learning_rate": 1.68e-06, "loss": 3.9369, "step": 47 }, { "epoch": 0.384, "grad_norm": 57.31470489501953, "learning_rate": 1.72e-06, "loss": 3.3689, "step": 48 }, { "epoch": 0.392, "grad_norm": 125.9739761352539, "learning_rate": 1.76e-06, "loss": 3.73, "step": 49 }, { "epoch": 0.4, "grad_norm": 286.5274658203125, "learning_rate": 1.8e-06, "loss": 4.2389, "step": 50 }, { "epoch": 0.408, "grad_norm": 66.74931335449219, "learning_rate": 1.84e-06, "loss": 3.6495, "step": 51 }, { "epoch": 0.416, "grad_norm": 408.8567810058594, "learning_rate": 1.8800000000000002e-06, "loss": 3.7002, "step": 52 }, { "epoch": 0.424, "grad_norm": 336.4726257324219, "learning_rate": 1.92e-06, "loss": 3.8191, "step": 53 }, { "epoch": 0.432, "grad_norm": 
71.32438659667969, "learning_rate": 1.96e-06, "loss": 3.7357, "step": 54 }, { "epoch": 0.44, "grad_norm": 54.88041687011719, "learning_rate": 2.0000000000000003e-06, "loss": 3.5531, "step": 55 }, { "epoch": 0.448, "grad_norm": 172.154296875, "learning_rate": 2.0400000000000004e-06, "loss": 3.4171, "step": 56 }, { "epoch": 0.456, "grad_norm": 230.16845703125, "learning_rate": 2.08e-06, "loss": 4.619, "step": 57 }, { "epoch": 0.464, "grad_norm": 215.04600524902344, "learning_rate": 2.12e-06, "loss": 3.6205, "step": 58 }, { "epoch": 0.472, "grad_norm": 124.32198333740234, "learning_rate": 2.16e-06, "loss": 4.0357, "step": 59 }, { "epoch": 0.48, "grad_norm": 405.5516052246094, "learning_rate": 2.2e-06, "loss": 3.9757, "step": 60 }, { "epoch": 0.488, "grad_norm": 276.7423095703125, "learning_rate": 2.24e-06, "loss": 3.3143, "step": 61 }, { "epoch": 0.496, "grad_norm": 73.7153549194336, "learning_rate": 2.28e-06, "loss": 4.0977, "step": 62 }, { "epoch": 0.504, "grad_norm": 98.33488464355469, "learning_rate": 2.32e-06, "loss": 3.2345, "step": 63 }, { "epoch": 0.512, "grad_norm": 143.81153869628906, "learning_rate": 2.36e-06, "loss": 3.7476, "step": 64 }, { "epoch": 0.52, "grad_norm": 95.06722259521484, "learning_rate": 2.4000000000000003e-06, "loss": 3.6238, "step": 65 }, { "epoch": 0.528, "grad_norm": 154.24966430664062, "learning_rate": 2.4400000000000004e-06, "loss": 3.481, "step": 66 }, { "epoch": 0.536, "grad_norm": 328.1234436035156, "learning_rate": 2.48e-06, "loss": 3.9419, "step": 67 }, { "epoch": 0.544, "grad_norm": 88.99818420410156, "learning_rate": 2.52e-06, "loss": 3.4922, "step": 68 }, { "epoch": 0.552, "grad_norm": 121.83319854736328, "learning_rate": 2.56e-06, "loss": 3.6755, "step": 69 }, { "epoch": 0.56, "grad_norm": null, "learning_rate": 2.56e-06, "loss": 3.8969, "step": 70 }, { "epoch": 0.568, "grad_norm": 365.6170654296875, "learning_rate": 2.6e-06, "loss": 3.4471, "step": 71 }, { "epoch": 0.576, "grad_norm": 302.1619567871094, "learning_rate": 
2.64e-06, "loss": 3.6765, "step": 72 }, { "epoch": 0.584, "grad_norm": 170.5956573486328, "learning_rate": 2.68e-06, "loss": 3.8338, "step": 73 }, { "epoch": 0.592, "grad_norm": 73.1478500366211, "learning_rate": 2.72e-06, "loss": 3.9854, "step": 74 }, { "epoch": 0.6, "grad_norm": 130.3486785888672, "learning_rate": 2.7600000000000003e-06, "loss": 3.4856, "step": 75 }, { "epoch": 0.608, "grad_norm": 659.3062744140625, "learning_rate": 2.8000000000000003e-06, "loss": 3.7704, "step": 76 }, { "epoch": 0.616, "grad_norm": 347.7629699707031, "learning_rate": 2.8400000000000003e-06, "loss": 4.1684, "step": 77 }, { "epoch": 0.624, "grad_norm": 375.5359802246094, "learning_rate": 2.88e-06, "loss": 3.9815, "step": 78 }, { "epoch": 0.632, "grad_norm": 135.36082458496094, "learning_rate": 2.92e-06, "loss": 3.1896, "step": 79 }, { "epoch": 0.64, "grad_norm": 97.36896514892578, "learning_rate": 2.9600000000000005e-06, "loss": 3.2304, "step": 80 }, { "epoch": 0.648, "grad_norm": 81.86087036132812, "learning_rate": 3e-06, "loss": 3.5994, "step": 81 }, { "epoch": 0.656, "grad_norm": 326.7822265625, "learning_rate": 3.04e-06, "loss": 3.4738, "step": 82 }, { "epoch": 0.664, "grad_norm": 106.70575714111328, "learning_rate": 3.08e-06, "loss": 3.8652, "step": 83 }, { "epoch": 0.672, "grad_norm": 145.56300354003906, "learning_rate": 3.12e-06, "loss": 3.5262, "step": 84 }, { "epoch": 0.68, "grad_norm": 92.08047485351562, "learning_rate": 3.1600000000000007e-06, "loss": 3.8159, "step": 85 }, { "epoch": 0.688, "grad_norm": 174.22181701660156, "learning_rate": 3.2000000000000003e-06, "loss": 4.6275, "step": 86 }, { "epoch": 0.696, "grad_norm": 124.65367126464844, "learning_rate": 3.24e-06, "loss": 3.1053, "step": 87 }, { "epoch": 0.704, "grad_norm": 69.98231506347656, "learning_rate": 3.2800000000000004e-06, "loss": 3.8061, "step": 88 }, { "epoch": 0.712, "grad_norm": 66.97435760498047, "learning_rate": 3.3200000000000004e-06, "loss": 3.8979, "step": 89 }, { "epoch": 0.72, "grad_norm": 
149.86111450195312, "learning_rate": 3.36e-06, "loss": 3.8159, "step": 90 }, { "epoch": 0.728, "grad_norm": 76.70599365234375, "learning_rate": 3.4000000000000005e-06, "loss": 3.5673, "step": 91 }, { "epoch": 0.736, "grad_norm": 123.68891906738281, "learning_rate": 3.44e-06, "loss": 3.6795, "step": 92 }, { "epoch": 0.744, "grad_norm": 67.35049438476562, "learning_rate": 3.4799999999999997e-06, "loss": 3.4946, "step": 93 }, { "epoch": 0.752, "grad_norm": 61.00783157348633, "learning_rate": 3.52e-06, "loss": 3.7769, "step": 94 }, { "epoch": 0.76, "grad_norm": 206.0460205078125, "learning_rate": 3.5600000000000002e-06, "loss": 3.6638, "step": 95 }, { "epoch": 0.768, "grad_norm": 101.91092681884766, "learning_rate": 3.6e-06, "loss": 4.0872, "step": 96 }, { "epoch": 0.776, "grad_norm": 303.120849609375, "learning_rate": 3.6400000000000003e-06, "loss": 3.4901, "step": 97 }, { "epoch": 0.784, "grad_norm": 79.52167510986328, "learning_rate": 3.68e-06, "loss": 3.1015, "step": 98 }, { "epoch": 0.792, "grad_norm": 852.6346435546875, "learning_rate": 3.72e-06, "loss": 3.8056, "step": 99 }, { "epoch": 0.8, "grad_norm": 242.23019409179688, "learning_rate": 3.7600000000000004e-06, "loss": 4.0269, "step": 100 }, { "epoch": 0.808, "grad_norm": 451.0561828613281, "learning_rate": 3.8e-06, "loss": 3.5843, "step": 101 }, { "epoch": 0.816, "grad_norm": 62.44890594482422, "learning_rate": 3.84e-06, "loss": 3.8225, "step": 102 }, { "epoch": 0.824, "grad_norm": 56.79619216918945, "learning_rate": 3.88e-06, "loss": 3.4239, "step": 103 }, { "epoch": 0.832, "grad_norm": 251.1862030029297, "learning_rate": 3.92e-06, "loss": 4.5939, "step": 104 }, { "epoch": 0.84, "grad_norm": 199.2397918701172, "learning_rate": 3.96e-06, "loss": 3.7805, "step": 105 }, { "epoch": 0.848, "grad_norm": 358.048828125, "learning_rate": 4.000000000000001e-06, "loss": 3.5649, "step": 106 }, { "epoch": 0.856, "grad_norm": 150.43661499023438, "learning_rate": 4.04e-06, "loss": 3.6094, "step": 107 }, { "epoch": 0.864, 
"grad_norm": 173.72750854492188, "learning_rate": 4.080000000000001e-06, "loss": 3.438, "step": 108 }, { "epoch": 0.872, "grad_norm": 187.98153686523438, "learning_rate": 4.12e-06, "loss": 3.47, "step": 109 }, { "epoch": 0.88, "grad_norm": 171.18820190429688, "learning_rate": 4.16e-06, "loss": 3.8336, "step": 110 }, { "epoch": 0.888, "grad_norm": 49.40830612182617, "learning_rate": 4.2000000000000004e-06, "loss": 3.4171, "step": 111 }, { "epoch": 0.896, "grad_norm": 174.9824676513672, "learning_rate": 4.24e-06, "loss": 2.6775, "step": 112 }, { "epoch": 0.904, "grad_norm": 178.87120056152344, "learning_rate": 4.28e-06, "loss": 3.642, "step": 113 }, { "epoch": 0.912, "grad_norm": 53.1528205871582, "learning_rate": 4.32e-06, "loss": 3.3389, "step": 114 }, { "epoch": 0.92, "grad_norm": 191.46316528320312, "learning_rate": 4.360000000000001e-06, "loss": 3.0125, "step": 115 }, { "epoch": 0.928, "grad_norm": 175.86355590820312, "learning_rate": 4.4e-06, "loss": 3.4028, "step": 116 }, { "epoch": 0.936, "grad_norm": 95.6614990234375, "learning_rate": 4.440000000000001e-06, "loss": 3.3527, "step": 117 }, { "epoch": 0.944, "grad_norm": 99.82764434814453, "learning_rate": 4.48e-06, "loss": 3.8209, "step": 118 }, { "epoch": 0.952, "grad_norm": 119.69508361816406, "learning_rate": 4.52e-06, "loss": 3.2237, "step": 119 }, { "epoch": 0.96, "grad_norm": 83.30669403076172, "learning_rate": 4.56e-06, "loss": 2.9877, "step": 120 }, { "epoch": 0.968, "grad_norm": 70.58946228027344, "learning_rate": 4.6e-06, "loss": 3.4722, "step": 121 }, { "epoch": 0.976, "grad_norm": 93.16459655761719, "learning_rate": 4.64e-06, "loss": 4.0195, "step": 122 }, { "epoch": 0.984, "grad_norm": 533.4874267578125, "learning_rate": 4.68e-06, "loss": 3.2376, "step": 123 }, { "epoch": 0.992, "grad_norm": 128.25697326660156, "learning_rate": 4.72e-06, "loss": 3.5492, "step": 124 }, { "epoch": 1.0, "grad_norm": 135.9224090576172, "learning_rate": 4.76e-06, "loss": 3.1571, "step": 125 }, { "epoch": 1.0, 
"eval_loss": 3.488889694213867, "eval_map": 0.0008, "eval_map_50": 0.0023, "eval_map_75": 0.0004, "eval_map_Coverall": 0.0018, "eval_map_Face_Shield": 0.0009, "eval_map_Gloves": 0.0, "eval_map_Goggles": 0.0012, "eval_map_Mask": 0.0, "eval_map_large": 0.0013, "eval_map_medium": 0.0006, "eval_map_small": 0.0, "eval_mar_1": 0.0025, "eval_mar_10": 0.0179, "eval_mar_100": 0.0764, "eval_mar_100_Coverall": 0.1022, "eval_mar_100_Face_Shield": 0.2235, "eval_mar_100_Gloves": 0.0, "eval_mar_100_Goggles": 0.0562, "eval_mar_100_Mask": 0.0, "eval_mar_large": 0.0989, "eval_mar_medium": 0.0371, "eval_mar_small": 0.0, "eval_runtime": 2.3314, "eval_samples_per_second": 12.439, "eval_steps_per_second": 0.858, "step": 125 }, { "epoch": 1.008, "grad_norm": 50.99522018432617, "learning_rate": 4.800000000000001e-06, "loss": 3.5751, "step": 126 }, { "epoch": 1.016, "grad_norm": 93.21185302734375, "learning_rate": 4.84e-06, "loss": 3.1972, "step": 127 }, { "epoch": 1.024, "grad_norm": 65.55482482910156, "learning_rate": 4.880000000000001e-06, "loss": 4.1533, "step": 128 }, { "epoch": 1.032, "grad_norm": 427.88360595703125, "learning_rate": 4.92e-06, "loss": 3.2568, "step": 129 }, { "epoch": 1.04, "grad_norm": 131.18316650390625, "learning_rate": 4.96e-06, "loss": 3.071, "step": 130 }, { "epoch": 1.048, "grad_norm": 281.9902038574219, "learning_rate": 5e-06, "loss": 3.3932, "step": 131 }, { "epoch": 1.056, "grad_norm": 95.32235717773438, "learning_rate": 5.04e-06, "loss": 3.6997, "step": 132 }, { "epoch": 1.064, "grad_norm": 179.55511474609375, "learning_rate": 5.08e-06, "loss": 2.8124, "step": 133 }, { "epoch": 1.072, "grad_norm": 56.81507873535156, "learning_rate": 5.12e-06, "loss": 3.4372, "step": 134 }, { "epoch": 1.08, "grad_norm": 410.91961669921875, "learning_rate": 5.1600000000000006e-06, "loss": 3.7426, "step": 135 }, { "epoch": 1.088, "grad_norm": 87.9859390258789, "learning_rate": 5.2e-06, "loss": 3.342, "step": 136 }, { "epoch": 1.096, "grad_norm": 108.58563995361328, 
"learning_rate": 5.240000000000001e-06, "loss": 3.2968, "step": 137 }, { "epoch": 1.104, "grad_norm": 167.87681579589844, "learning_rate": 5.28e-06, "loss": 3.1124, "step": 138 }, { "epoch": 1.112, "grad_norm": 121.67076110839844, "learning_rate": 5.32e-06, "loss": 3.2898, "step": 139 }, { "epoch": 1.12, "grad_norm": 95.48301696777344, "learning_rate": 5.36e-06, "loss": 2.6284, "step": 140 }, { "epoch": 1.1280000000000001, "grad_norm": 158.17617797851562, "learning_rate": 5.4e-06, "loss": 3.0411, "step": 141 }, { "epoch": 1.1360000000000001, "grad_norm": 94.71068572998047, "learning_rate": 5.44e-06, "loss": 3.3106, "step": 142 }, { "epoch": 1.144, "grad_norm": 83.2244644165039, "learning_rate": 5.48e-06, "loss": 3.0777, "step": 143 }, { "epoch": 1.152, "grad_norm": 500.2181701660156, "learning_rate": 5.5200000000000005e-06, "loss": 2.8969, "step": 144 }, { "epoch": 1.16, "grad_norm": 378.587890625, "learning_rate": 5.56e-06, "loss": 3.5505, "step": 145 }, { "epoch": 1.168, "grad_norm": 62.76191329956055, "learning_rate": 5.600000000000001e-06, "loss": 3.1456, "step": 146 }, { "epoch": 1.176, "grad_norm": 68.61674499511719, "learning_rate": 5.64e-06, "loss": 3.5831, "step": 147 }, { "epoch": 1.184, "grad_norm": 237.96429443359375, "learning_rate": 5.680000000000001e-06, "loss": 2.9911, "step": 148 }, { "epoch": 1.192, "grad_norm": 159.41934204101562, "learning_rate": 5.72e-06, "loss": 3.0774, "step": 149 }, { "epoch": 1.2, "grad_norm": 46.164649963378906, "learning_rate": 5.76e-06, "loss": 3.2515, "step": 150 }, { "epoch": 1.208, "grad_norm": 62.670753479003906, "learning_rate": 5.8e-06, "loss": 2.8419, "step": 151 }, { "epoch": 1.216, "grad_norm": 695.3433227539062, "learning_rate": 5.84e-06, "loss": 3.315, "step": 152 }, { "epoch": 1.224, "grad_norm": 56.769412994384766, "learning_rate": 5.8800000000000005e-06, "loss": 2.6809, "step": 153 }, { "epoch": 1.232, "grad_norm": 122.31832122802734, "learning_rate": 5.920000000000001e-06, "loss": 3.3855, "step": 154 }, { 
"epoch": 1.24, "grad_norm": 121.68486022949219, "learning_rate": 5.9600000000000005e-06, "loss": 3.3145, "step": 155 }, { "epoch": 1.248, "grad_norm": 172.44613647460938, "learning_rate": 6e-06, "loss": 3.2734, "step": 156 }, { "epoch": 1.256, "grad_norm": 297.0345458984375, "learning_rate": 6.040000000000001e-06, "loss": 2.6729, "step": 157 }, { "epoch": 1.264, "grad_norm": 64.16734313964844, "learning_rate": 6.08e-06, "loss": 3.2353, "step": 158 }, { "epoch": 1.272, "grad_norm": 82.39089965820312, "learning_rate": 6.12e-06, "loss": 3.266, "step": 159 }, { "epoch": 1.28, "grad_norm": 66.64234924316406, "learning_rate": 6.16e-06, "loss": 2.7883, "step": 160 }, { "epoch": 1.288, "grad_norm": 193.49183654785156, "learning_rate": 6.2e-06, "loss": 2.6107, "step": 161 }, { "epoch": 1.296, "grad_norm": 181.9052734375, "learning_rate": 6.24e-06, "loss": 3.4735, "step": 162 }, { "epoch": 1.304, "grad_norm": 200.48898315429688, "learning_rate": 6.28e-06, "loss": 3.1991, "step": 163 }, { "epoch": 1.312, "grad_norm": 100.8204345703125, "learning_rate": 6.320000000000001e-06, "loss": 2.9391, "step": 164 }, { "epoch": 1.32, "grad_norm": 424.3245544433594, "learning_rate": 6.360000000000001e-06, "loss": 3.227, "step": 165 }, { "epoch": 1.328, "grad_norm": 112.46810913085938, "learning_rate": 6.4000000000000006e-06, "loss": 3.2945, "step": 166 }, { "epoch": 1.336, "grad_norm": 98.67427062988281, "learning_rate": 6.44e-06, "loss": 2.9388, "step": 167 }, { "epoch": 1.3439999999999999, "grad_norm": 92.25496673583984, "learning_rate": 6.48e-06, "loss": 2.9036, "step": 168 }, { "epoch": 1.3519999999999999, "grad_norm": 777.2713623046875, "learning_rate": 6.519999999999999e-06, "loss": 3.0882, "step": 169 }, { "epoch": 1.3599999999999999, "grad_norm": 389.8956298828125, "learning_rate": 6.560000000000001e-06, "loss": 3.1264, "step": 170 }, { "epoch": 1.3679999999999999, "grad_norm": 102.7580795288086, "learning_rate": 6.6e-06, "loss": 2.8724, "step": 171 }, { "epoch": 1.376, 
"grad_norm": 58.11960983276367, "learning_rate": 6.640000000000001e-06, "loss": 3.0743, "step": 172 }, { "epoch": 1.384, "grad_norm": 82.16289520263672, "learning_rate": 6.68e-06, "loss": 2.5357, "step": 173 }, { "epoch": 1.392, "grad_norm": 151.7187957763672, "learning_rate": 6.72e-06, "loss": 2.8603, "step": 174 }, { "epoch": 1.4, "grad_norm": 58.54518508911133, "learning_rate": 6.76e-06, "loss": 3.1248, "step": 175 }, { "epoch": 1.408, "grad_norm": 99.28834533691406, "learning_rate": 6.800000000000001e-06, "loss": 2.787, "step": 176 }, { "epoch": 1.416, "grad_norm": 75.19267272949219, "learning_rate": 6.840000000000001e-06, "loss": 3.8098, "step": 177 }, { "epoch": 1.424, "grad_norm": 41.61681365966797, "learning_rate": 6.88e-06, "loss": 3.0039, "step": 178 }, { "epoch": 1.432, "grad_norm": 94.40145874023438, "learning_rate": 6.92e-06, "loss": 2.5977, "step": 179 }, { "epoch": 1.44, "grad_norm": 39.552425384521484, "learning_rate": 6.9599999999999994e-06, "loss": 2.7662, "step": 180 }, { "epoch": 1.448, "grad_norm": 135.19430541992188, "learning_rate": 7.000000000000001e-06, "loss": 3.0042, "step": 181 }, { "epoch": 1.456, "grad_norm": 216.89808654785156, "learning_rate": 7.04e-06, "loss": 3.0303, "step": 182 }, { "epoch": 1.464, "grad_norm": 54.03306579589844, "learning_rate": 7.080000000000001e-06, "loss": 2.6518, "step": 183 }, { "epoch": 1.472, "grad_norm": 291.9791259765625, "learning_rate": 7.1200000000000004e-06, "loss": 2.7167, "step": 184 }, { "epoch": 1.48, "grad_norm": 175.11582946777344, "learning_rate": 7.16e-06, "loss": 3.031, "step": 185 }, { "epoch": 1.488, "grad_norm": 876.0560302734375, "learning_rate": 7.2e-06, "loss": 2.4896, "step": 186 }, { "epoch": 1.496, "grad_norm": 99.68022918701172, "learning_rate": 7.240000000000001e-06, "loss": 3.0293, "step": 187 }, { "epoch": 1.504, "grad_norm": 79.23816680908203, "learning_rate": 7.280000000000001e-06, "loss": 2.6455, "step": 188 }, { "epoch": 1.512, "grad_norm": 193.14756774902344, 
"learning_rate": 7.32e-06, "loss": 2.9751, "step": 189 }, { "epoch": 1.52, "grad_norm": 98.05293273925781, "learning_rate": 7.36e-06, "loss": 2.7864, "step": 190 }, { "epoch": 1.528, "grad_norm": 344.5738525390625, "learning_rate": 7.4e-06, "loss": 2.8523, "step": 191 }, { "epoch": 1.536, "grad_norm": 80.12562561035156, "learning_rate": 7.44e-06, "loss": 2.9298, "step": 192 }, { "epoch": 1.544, "grad_norm": 162.23057556152344, "learning_rate": 7.480000000000001e-06, "loss": 2.3612, "step": 193 }, { "epoch": 1.552, "grad_norm": 51.85056686401367, "learning_rate": 7.520000000000001e-06, "loss": 3.1353, "step": 194 }, { "epoch": 1.56, "grad_norm": 109.81664276123047, "learning_rate": 7.5600000000000005e-06, "loss": 2.9924, "step": 195 }, { "epoch": 1.568, "grad_norm": 32.250946044921875, "learning_rate": 7.6e-06, "loss": 3.1788, "step": 196 }, { "epoch": 1.576, "grad_norm": 121.79150390625, "learning_rate": 7.64e-06, "loss": 3.2361, "step": 197 }, { "epoch": 1.584, "grad_norm": 79.35063934326172, "learning_rate": 7.68e-06, "loss": 2.9921, "step": 198 }, { "epoch": 1.592, "grad_norm": 45.8731575012207, "learning_rate": 7.72e-06, "loss": 2.2988, "step": 199 }, { "epoch": 1.6, "grad_norm": 72.78368377685547, "learning_rate": 7.76e-06, "loss": 2.4288, "step": 200 }, { "epoch": 1.608, "grad_norm": 1316.37939453125, "learning_rate": 7.8e-06, "loss": 2.6395, "step": 201 }, { "epoch": 1.616, "grad_norm": 78.43234252929688, "learning_rate": 7.84e-06, "loss": 2.9443, "step": 202 }, { "epoch": 1.624, "grad_norm": 93.8448715209961, "learning_rate": 7.879999999999999e-06, "loss": 3.1692, "step": 203 }, { "epoch": 1.6320000000000001, "grad_norm": 58.146728515625, "learning_rate": 7.92e-06, "loss": 3.346, "step": 204 }, { "epoch": 1.6400000000000001, "grad_norm": 119.63597106933594, "learning_rate": 7.96e-06, "loss": 2.6413, "step": 205 }, { "epoch": 1.6480000000000001, "grad_norm": 69.34164428710938, "learning_rate": 8.000000000000001e-06, "loss": 2.7493, "step": 206 }, { "epoch": 
1.6560000000000001, "grad_norm": 2035.927978515625, "learning_rate": 8.040000000000001e-06, "loss": 2.9083, "step": 207 }, { "epoch": 1.6640000000000001, "grad_norm": 83.82562255859375, "learning_rate": 8.08e-06, "loss": 2.7091, "step": 208 }, { "epoch": 1.6720000000000002, "grad_norm": 67.22017669677734, "learning_rate": 8.12e-06, "loss": 2.9763, "step": 209 }, { "epoch": 1.6800000000000002, "grad_norm": 59.69850540161133, "learning_rate": 8.160000000000001e-06, "loss": 2.2394, "step": 210 }, { "epoch": 1.688, "grad_norm": 166.77684020996094, "learning_rate": 8.200000000000001e-06, "loss": 2.8276, "step": 211 }, { "epoch": 1.696, "grad_norm": 168.26475524902344, "learning_rate": 8.24e-06, "loss": 2.304, "step": 212 }, { "epoch": 1.704, "grad_norm": 165.33319091796875, "learning_rate": 8.28e-06, "loss": 2.8372, "step": 213 }, { "epoch": 1.712, "grad_norm": 115.78865814208984, "learning_rate": 8.32e-06, "loss": 2.7579, "step": 214 }, { "epoch": 1.72, "grad_norm": 76.4078598022461, "learning_rate": 8.36e-06, "loss": 2.4737, "step": 215 }, { "epoch": 1.728, "grad_norm": 180.92135620117188, "learning_rate": 8.400000000000001e-06, "loss": 2.8681, "step": 216 }, { "epoch": 1.736, "grad_norm": 155.7196502685547, "learning_rate": 8.44e-06, "loss": 2.5244, "step": 217 }, { "epoch": 1.744, "grad_norm": 69.2465591430664, "learning_rate": 8.48e-06, "loss": 2.9132, "step": 218 }, { "epoch": 1.752, "grad_norm": 150.38404846191406, "learning_rate": 8.52e-06, "loss": 2.6143, "step": 219 }, { "epoch": 1.76, "grad_norm": 302.1109619140625, "learning_rate": 8.56e-06, "loss": 2.7796, "step": 220 }, { "epoch": 1.768, "grad_norm": 49.816627502441406, "learning_rate": 8.599999999999999e-06, "loss": 2.9374, "step": 221 }, { "epoch": 1.776, "grad_norm": 113.82521057128906, "learning_rate": 8.64e-06, "loss": 2.7379, "step": 222 }, { "epoch": 1.784, "grad_norm": 82.34598541259766, "learning_rate": 8.68e-06, "loss": 2.1132, "step": 223 }, { "epoch": 1.792, "grad_norm": 108.49813842773438, 
"learning_rate": 8.720000000000001e-06, "loss": 2.7638, "step": 224 }, { "epoch": 1.8, "grad_norm": 80.73068237304688, "learning_rate": 8.76e-06, "loss": 2.086, "step": 225 }, { "epoch": 1.808, "grad_norm": 351.2693176269531, "learning_rate": 8.8e-06, "loss": 2.4929, "step": 226 }, { "epoch": 1.8159999999999998, "grad_norm": 452.8299255371094, "learning_rate": 8.840000000000002e-06, "loss": 2.1883, "step": 227 }, { "epoch": 1.8239999999999998, "grad_norm": 80.7880859375, "learning_rate": 8.880000000000001e-06, "loss": 2.6437, "step": 228 }, { "epoch": 1.8319999999999999, "grad_norm": 48.25812911987305, "learning_rate": 8.920000000000001e-06, "loss": 2.991, "step": 229 }, { "epoch": 1.8399999999999999, "grad_norm": 106.26907348632812, "learning_rate": 8.96e-06, "loss": 2.988, "step": 230 }, { "epoch": 1.8479999999999999, "grad_norm": 104.2464370727539, "learning_rate": 9e-06, "loss": 3.066, "step": 231 }, { "epoch": 1.8559999999999999, "grad_norm": 210.95603942871094, "learning_rate": 9.04e-06, "loss": 3.1951, "step": 232 }, { "epoch": 1.8639999999999999, "grad_norm": 131.26107788085938, "learning_rate": 9.080000000000001e-06, "loss": 2.5331, "step": 233 }, { "epoch": 1.8719999999999999, "grad_norm": 32.202667236328125, "learning_rate": 9.12e-06, "loss": 2.5357, "step": 234 }, { "epoch": 1.88, "grad_norm": 242.13876342773438, "learning_rate": 9.16e-06, "loss": 2.3707, "step": 235 }, { "epoch": 1.888, "grad_norm": 198.6193084716797, "learning_rate": 9.2e-06, "loss": 2.8426, "step": 236 }, { "epoch": 1.896, "grad_norm": 121.5751953125, "learning_rate": 9.24e-06, "loss": 2.901, "step": 237 }, { "epoch": 1.904, "grad_norm": 91.37435913085938, "learning_rate": 9.28e-06, "loss": 2.3094, "step": 238 }, { "epoch": 1.912, "grad_norm": 305.49102783203125, "learning_rate": 9.32e-06, "loss": 2.5659, "step": 239 }, { "epoch": 1.92, "grad_norm": 90.07247161865234, "learning_rate": 9.36e-06, "loss": 2.7266, "step": 240 }, { "epoch": 1.928, "grad_norm": 68.57479095458984, 
"learning_rate": 9.4e-06, "loss": 2.7571, "step": 241 }, { "epoch": 1.936, "grad_norm": 117.37018585205078, "learning_rate": 9.44e-06, "loss": 3.7889, "step": 242 }, { "epoch": 1.944, "grad_norm": 42.809932708740234, "learning_rate": 9.48e-06, "loss": 3.2685, "step": 243 }, { "epoch": 1.952, "grad_norm": 65.71151733398438, "learning_rate": 9.52e-06, "loss": 2.4039, "step": 244 }, { "epoch": 1.96, "grad_norm": 71.61064147949219, "learning_rate": 9.560000000000002e-06, "loss": 2.6732, "step": 245 }, { "epoch": 1.968, "grad_norm": 50.07740783691406, "learning_rate": 9.600000000000001e-06, "loss": 2.7793, "step": 246 }, { "epoch": 1.976, "grad_norm": 97.5501708984375, "learning_rate": 9.640000000000001e-06, "loss": 2.5891, "step": 247 }, { "epoch": 1.984, "grad_norm": 783.6746215820312, "learning_rate": 9.68e-06, "loss": 2.3337, "step": 248 }, { "epoch": 1.992, "grad_norm": 43.17191696166992, "learning_rate": 9.72e-06, "loss": 2.4101, "step": 249 }, { "epoch": 2.0, "grad_norm": 69.44513702392578, "learning_rate": 9.760000000000001e-06, "loss": 2.44, "step": 250 }, { "epoch": 2.0, "eval_loss": 2.8061976432800293, "eval_map": 0.0015, "eval_map_50": 0.0049, "eval_map_75": 0.0009, "eval_map_Coverall": 0.004, "eval_map_Face_Shield": 0.0001, "eval_map_Gloves": 0.0, "eval_map_Goggles": 0.0032, "eval_map_Mask": 0.0003, "eval_map_large": 0.0024, "eval_map_medium": 0.0013, "eval_map_small": 0.0005, "eval_mar_1": 0.0058, "eval_mar_10": 0.0259, "eval_mar_100": 0.0908, "eval_mar_100_Coverall": 0.2444, "eval_mar_100_Face_Shield": 0.0176, "eval_mar_100_Gloves": 0.0, "eval_mar_100_Goggles": 0.1594, "eval_mar_100_Mask": 0.0327, "eval_mar_large": 0.0787, "eval_mar_medium": 0.055, "eval_mar_small": 0.0094, "eval_runtime": 2.3849, "eval_samples_per_second": 12.16, "eval_steps_per_second": 0.839, "step": 250 }, { "epoch": 2.008, "grad_norm": 273.0030822753906, "learning_rate": 9.800000000000001e-06, "loss": 2.8755, "step": 251 }, { "epoch": 2.016, "grad_norm": 119.15902709960938, 
"learning_rate": 9.84e-06, "loss": 2.6412, "step": 252 }, { "epoch": 2.024, "grad_norm": 80.67162322998047, "learning_rate": 9.88e-06, "loss": 2.8863, "step": 253 }, { "epoch": 2.032, "grad_norm": 77.7457504272461, "learning_rate": 9.92e-06, "loss": 2.8151, "step": 254 }, { "epoch": 2.04, "grad_norm": 123.25385284423828, "learning_rate": 9.96e-06, "loss": 2.5253, "step": 255 }, { "epoch": 2.048, "grad_norm": 37.60317611694336, "learning_rate": 1e-05, "loss": 2.6567, "step": 256 }, { "epoch": 2.056, "grad_norm": 520.920166015625, "learning_rate": 1.004e-05, "loss": 2.7356, "step": 257 }, { "epoch": 2.064, "grad_norm": 423.4622497558594, "learning_rate": 1.008e-05, "loss": 2.4304, "step": 258 }, { "epoch": 2.072, "grad_norm": 114.81659698486328, "learning_rate": 1.012e-05, "loss": 2.3661, "step": 259 }, { "epoch": 2.08, "grad_norm": 32.06356430053711, "learning_rate": 1.016e-05, "loss": 2.2924, "step": 260 }, { "epoch": 2.088, "grad_norm": 47.54756546020508, "learning_rate": 1.02e-05, "loss": 2.6177, "step": 261 }, { "epoch": 2.096, "grad_norm": 103.67440795898438, "learning_rate": 1.024e-05, "loss": 2.3227, "step": 262 }, { "epoch": 2.104, "grad_norm": 46.773494720458984, "learning_rate": 1.0280000000000002e-05, "loss": 2.579, "step": 263 }, { "epoch": 2.112, "grad_norm": 49.957069396972656, "learning_rate": 1.0320000000000001e-05, "loss": 2.7803, "step": 264 }, { "epoch": 2.12, "grad_norm": 38.59306716918945, "learning_rate": 1.036e-05, "loss": 2.3095, "step": 265 }, { "epoch": 2.128, "grad_norm": 33.51152038574219, "learning_rate": 1.04e-05, "loss": 2.7498, "step": 266 }, { "epoch": 2.136, "grad_norm": 42.97795486450195, "learning_rate": 1.0440000000000002e-05, "loss": 2.3999, "step": 267 }, { "epoch": 2.144, "grad_norm": 107.72000122070312, "learning_rate": 1.0480000000000001e-05, "loss": 2.3383, "step": 268 }, { "epoch": 2.152, "grad_norm": 69.89026641845703, "learning_rate": 1.0520000000000001e-05, "loss": 2.5072, "step": 269 }, { "epoch": 2.16, "grad_norm": 
74.00114440917969, "learning_rate": 1.056e-05, "loss": 3.1107, "step": 270 }, { "epoch": 2.168, "grad_norm": 122.3204116821289, "learning_rate": 1.06e-05, "loss": 2.5807, "step": 271 }, { "epoch": 2.176, "grad_norm": 113.57158660888672, "learning_rate": 1.064e-05, "loss": 3.2961, "step": 272 }, { "epoch": 2.184, "grad_norm": 68.58211517333984, "learning_rate": 1.0680000000000001e-05, "loss": 3.0807, "step": 273 }, { "epoch": 2.192, "grad_norm": 516.0885009765625, "learning_rate": 1.072e-05, "loss": 2.5269, "step": 274 }, { "epoch": 2.2, "grad_norm": 257.1831359863281, "learning_rate": 1.076e-05, "loss": 2.6555, "step": 275 }, { "epoch": 2.208, "grad_norm": 76.11795043945312, "learning_rate": 1.08e-05, "loss": 2.1763, "step": 276 }, { "epoch": 2.216, "grad_norm": 58.46082305908203, "learning_rate": 1.084e-05, "loss": 2.5224, "step": 277 }, { "epoch": 2.224, "grad_norm": 49.63372802734375, "learning_rate": 1.088e-05, "loss": 2.1092, "step": 278 }, { "epoch": 2.232, "grad_norm": 144.7175750732422, "learning_rate": 1.092e-05, "loss": 2.4838, "step": 279 }, { "epoch": 2.24, "grad_norm": 62.90633010864258, "learning_rate": 1.096e-05, "loss": 2.104, "step": 280 }, { "epoch": 2.248, "grad_norm": 45.719696044921875, "learning_rate": 1.1000000000000001e-05, "loss": 1.9349, "step": 281 }, { "epoch": 2.2560000000000002, "grad_norm": 108.00200653076172, "learning_rate": 1.1040000000000001e-05, "loss": 2.4654, "step": 282 }, { "epoch": 2.2640000000000002, "grad_norm": 68.58576965332031, "learning_rate": 1.108e-05, "loss": 2.2648, "step": 283 }, { "epoch": 2.2720000000000002, "grad_norm": 79.12167358398438, "learning_rate": 1.112e-05, "loss": 2.539, "step": 284 }, { "epoch": 2.2800000000000002, "grad_norm": 128.82247924804688, "learning_rate": 1.1160000000000002e-05, "loss": 2.437, "step": 285 }, { "epoch": 2.288, "grad_norm": 107.65774536132812, "learning_rate": 1.1200000000000001e-05, "loss": 2.1345, "step": 286 }, { "epoch": 2.296, "grad_norm": 74.88320922851562, 
"learning_rate": 1.124e-05, "loss": 2.9638, "step": 287 }, { "epoch": 2.304, "grad_norm": 69.85789489746094, "learning_rate": 1.128e-05, "loss": 2.2235, "step": 288 }, { "epoch": 2.312, "grad_norm": 30.371458053588867, "learning_rate": 1.132e-05, "loss": 2.2983, "step": 289 }, { "epoch": 2.32, "grad_norm": 167.9508819580078, "learning_rate": 1.1360000000000001e-05, "loss": 2.3716, "step": 290 }, { "epoch": 2.328, "grad_norm": 32.107975006103516, "learning_rate": 1.1400000000000001e-05, "loss": 3.585, "step": 291 }, { "epoch": 2.336, "grad_norm": 81.20243072509766, "learning_rate": 1.144e-05, "loss": 2.217, "step": 292 }, { "epoch": 2.344, "grad_norm": 46.22075653076172, "learning_rate": 1.148e-05, "loss": 2.4713, "step": 293 }, { "epoch": 2.352, "grad_norm": 72.51619720458984, "learning_rate": 1.152e-05, "loss": 2.4677, "step": 294 }, { "epoch": 2.36, "grad_norm": 56.28563690185547, "learning_rate": 1.156e-05, "loss": 2.7389, "step": 295 }, { "epoch": 2.368, "grad_norm": 69.67160034179688, "learning_rate": 1.16e-05, "loss": 2.4064, "step": 296 }, { "epoch": 2.376, "grad_norm": 99.34642028808594, "learning_rate": 1.164e-05, "loss": 2.7255, "step": 297 }, { "epoch": 2.384, "grad_norm": 555.0813598632812, "learning_rate": 1.168e-05, "loss": 2.4948, "step": 298 }, { "epoch": 2.392, "grad_norm": 75.78450012207031, "learning_rate": 1.172e-05, "loss": 2.2938, "step": 299 }, { "epoch": 2.4, "grad_norm": 53.4753532409668, "learning_rate": 1.1760000000000001e-05, "loss": 2.3788, "step": 300 }, { "epoch": 2.408, "grad_norm": 77.2113265991211, "learning_rate": 1.18e-05, "loss": 2.0829, "step": 301 }, { "epoch": 2.416, "grad_norm": 46.806602478027344, "learning_rate": 1.1840000000000002e-05, "loss": 2.1152, "step": 302 }, { "epoch": 2.424, "grad_norm": 54.08843231201172, "learning_rate": 1.1880000000000001e-05, "loss": 2.983, "step": 303 }, { "epoch": 2.432, "grad_norm": 73.82364654541016, "learning_rate": 1.1920000000000001e-05, "loss": 2.157, "step": 304 }, { "epoch": 2.44, 
"grad_norm": 43.181785583496094, "learning_rate": 1.196e-05, "loss": 2.3997, "step": 305 }, { "epoch": 2.448, "grad_norm": 59.345787048339844, "learning_rate": 1.2e-05, "loss": 2.199, "step": 306 }, { "epoch": 2.456, "grad_norm": 37.84815216064453, "learning_rate": 1.204e-05, "loss": 2.1723, "step": 307 }, { "epoch": 2.464, "grad_norm": 196.50836181640625, "learning_rate": 1.2080000000000001e-05, "loss": 2.2198, "step": 308 }, { "epoch": 2.472, "grad_norm": 76.31352996826172, "learning_rate": 1.2120000000000001e-05, "loss": 2.0052, "step": 309 }, { "epoch": 2.48, "grad_norm": 691.5517578125, "learning_rate": 1.216e-05, "loss": 1.9304, "step": 310 }, { "epoch": 2.488, "grad_norm": 113.79351043701172, "learning_rate": 1.22e-05, "loss": 2.1959, "step": 311 }, { "epoch": 2.496, "grad_norm": 91.83307647705078, "learning_rate": 1.224e-05, "loss": 2.0296, "step": 312 }, { "epoch": 2.504, "grad_norm": 101.8157958984375, "learning_rate": 1.2280000000000001e-05, "loss": 2.0294, "step": 313 }, { "epoch": 2.512, "grad_norm": 259.2900085449219, "learning_rate": 1.232e-05, "loss": 2.8776, "step": 314 }, { "epoch": 2.52, "grad_norm": 41.89806365966797, "learning_rate": 1.236e-05, "loss": 2.7241, "step": 315 }, { "epoch": 2.528, "grad_norm": 202.83502197265625, "learning_rate": 1.24e-05, "loss": 2.6619, "step": 316 }, { "epoch": 2.536, "grad_norm": 50.0519905090332, "learning_rate": 1.244e-05, "loss": 2.1089, "step": 317 }, { "epoch": 2.544, "grad_norm": 187.32899475097656, "learning_rate": 1.248e-05, "loss": 2.0722, "step": 318 }, { "epoch": 2.552, "grad_norm": 30.898834228515625, "learning_rate": 1.252e-05, "loss": 2.1284, "step": 319 }, { "epoch": 2.56, "grad_norm": 40.22548294067383, "learning_rate": 1.256e-05, "loss": 2.5472, "step": 320 }, { "epoch": 2.568, "grad_norm": 118.34954833984375, "learning_rate": 1.2600000000000001e-05, "loss": 2.1889, "step": 321 }, { "epoch": 2.576, "grad_norm": 224.06890869140625, "learning_rate": 1.2640000000000003e-05, "loss": 1.5606, "step": 
322 }, { "epoch": 2.584, "grad_norm": 69.17166137695312, "learning_rate": 1.268e-05, "loss": 2.1685, "step": 323 }, { "epoch": 2.592, "grad_norm": 48.758636474609375, "learning_rate": 1.2720000000000002e-05, "loss": 2.6729, "step": 324 }, { "epoch": 2.6, "grad_norm": 97.45775604248047, "learning_rate": 1.276e-05, "loss": 2.4343, "step": 325 }, { "epoch": 2.608, "grad_norm": 69.75992584228516, "learning_rate": 1.2800000000000001e-05, "loss": 1.8153, "step": 326 }, { "epoch": 2.616, "grad_norm": 102.84680938720703, "learning_rate": 1.2839999999999999e-05, "loss": 1.8373, "step": 327 }, { "epoch": 2.624, "grad_norm": 87.10126495361328, "learning_rate": 1.288e-05, "loss": 1.8211, "step": 328 }, { "epoch": 2.632, "grad_norm": 126.39292907714844, "learning_rate": 1.2920000000000002e-05, "loss": 1.9194, "step": 329 }, { "epoch": 2.64, "grad_norm": 43.503257751464844, "learning_rate": 1.296e-05, "loss": 2.1039, "step": 330 }, { "epoch": 2.648, "grad_norm": 151.16343688964844, "learning_rate": 1.3000000000000001e-05, "loss": 1.7227, "step": 331 }, { "epoch": 2.656, "grad_norm": 125.84759521484375, "learning_rate": 1.3039999999999999e-05, "loss": 1.9895, "step": 332 }, { "epoch": 2.664, "grad_norm": 125.65667724609375, "learning_rate": 1.308e-05, "loss": 2.0386, "step": 333 }, { "epoch": 2.672, "grad_norm": 52.83179473876953, "learning_rate": 1.3120000000000001e-05, "loss": 2.0174, "step": 334 }, { "epoch": 2.68, "grad_norm": 55.04395294189453, "learning_rate": 1.316e-05, "loss": 2.5544, "step": 335 }, { "epoch": 2.6879999999999997, "grad_norm": 34.91893005371094, "learning_rate": 1.32e-05, "loss": 2.0269, "step": 336 }, { "epoch": 2.6959999999999997, "grad_norm": 51.98933029174805, "learning_rate": 1.324e-05, "loss": 2.2672, "step": 337 }, { "epoch": 2.7039999999999997, "grad_norm": 45.98496627807617, "learning_rate": 1.3280000000000002e-05, "loss": 2.0155, "step": 338 }, { "epoch": 2.7119999999999997, "grad_norm": 43.7529296875, "learning_rate": 1.3320000000000001e-05, 
"loss": 2.0091, "step": 339 }, { "epoch": 2.7199999999999998, "grad_norm": 256.66778564453125, "learning_rate": 1.336e-05, "loss": 1.9495, "step": 340 }, { "epoch": 2.7279999999999998, "grad_norm": 210.88426208496094, "learning_rate": 1.3400000000000002e-05, "loss": 2.7492, "step": 341 }, { "epoch": 2.7359999999999998, "grad_norm": 70.38453674316406, "learning_rate": 1.344e-05, "loss": 1.6673, "step": 342 }, { "epoch": 2.7439999999999998, "grad_norm": 45.520179748535156, "learning_rate": 1.3480000000000001e-05, "loss": 2.3146, "step": 343 }, { "epoch": 2.752, "grad_norm": 114.48503875732422, "learning_rate": 1.352e-05, "loss": 2.0667, "step": 344 }, { "epoch": 2.76, "grad_norm": 243.6885223388672, "learning_rate": 1.356e-05, "loss": 2.2657, "step": 345 }, { "epoch": 2.768, "grad_norm": 232.80067443847656, "learning_rate": 1.3600000000000002e-05, "loss": 2.4555, "step": 346 }, { "epoch": 2.776, "grad_norm": 89.6315689086914, "learning_rate": 1.364e-05, "loss": 2.998, "step": 347 }, { "epoch": 2.784, "grad_norm": 146.47518920898438, "learning_rate": 1.3680000000000001e-05, "loss": 2.4406, "step": 348 }, { "epoch": 2.792, "grad_norm": 57.995906829833984, "learning_rate": 1.3719999999999999e-05, "loss": 2.1132, "step": 349 }, { "epoch": 2.8, "grad_norm": 127.05773162841797, "learning_rate": 1.376e-05, "loss": 2.4282, "step": 350 }, { "epoch": 2.808, "grad_norm": 56.27194595336914, "learning_rate": 1.3800000000000002e-05, "loss": 2.2067, "step": 351 }, { "epoch": 2.816, "grad_norm": 302.6056823730469, "learning_rate": 1.384e-05, "loss": 2.4007, "step": 352 }, { "epoch": 2.824, "grad_norm": 73.77351379394531, "learning_rate": 1.3880000000000001e-05, "loss": 1.992, "step": 353 }, { "epoch": 2.832, "grad_norm": 39.93824768066406, "learning_rate": 1.3919999999999999e-05, "loss": 2.1449, "step": 354 }, { "epoch": 2.84, "grad_norm": 68.86712646484375, "learning_rate": 1.396e-05, "loss": 2.3047, "step": 355 }, { "epoch": 2.848, "grad_norm": 456.7715759277344, "learning_rate": 
1.4000000000000001e-05, "loss": 1.9841, "step": 356 }, { "epoch": 2.856, "grad_norm": 68.1517333984375, "learning_rate": 1.4040000000000001e-05, "loss": 2.7618, "step": 357 }, { "epoch": 2.864, "grad_norm": 45.37385940551758, "learning_rate": 1.408e-05, "loss": 1.7796, "step": 358 }, { "epoch": 2.872, "grad_norm": 81.78600311279297, "learning_rate": 1.412e-05, "loss": 1.9483, "step": 359 }, { "epoch": 2.88, "grad_norm": 61.10250473022461, "learning_rate": 1.4160000000000002e-05, "loss": 2.1795, "step": 360 }, { "epoch": 2.888, "grad_norm": 88.18387603759766, "learning_rate": 1.42e-05, "loss": 1.8811, "step": 361 }, { "epoch": 2.896, "grad_norm": 30.230112075805664, "learning_rate": 1.4240000000000001e-05, "loss": 2.3058, "step": 362 }, { "epoch": 2.904, "grad_norm": 46.22890853881836, "learning_rate": 1.4280000000000002e-05, "loss": 2.019, "step": 363 }, { "epoch": 2.912, "grad_norm": 93.09568786621094, "learning_rate": 1.432e-05, "loss": 2.418, "step": 364 }, { "epoch": 2.92, "grad_norm": 29.18531036376953, "learning_rate": 1.4360000000000001e-05, "loss": 1.8458, "step": 365 }, { "epoch": 2.928, "grad_norm": 73.72071075439453, "learning_rate": 1.44e-05, "loss": 2.2139, "step": 366 }, { "epoch": 2.936, "grad_norm": 37.35347366333008, "learning_rate": 1.444e-05, "loss": 3.1047, "step": 367 }, { "epoch": 2.944, "grad_norm": 110.39607238769531, "learning_rate": 1.4480000000000002e-05, "loss": 2.7043, "step": 368 }, { "epoch": 2.952, "grad_norm": 100.98404693603516, "learning_rate": 1.452e-05, "loss": 2.058, "step": 369 }, { "epoch": 2.96, "grad_norm": 66.9675064086914, "learning_rate": 1.4560000000000001e-05, "loss": 1.8002, "step": 370 }, { "epoch": 2.968, "grad_norm": 47.804412841796875, "learning_rate": 1.4599999999999999e-05, "loss": 2.1241, "step": 371 }, { "epoch": 2.976, "grad_norm": 686.2343139648438, "learning_rate": 1.464e-05, "loss": 2.8913, "step": 372 }, { "epoch": 2.984, "grad_norm": 242.8663330078125, "learning_rate": 1.4680000000000002e-05, "loss": 
1.6699, "step": 373 }, { "epoch": 2.992, "grad_norm": 154.8770751953125, "learning_rate": 1.472e-05, "loss": 2.1046, "step": 374 }, { "epoch": 3.0, "grad_norm": 159.90292358398438, "learning_rate": 1.4760000000000001e-05, "loss": 2.2691, "step": 375 }, { "epoch": 3.0, "eval_loss": 2.417036533355713, "eval_map": 0.0109, "eval_map_50": 0.0253, "eval_map_75": 0.0074, "eval_map_Coverall": 0.0318, "eval_map_Face_Shield": 0.0, "eval_map_Gloves": 0.0048, "eval_map_Goggles": 0.0139, "eval_map_Mask": 0.0043, "eval_map_large": 0.0133, "eval_map_medium": 0.008, "eval_map_small": 0.0003, "eval_mar_1": 0.0305, "eval_mar_10": 0.0888, "eval_mar_100": 0.1742, "eval_mar_100_Coverall": 0.5333, "eval_mar_100_Face_Shield": 0.0, "eval_mar_100_Gloves": 0.0508, "eval_mar_100_Goggles": 0.0656, "eval_mar_100_Mask": 0.2212, "eval_mar_large": 0.1823, "eval_mar_medium": 0.0946, "eval_mar_small": 0.0184, "eval_runtime": 2.332, "eval_samples_per_second": 12.436, "eval_steps_per_second": 0.858, "step": 375 }, { "epoch": 3.008, "grad_norm": 146.17581176757812, "learning_rate": 1.48e-05, "loss": 2.3086, "step": 376 }, { "epoch": 3.016, "grad_norm": 112.1141128540039, "learning_rate": 1.4840000000000002e-05, "loss": 1.9587, "step": 377 }, { "epoch": 3.024, "grad_norm": 63.784149169921875, "learning_rate": 1.488e-05, "loss": 2.0579, "step": 378 }, { "epoch": 3.032, "grad_norm": 48.5575065612793, "learning_rate": 1.4920000000000001e-05, "loss": 2.1732, "step": 379 }, { "epoch": 3.04, "grad_norm": 236.2929229736328, "learning_rate": 1.4960000000000002e-05, "loss": 2.2801, "step": 380 }, { "epoch": 3.048, "grad_norm": 34.67193603515625, "learning_rate": 1.5e-05, "loss": 2.1018, "step": 381 }, { "epoch": 3.056, "grad_norm": 153.69851684570312, "learning_rate": 1.5040000000000002e-05, "loss": 1.4769, "step": 382 }, { "epoch": 3.064, "grad_norm": 71.13607025146484, "learning_rate": 1.508e-05, "loss": 3.6226, "step": 383 }, { "epoch": 3.072, "grad_norm": 41.95542907714844, "learning_rate": 
1.5120000000000001e-05, "loss": 2.6717, "step": 384 }, { "epoch": 3.08, "grad_norm": 40.001953125, "learning_rate": 1.5160000000000002e-05, "loss": 1.9662, "step": 385 }, { "epoch": 3.088, "grad_norm": 87.35448455810547, "learning_rate": 1.52e-05, "loss": 2.2235, "step": 386 }, { "epoch": 3.096, "grad_norm": 66.22189331054688, "learning_rate": 1.5240000000000001e-05, "loss": 2.1917, "step": 387 }, { "epoch": 3.104, "grad_norm": 67.88890075683594, "learning_rate": 1.528e-05, "loss": 2.096, "step": 388 }, { "epoch": 3.112, "grad_norm": 57.8862190246582, "learning_rate": 1.5320000000000002e-05, "loss": 2.0083, "step": 389 }, { "epoch": 3.12, "grad_norm": 92.78666687011719, "learning_rate": 1.536e-05, "loss": 1.9228, "step": 390 }, { "epoch": 3.128, "grad_norm": 192.6053924560547, "learning_rate": 1.54e-05, "loss": 2.1849, "step": 391 }, { "epoch": 3.136, "grad_norm": 138.18775939941406, "learning_rate": 1.544e-05, "loss": 2.283, "step": 392 }, { "epoch": 3.144, "grad_norm": 76.73632049560547, "learning_rate": 1.548e-05, "loss": 2.0238, "step": 393 }, { "epoch": 3.152, "grad_norm": 95.17778015136719, "learning_rate": 1.552e-05, "loss": 2.0678, "step": 394 }, { "epoch": 3.16, "grad_norm": 153.00482177734375, "learning_rate": 1.556e-05, "loss": 2.1876, "step": 395 }, { "epoch": 3.168, "grad_norm": 54.646358489990234, "learning_rate": 1.56e-05, "loss": 2.47, "step": 396 }, { "epoch": 3.176, "grad_norm": 63.03348159790039, "learning_rate": 1.5640000000000003e-05, "loss": 2.1291, "step": 397 }, { "epoch": 3.184, "grad_norm": 39.00490951538086, "learning_rate": 1.568e-05, "loss": 1.9068, "step": 398 }, { "epoch": 3.192, "grad_norm": 82.51824188232422, "learning_rate": 1.5720000000000002e-05, "loss": 3.0118, "step": 399 }, { "epoch": 3.2, "grad_norm": 58.42021560668945, "learning_rate": 1.5759999999999998e-05, "loss": 1.9155, "step": 400 }, { "epoch": 3.208, "grad_norm": 40.48151779174805, "learning_rate": 1.58e-05, "loss": 2.4057, "step": 401 }, { "epoch": 3.216, 
"grad_norm": 124.93944549560547, "learning_rate": 1.584e-05, "loss": 1.5487, "step": 402 }, { "epoch": 3.224, "grad_norm": 132.81399536132812, "learning_rate": 1.588e-05, "loss": 1.8696, "step": 403 }, { "epoch": 3.232, "grad_norm": 62.8841552734375, "learning_rate": 1.592e-05, "loss": 2.0297, "step": 404 }, { "epoch": 3.24, "grad_norm": 379.2613525390625, "learning_rate": 1.596e-05, "loss": 2.0577, "step": 405 }, { "epoch": 3.248, "grad_norm": 33.1185188293457, "learning_rate": 1.6000000000000003e-05, "loss": 1.6667, "step": 406 }, { "epoch": 3.2560000000000002, "grad_norm": 90.23542022705078, "learning_rate": 1.604e-05, "loss": 2.3147, "step": 407 }, { "epoch": 3.2640000000000002, "grad_norm": 61.52249526977539, "learning_rate": 1.6080000000000002e-05, "loss": 1.7595, "step": 408 }, { "epoch": 3.2720000000000002, "grad_norm": 38.154685974121094, "learning_rate": 1.612e-05, "loss": 1.9209, "step": 409 }, { "epoch": 3.2800000000000002, "grad_norm": 90.80926513671875, "learning_rate": 1.616e-05, "loss": 2.0858, "step": 410 }, { "epoch": 3.288, "grad_norm": 97.52944946289062, "learning_rate": 1.62e-05, "loss": 2.3973, "step": 411 }, { "epoch": 3.296, "grad_norm": 62.36835479736328, "learning_rate": 1.624e-05, "loss": 2.2728, "step": 412 }, { "epoch": 3.304, "grad_norm": 44.6312141418457, "learning_rate": 1.628e-05, "loss": 2.2321, "step": 413 }, { "epoch": 3.312, "grad_norm": 60.92213439941406, "learning_rate": 1.6320000000000003e-05, "loss": 1.9542, "step": 414 }, { "epoch": 3.32, "grad_norm": 86.6742935180664, "learning_rate": 1.636e-05, "loss": 2.0682, "step": 415 }, { "epoch": 3.328, "grad_norm": 40.74186706542969, "learning_rate": 1.6400000000000002e-05, "loss": 1.9871, "step": 416 }, { "epoch": 3.336, "grad_norm": 83.4647216796875, "learning_rate": 1.644e-05, "loss": 2.0067, "step": 417 }, { "epoch": 3.344, "grad_norm": 63.716819763183594, "learning_rate": 1.648e-05, "loss": 1.6137, "step": 418 }, { "epoch": 3.352, "grad_norm": 250.9705047607422, 
"learning_rate": 1.652e-05, "loss": 2.1008, "step": 419 }, { "epoch": 3.36, "grad_norm": 198.97764587402344, "learning_rate": 1.656e-05, "loss": 2.3545, "step": 420 }, { "epoch": 3.368, "grad_norm": 53.6713981628418, "learning_rate": 1.66e-05, "loss": 2.3975, "step": 421 }, { "epoch": 3.376, "grad_norm": 70.55389404296875, "learning_rate": 1.664e-05, "loss": 2.0715, "step": 422 }, { "epoch": 3.384, "grad_norm": 111.49563598632812, "learning_rate": 1.668e-05, "loss": 2.2244, "step": 423 }, { "epoch": 3.392, "grad_norm": 89.2464828491211, "learning_rate": 1.672e-05, "loss": 3.3691, "step": 424 }, { "epoch": 3.4, "grad_norm": 56.47273254394531, "learning_rate": 1.6760000000000002e-05, "loss": 1.65, "step": 425 }, { "epoch": 3.408, "grad_norm": 426.0000915527344, "learning_rate": 1.6800000000000002e-05, "loss": 2.0852, "step": 426 }, { "epoch": 3.416, "grad_norm": 96.90975952148438, "learning_rate": 1.684e-05, "loss": 2.0854, "step": 427 }, { "epoch": 3.424, "grad_norm": 55.55000305175781, "learning_rate": 1.688e-05, "loss": 1.9608, "step": 428 }, { "epoch": 3.432, "grad_norm": 59.63561248779297, "learning_rate": 1.692e-05, "loss": 2.372, "step": 429 }, { "epoch": 3.44, "grad_norm": 49.75531768798828, "learning_rate": 1.696e-05, "loss": 2.1887, "step": 430 }, { "epoch": 3.448, "grad_norm": 57.27313232421875, "learning_rate": 1.7000000000000003e-05, "loss": 2.2642, "step": 431 }, { "epoch": 3.456, "grad_norm": 263.57098388671875, "learning_rate": 1.704e-05, "loss": 2.4551, "step": 432 }, { "epoch": 3.464, "grad_norm": 64.50694274902344, "learning_rate": 1.7080000000000002e-05, "loss": 2.0773, "step": 433 }, { "epoch": 3.472, "grad_norm": 116.50428009033203, "learning_rate": 1.712e-05, "loss": 2.1034, "step": 434 }, { "epoch": 3.48, "grad_norm": 94.7918701171875, "learning_rate": 1.7160000000000002e-05, "loss": 2.1961, "step": 435 }, { "epoch": 3.488, "grad_norm": 98.9638671875, "learning_rate": 1.7199999999999998e-05, "loss": 1.8514, "step": 436 }, { "epoch": 3.496, 
"grad_norm": 162.33999633789062, "learning_rate": 1.724e-05, "loss": 1.8924, "step": 437 }, { "epoch": 3.504, "grad_norm": 48.784671783447266, "learning_rate": 1.728e-05, "loss": 2.6132, "step": 438 }, { "epoch": 3.512, "grad_norm": 46.43291473388672, "learning_rate": 1.732e-05, "loss": 2.6759, "step": 439 }, { "epoch": 3.52, "grad_norm": 38.868896484375, "learning_rate": 1.736e-05, "loss": 2.0814, "step": 440 }, { "epoch": 3.528, "grad_norm": 65.44410705566406, "learning_rate": 1.74e-05, "loss": 1.5951, "step": 441 }, { "epoch": 3.536, "grad_norm": 38.08775329589844, "learning_rate": 1.7440000000000002e-05, "loss": 1.7305, "step": 442 }, { "epoch": 3.544, "grad_norm": 669.3871459960938, "learning_rate": 1.7480000000000002e-05, "loss": 2.045, "step": 443 }, { "epoch": 3.552, "grad_norm": 85.85997772216797, "learning_rate": 1.752e-05, "loss": 2.5188, "step": 444 }, { "epoch": 3.56, "grad_norm": 130.9741973876953, "learning_rate": 1.756e-05, "loss": 1.7955, "step": 445 }, { "epoch": 3.568, "grad_norm": 66.5066146850586, "learning_rate": 1.76e-05, "loss": 2.0147, "step": 446 }, { "epoch": 3.576, "grad_norm": 934.9554443359375, "learning_rate": 1.764e-05, "loss": 1.9207, "step": 447 }, { "epoch": 3.584, "grad_norm": 64.02294158935547, "learning_rate": 1.7680000000000004e-05, "loss": 1.9017, "step": 448 }, { "epoch": 3.592, "grad_norm": 46.84848403930664, "learning_rate": 1.772e-05, "loss": 1.9083, "step": 449 }, { "epoch": 3.6, "grad_norm": 74.28286743164062, "learning_rate": 1.7760000000000003e-05, "loss": 1.8209, "step": 450 }, { "epoch": 3.608, "grad_norm": 44.26166915893555, "learning_rate": 1.78e-05, "loss": 1.8266, "step": 451 }, { "epoch": 3.616, "grad_norm": 39.96067428588867, "learning_rate": 1.7840000000000002e-05, "loss": 2.7177, "step": 452 }, { "epoch": 3.624, "grad_norm": 77.42720794677734, "learning_rate": 1.7879999999999998e-05, "loss": 1.5959, "step": 453 }, { "epoch": 3.632, "grad_norm": 82.22885131835938, "learning_rate": 1.792e-05, "loss": 1.5396, 
"step": 454 }, { "epoch": 3.64, "grad_norm": 23.79221534729004, "learning_rate": 1.796e-05, "loss": 1.9154, "step": 455 }, { "epoch": 3.648, "grad_norm": 40.04005813598633, "learning_rate": 1.8e-05, "loss": 2.2211, "step": 456 }, { "epoch": 3.656, "grad_norm": 63.09788513183594, "learning_rate": 1.804e-05, "loss": 1.9784, "step": 457 }, { "epoch": 3.664, "grad_norm": 40.587223052978516, "learning_rate": 1.808e-05, "loss": 2.3828, "step": 458 }, { "epoch": 3.672, "grad_norm": 71.45930480957031, "learning_rate": 1.812e-05, "loss": 1.8058, "step": 459 }, { "epoch": 3.68, "grad_norm": 43.120506286621094, "learning_rate": 1.8160000000000002e-05, "loss": 2.0262, "step": 460 }, { "epoch": 3.6879999999999997, "grad_norm": 111.38655853271484, "learning_rate": 1.8200000000000002e-05, "loss": 2.383, "step": 461 }, { "epoch": 3.6959999999999997, "grad_norm": 36.01112747192383, "learning_rate": 1.824e-05, "loss": 2.3778, "step": 462 }, { "epoch": 3.7039999999999997, "grad_norm": 105.91593170166016, "learning_rate": 1.828e-05, "loss": 2.379, "step": 463 }, { "epoch": 3.7119999999999997, "grad_norm": 36.97077560424805, "learning_rate": 1.832e-05, "loss": 1.9798, "step": 464 }, { "epoch": 3.7199999999999998, "grad_norm": 47.8660774230957, "learning_rate": 1.8360000000000004e-05, "loss": 2.6469, "step": 465 }, { "epoch": 3.7279999999999998, "grad_norm": 38.48366165161133, "learning_rate": 1.84e-05, "loss": 1.6367, "step": 466 }, { "epoch": 3.7359999999999998, "grad_norm": 42.10649490356445, "learning_rate": 1.8440000000000003e-05, "loss": 1.997, "step": 467 }, { "epoch": 3.7439999999999998, "grad_norm": 66.72259521484375, "learning_rate": 1.848e-05, "loss": 2.7433, "step": 468 }, { "epoch": 3.752, "grad_norm": 380.73944091796875, "learning_rate": 1.8520000000000002e-05, "loss": 1.8495, "step": 469 }, { "epoch": 3.76, "grad_norm": 74.97319030761719, "learning_rate": 1.856e-05, "loss": 2.0145, "step": 470 }, { "epoch": 3.768, "grad_norm": 1702.921875, "learning_rate": 1.86e-05, 
"loss": 1.9272, "step": 471 }, { "epoch": 3.776, "grad_norm": 44.063575744628906, "learning_rate": 1.864e-05, "loss": 2.4017, "step": 472 }, { "epoch": 3.784, "grad_norm": 30.48640251159668, "learning_rate": 1.868e-05, "loss": 1.7692, "step": 473 }, { "epoch": 3.792, "grad_norm": 308.44720458984375, "learning_rate": 1.872e-05, "loss": 2.0575, "step": 474 }, { "epoch": 3.8, "grad_norm": 43.01743698120117, "learning_rate": 1.876e-05, "loss": 1.8372, "step": 475 }, { "epoch": 3.808, "grad_norm": 32.24020004272461, "learning_rate": 1.88e-05, "loss": 1.8395, "step": 476 }, { "epoch": 3.816, "grad_norm": 50.06652069091797, "learning_rate": 1.8840000000000003e-05, "loss": 2.0794, "step": 477 }, { "epoch": 3.824, "grad_norm": 50.60537338256836, "learning_rate": 1.888e-05, "loss": 1.7722, "step": 478 }, { "epoch": 3.832, "grad_norm": 143.42892456054688, "learning_rate": 1.8920000000000002e-05, "loss": 2.0841, "step": 479 }, { "epoch": 3.84, "grad_norm": 73.85560607910156, "learning_rate": 1.896e-05, "loss": 1.9823, "step": 480 }, { "epoch": 3.848, "grad_norm": 390.13726806640625, "learning_rate": 1.9e-05, "loss": 2.274, "step": 481 }, { "epoch": 3.856, "grad_norm": 60.328128814697266, "learning_rate": 1.904e-05, "loss": 2.0291, "step": 482 }, { "epoch": 3.864, "grad_norm": 37.77629852294922, "learning_rate": 1.908e-05, "loss": 2.4037, "step": 483 }, { "epoch": 3.872, "grad_norm": 148.3131103515625, "learning_rate": 1.9120000000000003e-05, "loss": 2.1994, "step": 484 }, { "epoch": 3.88, "grad_norm": 513.0369262695312, "learning_rate": 1.916e-05, "loss": 1.5788, "step": 485 }, { "epoch": 3.888, "grad_norm": 62.42787551879883, "learning_rate": 1.9200000000000003e-05, "loss": 1.8401, "step": 486 }, { "epoch": 3.896, "grad_norm": 58.145938873291016, "learning_rate": 1.924e-05, "loss": 1.8581, "step": 487 }, { "epoch": 3.904, "grad_norm": 50.10246658325195, "learning_rate": 1.9280000000000002e-05, "loss": 1.9406, "step": 488 }, { "epoch": 3.912, "grad_norm": 41.68206024169922, 
"learning_rate": 1.932e-05, "loss": 2.0199, "step": 489 }, { "epoch": 3.92, "grad_norm": 67.7701644897461, "learning_rate": 1.936e-05, "loss": 1.6803, "step": 490 }, { "epoch": 3.928, "grad_norm": 311.30291748046875, "learning_rate": 1.94e-05, "loss": 2.0391, "step": 491 }, { "epoch": 3.936, "grad_norm": 55.410987854003906, "learning_rate": 1.944e-05, "loss": 2.9495, "step": 492 }, { "epoch": 3.944, "grad_norm": 127.67931365966797, "learning_rate": 1.948e-05, "loss": 2.3329, "step": 493 }, { "epoch": 3.952, "grad_norm": 64.11988830566406, "learning_rate": 1.9520000000000003e-05, "loss": 1.9151, "step": 494 }, { "epoch": 3.96, "grad_norm": 55.22039794921875, "learning_rate": 1.956e-05, "loss": 1.8609, "step": 495 }, { "epoch": 3.968, "grad_norm": 69.42479705810547, "learning_rate": 1.9600000000000002e-05, "loss": 1.9852, "step": 496 }, { "epoch": 3.976, "grad_norm": 133.1624755859375, "learning_rate": 1.9640000000000002e-05, "loss": 2.5183, "step": 497 }, { "epoch": 3.984, "grad_norm": 92.92858123779297, "learning_rate": 1.968e-05, "loss": 1.68, "step": 498 }, { "epoch": 3.992, "grad_norm": 45.002235412597656, "learning_rate": 1.972e-05, "loss": 2.4791, "step": 499 }, { "epoch": 4.0, "grad_norm": 74.32281494140625, "learning_rate": 1.976e-05, "loss": 1.5872, "step": 500 }, { "epoch": 4.0, "eval_loss": 2.2566823959350586, "eval_map": 0.0504, "eval_map_50": 0.1011, "eval_map_75": 0.0505, "eval_map_Coverall": 0.1759, "eval_map_Face_Shield": 0.0, "eval_map_Gloves": 0.0583, "eval_map_Goggles": 0.0, "eval_map_Mask": 0.0176, "eval_map_large": 0.0443, "eval_map_medium": 0.025, "eval_map_small": 0.0005, "eval_mar_1": 0.0775, "eval_mar_10": 0.1779, "eval_mar_100": 0.2118, "eval_mar_100_Coverall": 0.6178, "eval_mar_100_Face_Shield": 0.0, "eval_mar_100_Gloves": 0.2049, "eval_mar_100_Goggles": 0.0, "eval_mar_100_Mask": 0.2365, "eval_mar_large": 0.2243, "eval_mar_medium": 0.1272, "eval_mar_small": 0.0066, "eval_runtime": 2.4662, "eval_samples_per_second": 11.759, 
"eval_steps_per_second": 0.811, "step": 500 }, { "epoch": 4.008, "grad_norm": 80.60737609863281, "learning_rate": 1.9800000000000004e-05, "loss": 2.3175, "step": 501 }, { "epoch": 4.016, "grad_norm": 48.584896087646484, "learning_rate": 1.984e-05, "loss": 1.422, "step": 502 }, { "epoch": 4.024, "grad_norm": 88.95999908447266, "learning_rate": 1.9880000000000003e-05, "loss": 1.5595, "step": 503 }, { "epoch": 4.032, "grad_norm": 119.67271423339844, "learning_rate": 1.992e-05, "loss": 2.105, "step": 504 }, { "epoch": 4.04, "grad_norm": 30.43901824951172, "learning_rate": 1.9960000000000002e-05, "loss": 2.0252, "step": 505 }, { "epoch": 4.048, "grad_norm": 47.17656326293945, "learning_rate": 2e-05, "loss": 2.1499, "step": 506 }, { "epoch": 4.056, "grad_norm": 67.77314758300781, "learning_rate": 2.004e-05, "loss": 2.3696, "step": 507 }, { "epoch": 4.064, "grad_norm": 205.68992614746094, "learning_rate": 2.008e-05, "loss": 2.19, "step": 508 }, { "epoch": 4.072, "grad_norm": 61.11662292480469, "learning_rate": 2.012e-05, "loss": 1.9618, "step": 509 }, { "epoch": 4.08, "grad_norm": 225.96847534179688, "learning_rate": 2.016e-05, "loss": 1.9853, "step": 510 }, { "epoch": 4.088, "grad_norm": 69.38179779052734, "learning_rate": 2.0200000000000003e-05, "loss": 1.7857, "step": 511 }, { "epoch": 4.096, "grad_norm": 120.60948181152344, "learning_rate": 2.024e-05, "loss": 1.6835, "step": 512 }, { "epoch": 4.104, "grad_norm": 78.56946563720703, "learning_rate": 2.0280000000000002e-05, "loss": 1.7716, "step": 513 }, { "epoch": 4.112, "grad_norm": 56.34722900390625, "learning_rate": 2.032e-05, "loss": 2.3748, "step": 514 }, { "epoch": 4.12, "grad_norm": 101.75665283203125, "learning_rate": 2.036e-05, "loss": 2.1234, "step": 515 }, { "epoch": 4.128, "grad_norm": 39.63407897949219, "learning_rate": 2.04e-05, "loss": 3.3508, "step": 516 }, { "epoch": 4.136, "grad_norm": 97.73563385009766, "learning_rate": 2.044e-05, "loss": 2.229, "step": 517 }, { "epoch": 4.144, "grad_norm": 
46.583587646484375, "learning_rate": 2.048e-05, "loss": 2.2101, "step": 518 }, { "epoch": 4.152, "grad_norm": 129.44204711914062, "learning_rate": 2.052e-05, "loss": 2.6183, "step": 519 }, { "epoch": 4.16, "grad_norm": 30.858440399169922, "learning_rate": 2.0560000000000003e-05, "loss": 2.2581, "step": 520 }, { "epoch": 4.168, "grad_norm": 55.34916687011719, "learning_rate": 2.06e-05, "loss": 1.8097, "step": 521 }, { "epoch": 4.176, "grad_norm": 99.40924835205078, "learning_rate": 2.0640000000000002e-05, "loss": 2.1767, "step": 522 }, { "epoch": 4.184, "grad_norm": 242.34439086914062, "learning_rate": 2.0680000000000002e-05, "loss": 2.2248, "step": 523 }, { "epoch": 4.192, "grad_norm": 195.09774780273438, "learning_rate": 2.072e-05, "loss": 1.9027, "step": 524 }, { "epoch": 4.2, "grad_norm": 40.320438385009766, "learning_rate": 2.076e-05, "loss": 2.0911, "step": 525 }, { "epoch": 4.208, "grad_norm": 70.13845825195312, "learning_rate": 2.08e-05, "loss": 1.7879, "step": 526 }, { "epoch": 4.216, "grad_norm": 140.98672485351562, "learning_rate": 2.084e-05, "loss": 2.4696, "step": 527 }, { "epoch": 4.224, "grad_norm": 59.824859619140625, "learning_rate": 2.0880000000000003e-05, "loss": 1.9397, "step": 528 }, { "epoch": 4.232, "grad_norm": 319.42132568359375, "learning_rate": 2.092e-05, "loss": 2.0969, "step": 529 }, { "epoch": 4.24, "grad_norm": 113.86160278320312, "learning_rate": 2.0960000000000003e-05, "loss": 2.0092, "step": 530 }, { "epoch": 4.248, "grad_norm": 48.53310775756836, "learning_rate": 2.1e-05, "loss": 1.9611, "step": 531 }, { "epoch": 4.256, "grad_norm": 56.22026443481445, "learning_rate": 2.1040000000000002e-05, "loss": 1.6513, "step": 532 }, { "epoch": 4.264, "grad_norm": 93.7584457397461, "learning_rate": 2.1079999999999998e-05, "loss": 1.7091, "step": 533 }, { "epoch": 4.272, "grad_norm": 91.20612335205078, "learning_rate": 2.112e-05, "loss": 2.2315, "step": 534 }, { "epoch": 4.28, "grad_norm": 25.922332763671875, "learning_rate": 2.116e-05, "loss": 
2.3387, "step": 535 }, { "epoch": 4.288, "grad_norm": 77.85942077636719, "learning_rate": 2.12e-05, "loss": 1.9625, "step": 536 }, { "epoch": 4.296, "grad_norm": 57.410011291503906, "learning_rate": 2.124e-05, "loss": 1.9786, "step": 537 }, { "epoch": 4.304, "grad_norm": 68.6552963256836, "learning_rate": 2.128e-05, "loss": 1.8423, "step": 538 }, { "epoch": 4.312, "grad_norm": 39.10639953613281, "learning_rate": 2.1320000000000003e-05, "loss": 2.2998, "step": 539 }, { "epoch": 4.32, "grad_norm": 149.67198181152344, "learning_rate": 2.1360000000000002e-05, "loss": 1.9732, "step": 540 }, { "epoch": 4.328, "grad_norm": 34.52737808227539, "learning_rate": 2.1400000000000002e-05, "loss": 1.8131, "step": 541 }, { "epoch": 4.336, "grad_norm": 41.854862213134766, "learning_rate": 2.144e-05, "loss": 1.6819, "step": 542 }, { "epoch": 4.344, "grad_norm": 235.53233337402344, "learning_rate": 2.148e-05, "loss": 2.3574, "step": 543 }, { "epoch": 4.352, "grad_norm": 47.67107009887695, "learning_rate": 2.152e-05, "loss": 2.5212, "step": 544 }, { "epoch": 4.36, "grad_norm": 85.93669128417969, "learning_rate": 2.1560000000000004e-05, "loss": 2.1373, "step": 545 }, { "epoch": 4.368, "grad_norm": 86.72074890136719, "learning_rate": 2.16e-05, "loss": 1.8802, "step": 546 }, { "epoch": 4.376, "grad_norm": 58.07320785522461, "learning_rate": 2.1640000000000003e-05, "loss": 1.5375, "step": 547 }, { "epoch": 4.384, "grad_norm": 76.33931732177734, "learning_rate": 2.168e-05, "loss": 1.8824, "step": 548 }, { "epoch": 4.392, "grad_norm": 21.281105041503906, "learning_rate": 2.1720000000000002e-05, "loss": 1.8328, "step": 549 }, { "epoch": 4.4, "grad_norm": 44.512054443359375, "learning_rate": 2.176e-05, "loss": 2.3897, "step": 550 }, { "epoch": 4.408, "grad_norm": 33.80000686645508, "learning_rate": 2.18e-05, "loss": 2.1384, "step": 551 }, { "epoch": 4.416, "grad_norm": 106.264892578125, "learning_rate": 2.184e-05, "loss": 2.2906, "step": 552 }, { "epoch": 4.424, "grad_norm": 75.8896484375, 
"learning_rate": 2.188e-05, "loss": 2.0255, "step": 553 }, { "epoch": 4.432, "grad_norm": 56.707279205322266, "learning_rate": 2.192e-05, "loss": 1.9487, "step": 554 }, { "epoch": 4.44, "grad_norm": 127.5597915649414, "learning_rate": 2.196e-05, "loss": 1.4722, "step": 555 }, { "epoch": 4.448, "grad_norm": 85.55433654785156, "learning_rate": 2.2000000000000003e-05, "loss": 1.9349, "step": 556 }, { "epoch": 4.456, "grad_norm": 54.9367790222168, "learning_rate": 2.2040000000000002e-05, "loss": 1.6907, "step": 557 }, { "epoch": 4.464, "grad_norm": 144.7183380126953, "learning_rate": 2.2080000000000002e-05, "loss": 2.04, "step": 558 }, { "epoch": 4.4719999999999995, "grad_norm": 178.67376708984375, "learning_rate": 2.212e-05, "loss": 2.4213, "step": 559 }, { "epoch": 4.48, "grad_norm": 94.20587921142578, "learning_rate": 2.216e-05, "loss": 2.1176, "step": 560 }, { "epoch": 4.4879999999999995, "grad_norm": 800.4739379882812, "learning_rate": 2.22e-05, "loss": 3.507, "step": 561 }, { "epoch": 4.496, "grad_norm": 94.29956817626953, "learning_rate": 2.224e-05, "loss": 1.6827, "step": 562 }, { "epoch": 4.504, "grad_norm": 65.41382598876953, "learning_rate": 2.228e-05, "loss": 1.7732, "step": 563 }, { "epoch": 4.5120000000000005, "grad_norm": 93.53425598144531, "learning_rate": 2.2320000000000003e-05, "loss": 2.6208, "step": 564 }, { "epoch": 4.52, "grad_norm": 116.33560943603516, "learning_rate": 2.236e-05, "loss": 1.8739, "step": 565 }, { "epoch": 4.5280000000000005, "grad_norm": 90.45199584960938, "learning_rate": 2.2400000000000002e-05, "loss": 1.9647, "step": 566 }, { "epoch": 4.536, "grad_norm": 123.10913848876953, "learning_rate": 2.244e-05, "loss": 2.3764, "step": 567 }, { "epoch": 4.5440000000000005, "grad_norm": 183.09007263183594, "learning_rate": 2.248e-05, "loss": 2.5382, "step": 568 }, { "epoch": 4.552, "grad_norm": 208.2724609375, "learning_rate": 2.252e-05, "loss": 1.9033, "step": 569 }, { "epoch": 4.5600000000000005, "grad_norm": 32.76677322387695, 
"learning_rate": 2.256e-05, "loss": 1.8983, "step": 570 }, { "epoch": 4.568, "grad_norm": 110.23979187011719, "learning_rate": 2.26e-05, "loss": 1.9568, "step": 571 }, { "epoch": 4.576, "grad_norm": 65.74504089355469, "learning_rate": 2.264e-05, "loss": 1.7115, "step": 572 }, { "epoch": 4.584, "grad_norm": 38.89997863769531, "learning_rate": 2.268e-05, "loss": 1.8678, "step": 573 }, { "epoch": 4.592, "grad_norm": 79.4405288696289, "learning_rate": 2.2720000000000003e-05, "loss": 1.9565, "step": 574 }, { "epoch": 4.6, "grad_norm": 46.95753860473633, "learning_rate": 2.2760000000000002e-05, "loss": 1.3596, "step": 575 }, { "epoch": 4.608, "grad_norm": 77.14911651611328, "learning_rate": 2.2800000000000002e-05, "loss": 1.8879, "step": 576 }, { "epoch": 4.616, "grad_norm": 33.61161422729492, "learning_rate": 2.284e-05, "loss": 1.8085, "step": 577 }, { "epoch": 4.624, "grad_norm": 170.61602783203125, "learning_rate": 2.288e-05, "loss": 1.9572, "step": 578 }, { "epoch": 4.632, "grad_norm": 46.01333999633789, "learning_rate": 2.292e-05, "loss": 1.7009, "step": 579 }, { "epoch": 4.64, "grad_norm": 37.49150085449219, "learning_rate": 2.296e-05, "loss": 2.0882, "step": 580 }, { "epoch": 4.648, "grad_norm": 57.92176055908203, "learning_rate": 2.3000000000000003e-05, "loss": 1.9569, "step": 581 }, { "epoch": 4.656, "grad_norm": 48.382102966308594, "learning_rate": 2.304e-05, "loss": 2.327, "step": 582 }, { "epoch": 4.664, "grad_norm": 56.68610763549805, "learning_rate": 2.3080000000000003e-05, "loss": 2.0894, "step": 583 }, { "epoch": 4.672, "grad_norm": 64.96151733398438, "learning_rate": 2.312e-05, "loss": 2.1185, "step": 584 }, { "epoch": 4.68, "grad_norm": 189.9112548828125, "learning_rate": 2.3160000000000002e-05, "loss": 2.0488, "step": 585 }, { "epoch": 4.688, "grad_norm": 79.50534057617188, "learning_rate": 2.32e-05, "loss": 1.6033, "step": 586 }, { "epoch": 4.696, "grad_norm": 55.46330261230469, "learning_rate": 2.324e-05, "loss": 2.1228, "step": 587 }, { "epoch": 
4.704, "grad_norm": 51.73411560058594, "learning_rate": 2.328e-05, "loss": 2.04, "step": 588 }, { "epoch": 4.712, "grad_norm": 42.52873611450195, "learning_rate": 2.332e-05, "loss": 1.8758, "step": 589 }, { "epoch": 4.72, "grad_norm": 64.91312408447266, "learning_rate": 2.336e-05, "loss": 2.4674, "step": 590 }, { "epoch": 4.728, "grad_norm": 213.42320251464844, "learning_rate": 2.3400000000000003e-05, "loss": 1.7408, "step": 591 }, { "epoch": 4.736, "grad_norm": 46.19402313232422, "learning_rate": 2.344e-05, "loss": 2.0617, "step": 592 }, { "epoch": 4.744, "grad_norm": 50.92826843261719, "learning_rate": 2.3480000000000002e-05, "loss": 1.6339, "step": 593 }, { "epoch": 4.752, "grad_norm": 28.262563705444336, "learning_rate": 2.3520000000000002e-05, "loss": 1.8065, "step": 594 }, { "epoch": 4.76, "grad_norm": 116.0830307006836, "learning_rate": 2.356e-05, "loss": 2.6692, "step": 595 }, { "epoch": 4.768, "grad_norm": 56.94371795654297, "learning_rate": 2.36e-05, "loss": 2.2028, "step": 596 }, { "epoch": 4.776, "grad_norm": 23.76677703857422, "learning_rate": 2.364e-05, "loss": 1.7302, "step": 597 }, { "epoch": 4.784, "grad_norm": 67.30767822265625, "learning_rate": 2.3680000000000004e-05, "loss": 1.61, "step": 598 }, { "epoch": 4.792, "grad_norm": 41.801856994628906, "learning_rate": 2.372e-05, "loss": 1.8674, "step": 599 }, { "epoch": 4.8, "grad_norm": 43.82069778442383, "learning_rate": 2.3760000000000003e-05, "loss": 1.9204, "step": 600 }, { "epoch": 4.808, "grad_norm": 30.101524353027344, "learning_rate": 2.38e-05, "loss": 2.6659, "step": 601 }, { "epoch": 4.816, "grad_norm": 109.13784790039062, "learning_rate": 2.3840000000000002e-05, "loss": 1.8188, "step": 602 }, { "epoch": 4.824, "grad_norm": 46.21516799926758, "learning_rate": 2.3880000000000002e-05, "loss": 1.8691, "step": 603 }, { "epoch": 4.832, "grad_norm": 41.279911041259766, "learning_rate": 2.392e-05, "loss": 2.1273, "step": 604 }, { "epoch": 4.84, "grad_norm": 55.902488708496094, "learning_rate": 
2.396e-05, "loss": 2.0267, "step": 605 }, { "epoch": 4.848, "grad_norm": 59.820838928222656, "learning_rate": 2.4e-05, "loss": 1.9557, "step": 606 }, { "epoch": 4.856, "grad_norm": 86.19668579101562, "learning_rate": 2.404e-05, "loss": 1.7668, "step": 607 }, { "epoch": 4.864, "grad_norm": 28.528017044067383, "learning_rate": 2.408e-05, "loss": 1.6596, "step": 608 }, { "epoch": 4.872, "grad_norm": 47.33735656738281, "learning_rate": 2.412e-05, "loss": 2.3817, "step": 609 }, { "epoch": 4.88, "grad_norm": 97.68360900878906, "learning_rate": 2.4160000000000002e-05, "loss": 2.0873, "step": 610 }, { "epoch": 4.888, "grad_norm": 47.924476623535156, "learning_rate": 2.4200000000000002e-05, "loss": 1.6372, "step": 611 }, { "epoch": 4.896, "grad_norm": 232.7704315185547, "learning_rate": 2.4240000000000002e-05, "loss": 2.1177, "step": 612 }, { "epoch": 4.904, "grad_norm": 81.91019439697266, "learning_rate": 2.428e-05, "loss": 2.1823, "step": 613 }, { "epoch": 4.912, "grad_norm": 41.14436340332031, "learning_rate": 2.432e-05, "loss": 1.9079, "step": 614 }, { "epoch": 4.92, "grad_norm": 115.11103057861328, "learning_rate": 2.4360000000000004e-05, "loss": 1.3515, "step": 615 }, { "epoch": 4.928, "grad_norm": 68.58828735351562, "learning_rate": 2.44e-05, "loss": 2.0445, "step": 616 }, { "epoch": 4.936, "grad_norm": 35.59090805053711, "learning_rate": 2.4440000000000003e-05, "loss": 1.855, "step": 617 }, { "epoch": 4.944, "grad_norm": 42.57370376586914, "learning_rate": 2.448e-05, "loss": 1.6701, "step": 618 }, { "epoch": 4.952, "grad_norm": 1172.6253662109375, "learning_rate": 2.4520000000000002e-05, "loss": 2.8861, "step": 619 }, { "epoch": 4.96, "grad_norm": 132.1989288330078, "learning_rate": 2.4560000000000002e-05, "loss": 2.0963, "step": 620 }, { "epoch": 4.968, "grad_norm": 63.86048126220703, "learning_rate": 2.46e-05, "loss": 2.1212, "step": 621 }, { "epoch": 4.976, "grad_norm": 285.2614440917969, "learning_rate": 2.464e-05, "loss": 2.4567, "step": 622 }, { "epoch": 
4.984, "grad_norm": 196.5530242919922, "learning_rate": 2.468e-05, "loss": 2.0756, "step": 623 }, { "epoch": 4.992, "grad_norm": 45.42646408081055, "learning_rate": 2.472e-05, "loss": 2.9623, "step": 624 }, { "epoch": 5.0, "grad_norm": 71.72699737548828, "learning_rate": 2.476e-05, "loss": 2.1086, "step": 625 }, { "epoch": 5.0, "eval_loss": 2.115382671356201, "eval_map": 0.1016, "eval_map_50": 0.1897, "eval_map_75": 0.0832, "eval_map_Coverall": 0.3934, "eval_map_Face_Shield": 0.0, "eval_map_Gloves": 0.0709, "eval_map_Goggles": 0.0, "eval_map_Mask": 0.044, "eval_map_large": 0.1016, "eval_map_medium": 0.0314, "eval_map_small": 0.045, "eval_mar_1": 0.1129, "eval_mar_10": 0.223, "eval_mar_100": 0.2612, "eval_mar_100_Coverall": 0.72, "eval_mar_100_Face_Shield": 0.0, "eval_mar_100_Gloves": 0.2574, "eval_mar_100_Goggles": 0.0, "eval_mar_100_Mask": 0.3288, "eval_mar_large": 0.2167, "eval_mar_medium": 0.1507, "eval_mar_small": 0.0826, "eval_runtime": 2.4395, "eval_samples_per_second": 11.888, "eval_steps_per_second": 0.82, "step": 625 }, { "epoch": 5.008, "grad_norm": 100.96086120605469, "learning_rate": 2.48e-05, "loss": 1.5348, "step": 626 }, { "epoch": 5.016, "grad_norm": 37.400848388671875, "learning_rate": 2.4840000000000003e-05, "loss": 1.761, "step": 627 }, { "epoch": 5.024, "grad_norm": 83.28484344482422, "learning_rate": 2.488e-05, "loss": 2.0042, "step": 628 }, { "epoch": 5.032, "grad_norm": 95.26739501953125, "learning_rate": 2.4920000000000002e-05, "loss": 1.9826, "step": 629 }, { "epoch": 5.04, "grad_norm": 114.31099700927734, "learning_rate": 2.496e-05, "loss": 1.7136, "step": 630 }, { "epoch": 5.048, "grad_norm": 114.0384750366211, "learning_rate": 2.5e-05, "loss": 1.4939, "step": 631 }, { "epoch": 5.056, "grad_norm": 53.58332061767578, "learning_rate": 2.504e-05, "loss": 1.4847, "step": 632 }, { "epoch": 5.064, "grad_norm": 32.61345672607422, "learning_rate": 2.5080000000000004e-05, "loss": 1.8904, "step": 633 }, { "epoch": 5.072, "grad_norm": 
103.49251556396484, "learning_rate": 2.512e-05, "loss": 1.8552, "step": 634 }, { "epoch": 5.08, "grad_norm": 57.66875457763672, "learning_rate": 2.516e-05, "loss": 2.0577, "step": 635 }, { "epoch": 5.088, "grad_norm": 87.6977310180664, "learning_rate": 2.5200000000000003e-05, "loss": 1.7739, "step": 636 }, { "epoch": 5.096, "grad_norm": 36.98555374145508, "learning_rate": 2.5240000000000002e-05, "loss": 2.5033, "step": 637 }, { "epoch": 5.104, "grad_norm": 38.020668029785156, "learning_rate": 2.5280000000000005e-05, "loss": 1.5991, "step": 638 }, { "epoch": 5.112, "grad_norm": 678.7229614257812, "learning_rate": 2.5319999999999998e-05, "loss": 1.8902, "step": 639 }, { "epoch": 5.12, "grad_norm": 36.180179595947266, "learning_rate": 2.536e-05, "loss": 1.9766, "step": 640 }, { "epoch": 5.128, "grad_norm": 56.610172271728516, "learning_rate": 2.54e-05, "loss": 1.9261, "step": 641 }, { "epoch": 5.136, "grad_norm": 53.01420974731445, "learning_rate": 2.5440000000000004e-05, "loss": 2.3259, "step": 642 }, { "epoch": 5.144, "grad_norm": 61.25532913208008, "learning_rate": 2.5480000000000003e-05, "loss": 1.7844, "step": 643 }, { "epoch": 5.152, "grad_norm": 66.14318084716797, "learning_rate": 2.552e-05, "loss": 1.8547, "step": 644 }, { "epoch": 5.16, "grad_norm": 47.84146499633789, "learning_rate": 2.556e-05, "loss": 2.0839, "step": 645 }, { "epoch": 5.168, "grad_norm": 47.28257369995117, "learning_rate": 2.5600000000000002e-05, "loss": 2.0789, "step": 646 }, { "epoch": 5.176, "grad_norm": 57.30153274536133, "learning_rate": 2.5640000000000002e-05, "loss": 1.9627, "step": 647 }, { "epoch": 5.184, "grad_norm": 39.52972412109375, "learning_rate": 2.5679999999999998e-05, "loss": 2.4175, "step": 648 }, { "epoch": 5.192, "grad_norm": 75.2556381225586, "learning_rate": 2.572e-05, "loss": 2.0646, "step": 649 }, { "epoch": 5.2, "grad_norm": 138.92861938476562, "learning_rate": 2.576e-05, "loss": 2.352, "step": 650 }, { "epoch": 5.208, "grad_norm": 32.16020584106445, 
"learning_rate": 2.58e-05, "loss": 1.8737, "step": 651 }, { "epoch": 5.216, "grad_norm": 26.541704177856445, "learning_rate": 2.5840000000000003e-05, "loss": 2.6962, "step": 652 }, { "epoch": 5.224, "grad_norm": 59.88684844970703, "learning_rate": 2.588e-05, "loss": 1.9565, "step": 653 }, { "epoch": 5.232, "grad_norm": NaN, "learning_rate": 2.588e-05, "loss": 1.7086, "step": 654 }, { "epoch": 5.24, "grad_norm": 67.49646759033203, "learning_rate": 2.592e-05, "loss": 2.003, "step": 655 }, { "epoch": 5.248, "grad_norm": 63.23044967651367, "learning_rate": 2.5960000000000002e-05, "loss": 2.0379, "step": 656 }, { "epoch": 5.256, "grad_norm": 82.37909698486328, "learning_rate": 2.6000000000000002e-05, "loss": 1.6791, "step": 657 }, { "epoch": 5.264, "grad_norm": 34.796321868896484, "learning_rate": 2.6040000000000005e-05, "loss": 3.0469, "step": 658 }, { "epoch": 5.272, "grad_norm": 55.65940475463867, "learning_rate": 2.6079999999999998e-05, "loss": 1.768, "step": 659 }, { "epoch": 5.28, "grad_norm": 72.32231140136719, "learning_rate": 2.612e-05, "loss": 1.5865, "step": 660 }, { "epoch": 5.288, "grad_norm": 53.01453399658203, "learning_rate": 2.616e-05, "loss": 2.2534, "step": 661 }, { "epoch": 5.296, "grad_norm": 96.03971862792969, "learning_rate": 2.6200000000000003e-05, "loss": 2.5796, "step": 662 }, { "epoch": 5.304, "grad_norm": 89.77415466308594, "learning_rate": 2.6240000000000003e-05, "loss": 1.5631, "step": 663 }, { "epoch": 5.312, "grad_norm": 55.34154510498047, "learning_rate": 2.628e-05, "loss": 1.713, "step": 664 }, { "epoch": 5.32, "grad_norm": 43.34373474121094, "learning_rate": 2.632e-05, "loss": 2.158, "step": 665 }, { "epoch": 5.328, "grad_norm": 37.51273727416992, "learning_rate": 2.6360000000000002e-05, "loss": 2.0608, "step": 666 }, { "epoch": 5.336, "grad_norm": 71.1447982788086, "learning_rate": 2.64e-05, "loss": 1.816, "step": 667 }, { "epoch": 5.344, "grad_norm": 40.754878997802734, "learning_rate": 2.6440000000000004e-05, "loss": 2.1153, "step": 
668 }, { "epoch": 5.352, "grad_norm": 66.25487518310547, "learning_rate": 2.648e-05, "loss": 1.7573, "step": 669 }, { "epoch": 5.36, "grad_norm": 73.14037322998047, "learning_rate": 2.652e-05, "loss": 2.0236, "step": 670 }, { "epoch": 5.368, "grad_norm": 55.108436584472656, "learning_rate": 2.6560000000000003e-05, "loss": 1.7006, "step": 671 }, { "epoch": 5.376, "grad_norm": 34.314857482910156, "learning_rate": 2.6600000000000003e-05, "loss": 1.9436, "step": 672 }, { "epoch": 5.384, "grad_norm": 35.01896286010742, "learning_rate": 2.6640000000000002e-05, "loss": 2.3385, "step": 673 }, { "epoch": 5.392, "grad_norm": 35.424163818359375, "learning_rate": 2.668e-05, "loss": 2.0586, "step": 674 }, { "epoch": 5.4, "grad_norm": 67.09303283691406, "learning_rate": 2.672e-05, "loss": 2.2533, "step": 675 }, { "epoch": 5.408, "grad_norm": 195.28477478027344, "learning_rate": 2.676e-05, "loss": 2.5729, "step": 676 }, { "epoch": 5.416, "grad_norm": 39.1812629699707, "learning_rate": 2.6800000000000004e-05, "loss": 1.6982, "step": 677 }, { "epoch": 5.424, "grad_norm": 67.39041900634766, "learning_rate": 2.6840000000000004e-05, "loss": 2.0437, "step": 678 }, { "epoch": 5.432, "grad_norm": 94.2929458618164, "learning_rate": 2.688e-05, "loss": 2.1173, "step": 679 }, { "epoch": 5.44, "grad_norm": 60.04130935668945, "learning_rate": 2.692e-05, "loss": 1.9551, "step": 680 }, { "epoch": 5.448, "grad_norm": 110.22356414794922, "learning_rate": 2.6960000000000003e-05, "loss": 1.791, "step": 681 }, { "epoch": 5.456, "grad_norm": 416.2041931152344, "learning_rate": 2.7000000000000002e-05, "loss": 2.3304, "step": 682 }, { "epoch": 5.464, "grad_norm": 132.83456420898438, "learning_rate": 2.704e-05, "loss": 1.9538, "step": 683 }, { "epoch": 5.4719999999999995, "grad_norm": 53.86763000488281, "learning_rate": 2.7079999999999998e-05, "loss": 1.9051, "step": 684 }, { "epoch": 5.48, "grad_norm": 56.2573356628418, "learning_rate": 2.712e-05, "loss": 2.1503, "step": 685 }, { "epoch": 
5.4879999999999995, "grad_norm": 78.24242401123047, "learning_rate": 2.716e-05, "loss": 2.3773, "step": 686 }, { "epoch": 5.496, "grad_norm": 239.912353515625, "learning_rate": 2.7200000000000004e-05, "loss": 2.392, "step": 687 }, { "epoch": 5.504, "grad_norm": 33.721107482910156, "learning_rate": 2.724e-05, "loss": 2.2867, "step": 688 }, { "epoch": 5.5120000000000005, "grad_norm": 460.2245788574219, "learning_rate": 2.728e-05, "loss": 2.8137, "step": 689 }, { "epoch": 5.52, "grad_norm": 64.40107727050781, "learning_rate": 2.7320000000000003e-05, "loss": 1.7652, "step": 690 }, { "epoch": 5.5280000000000005, "grad_norm": 67.24168395996094, "learning_rate": 2.7360000000000002e-05, "loss": 2.2497, "step": 691 }, { "epoch": 5.536, "grad_norm": 321.380126953125, "learning_rate": 2.7400000000000002e-05, "loss": 1.9993, "step": 692 }, { "epoch": 5.5440000000000005, "grad_norm": 45.96550369262695, "learning_rate": 2.7439999999999998e-05, "loss": 2.8254, "step": 693 }, { "epoch": 5.552, "grad_norm": 40.67262268066406, "learning_rate": 2.748e-05, "loss": 1.4854, "step": 694 }, { "epoch": 5.5600000000000005, "grad_norm": 138.13134765625, "learning_rate": 2.752e-05, "loss": 2.541, "step": 695 }, { "epoch": 5.568, "grad_norm": 66.082275390625, "learning_rate": 2.7560000000000004e-05, "loss": 2.2738, "step": 696 }, { "epoch": 5.576, "grad_norm": 123.2436294555664, "learning_rate": 2.7600000000000003e-05, "loss": 2.1833, "step": 697 }, { "epoch": 5.584, "grad_norm": 53.49134063720703, "learning_rate": 2.764e-05, "loss": 2.1813, "step": 698 }, { "epoch": 5.592, "grad_norm": 113.68708038330078, "learning_rate": 2.768e-05, "loss": 1.7683, "step": 699 }, { "epoch": 5.6, "grad_norm": 61.2253303527832, "learning_rate": 2.7720000000000002e-05, "loss": 1.5781, "step": 700 }, { "epoch": 5.608, "grad_norm": 74.19840240478516, "learning_rate": 2.7760000000000002e-05, "loss": 1.7478, "step": 701 }, { "epoch": 5.616, "grad_norm": 48.844085693359375, "learning_rate": 2.7800000000000005e-05, 
"loss": 2.0018, "step": 702 }, { "epoch": 5.624, "grad_norm": 141.95913696289062, "learning_rate": 2.7839999999999998e-05, "loss": 1.8908, "step": 703 }, { "epoch": 5.632, "grad_norm": 70.16959381103516, "learning_rate": 2.788e-05, "loss": 1.968, "step": 704 }, { "epoch": 5.64, "grad_norm": 65.78260040283203, "learning_rate": 2.792e-05, "loss": 1.6748, "step": 705 }, { "epoch": 5.648, "grad_norm": 44.14716720581055, "learning_rate": 2.7960000000000003e-05, "loss": 2.1526, "step": 706 }, { "epoch": 5.656, "grad_norm": 44.41889572143555, "learning_rate": 2.8000000000000003e-05, "loss": 1.9866, "step": 707 }, { "epoch": 5.664, "grad_norm": 535.6444702148438, "learning_rate": 2.804e-05, "loss": 1.5102, "step": 708 }, { "epoch": 5.672, "grad_norm": 50.256866455078125, "learning_rate": 2.8080000000000002e-05, "loss": 2.4582, "step": 709 }, { "epoch": 5.68, "grad_norm": 43.96841812133789, "learning_rate": 2.8120000000000002e-05, "loss": 2.0046, "step": 710 }, { "epoch": 5.688, "grad_norm": 30.944812774658203, "learning_rate": 2.816e-05, "loss": 2.7059, "step": 711 }, { "epoch": 5.696, "grad_norm": 81.64017486572266, "learning_rate": 2.8199999999999998e-05, "loss": 1.8159, "step": 712 }, { "epoch": 5.704, "grad_norm": 232.30589294433594, "learning_rate": 2.824e-05, "loss": 2.1658, "step": 713 }, { "epoch": 5.712, "grad_norm": 31.29986572265625, "learning_rate": 2.828e-05, "loss": 1.5344, "step": 714 }, { "epoch": 5.72, "grad_norm": 57.97893142700195, "learning_rate": 2.8320000000000003e-05, "loss": 1.5438, "step": 715 }, { "epoch": 5.728, "grad_norm": 89.06587219238281, "learning_rate": 2.8360000000000003e-05, "loss": 1.5286, "step": 716 }, { "epoch": 5.736, "grad_norm": 399.02825927734375, "learning_rate": 2.84e-05, "loss": 1.6229, "step": 717 }, { "epoch": 5.744, "grad_norm": 70.0469741821289, "learning_rate": 2.844e-05, "loss": 2.1828, "step": 718 }, { "epoch": 5.752, "grad_norm": 46.6120491027832, "learning_rate": 2.8480000000000002e-05, "loss": 2.0946, "step": 719 }, 
{ "epoch": 5.76, "grad_norm": 33.19400405883789, "learning_rate": 2.852e-05, "loss": 1.8109, "step": 720 }, { "epoch": 5.768, "grad_norm": 85.71753692626953, "learning_rate": 2.8560000000000004e-05, "loss": 2.3016, "step": 721 }, { "epoch": 5.776, "grad_norm": 161.98867797851562, "learning_rate": 2.86e-05, "loss": 2.0964, "step": 722 }, { "epoch": 5.784, "grad_norm": 34.39268493652344, "learning_rate": 2.864e-05, "loss": 2.389, "step": 723 }, { "epoch": 5.792, "grad_norm": 106.89543151855469, "learning_rate": 2.868e-05, "loss": 1.701, "step": 724 }, { "epoch": 5.8, "grad_norm": 117.99754333496094, "learning_rate": 2.8720000000000003e-05, "loss": 1.6346, "step": 725 }, { "epoch": 5.808, "grad_norm": 69.65553283691406, "learning_rate": 2.8760000000000002e-05, "loss": 3.5634, "step": 726 }, { "epoch": 5.816, "grad_norm": 115.25811004638672, "learning_rate": 2.88e-05, "loss": 1.7316, "step": 727 }, { "epoch": 5.824, "grad_norm": 135.9453125, "learning_rate": 2.8840000000000002e-05, "loss": 1.4709, "step": 728 }, { "epoch": 5.832, "grad_norm": 30.42026138305664, "learning_rate": 2.888e-05, "loss": 1.7636, "step": 729 }, { "epoch": 5.84, "grad_norm": 43.09831619262695, "learning_rate": 2.8920000000000004e-05, "loss": 1.5719, "step": 730 }, { "epoch": 5.848, "grad_norm": 40.176631927490234, "learning_rate": 2.8960000000000004e-05, "loss": 2.1209, "step": 731 }, { "epoch": 5.856, "grad_norm": 44.875614166259766, "learning_rate": 2.9e-05, "loss": 2.5626, "step": 732 }, { "epoch": 5.864, "grad_norm": 67.84209442138672, "learning_rate": 2.904e-05, "loss": 1.6419, "step": 733 }, { "epoch": 5.872, "grad_norm": 93.65338897705078, "learning_rate": 2.9080000000000003e-05, "loss": 2.0679, "step": 734 }, { "epoch": 5.88, "grad_norm": 179.48097229003906, "learning_rate": 2.9120000000000002e-05, "loss": 1.8993, "step": 735 }, { "epoch": 5.888, "grad_norm": 55.735595703125, "learning_rate": 2.9160000000000005e-05, "loss": 2.3934, "step": 736 }, { "epoch": 5.896, "grad_norm": 
83.3863525390625, "learning_rate": 2.9199999999999998e-05, "loss": 2.1094, "step": 737 }, { "epoch": 5.904, "grad_norm": 57.49477005004883, "learning_rate": 2.924e-05, "loss": 2.0337, "step": 738 }, { "epoch": 5.912, "grad_norm": 66.31400299072266, "learning_rate": 2.928e-05, "loss": 2.0573, "step": 739 }, { "epoch": 5.92, "grad_norm": 67.07948303222656, "learning_rate": 2.9320000000000004e-05, "loss": 2.1104, "step": 740 }, { "epoch": 5.928, "grad_norm": 79.35523223876953, "learning_rate": 2.9360000000000003e-05, "loss": 2.5469, "step": 741 }, { "epoch": 5.936, "grad_norm": 44.89119338989258, "learning_rate": 2.94e-05, "loss": 1.7076, "step": 742 }, { "epoch": 5.944, "grad_norm": 49.35426712036133, "learning_rate": 2.944e-05, "loss": 2.4499, "step": 743 }, { "epoch": 5.952, "grad_norm": 60.17871856689453, "learning_rate": 2.9480000000000002e-05, "loss": 2.6319, "step": 744 }, { "epoch": 5.96, "grad_norm": 28.282245635986328, "learning_rate": 2.9520000000000002e-05, "loss": 1.756, "step": 745 }, { "epoch": 5.968, "grad_norm": 54.728694915771484, "learning_rate": 2.9559999999999998e-05, "loss": 2.1325, "step": 746 }, { "epoch": 5.976, "grad_norm": 48.61167526245117, "learning_rate": 2.96e-05, "loss": 1.9999, "step": 747 }, { "epoch": 5.984, "grad_norm": 35.608253479003906, "learning_rate": 2.964e-05, "loss": 2.3775, "step": 748 }, { "epoch": 5.992, "grad_norm": 36.86285400390625, "learning_rate": 2.9680000000000004e-05, "loss": 2.0193, "step": 749 }, { "epoch": 6.0, "grad_norm": 56.16119384765625, "learning_rate": 2.9720000000000003e-05, "loss": 1.6887, "step": 750 }, { "epoch": 6.0, "eval_loss": 2.026926040649414, "eval_map": 0.0912, "eval_map_50": 0.2122, "eval_map_75": 0.0668, "eval_map_Coverall": 0.3344, "eval_map_Face_Shield": 0.0, "eval_map_Gloves": 0.071, "eval_map_Goggles": 0.0, "eval_map_Mask": 0.0508, "eval_map_large": 0.1176, "eval_map_medium": 0.0334, "eval_map_small": 0.0154, "eval_mar_1": 0.0877, "eval_mar_10": 0.2065, "eval_mar_100": 0.2218, 
"eval_mar_100_Coverall": 0.6444, "eval_mar_100_Face_Shield": 0.0, "eval_mar_100_Gloves": 0.1951, "eval_mar_100_Goggles": 0.0, "eval_mar_100_Mask": 0.2692, "eval_mar_large": 0.2119, "eval_mar_medium": 0.1319, "eval_mar_small": 0.0249, "eval_runtime": 2.4565, "eval_samples_per_second": 11.805, "eval_steps_per_second": 0.814, "step": 750 }, { "epoch": 6.008, "grad_norm": 37.00269317626953, "learning_rate": 2.976e-05, "loss": 1.6787, "step": 751 }, { "epoch": 6.016, "grad_norm": 52.034202575683594, "learning_rate": 2.98e-05, "loss": 1.6443, "step": 752 }, { "epoch": 6.024, "grad_norm": 61.87123489379883, "learning_rate": 2.9840000000000002e-05, "loss": 1.7499, "step": 753 }, { "epoch": 6.032, "grad_norm": 260.2122497558594, "learning_rate": 2.9880000000000002e-05, "loss": 2.0281, "step": 754 }, { "epoch": 6.04, "grad_norm": 41.391021728515625, "learning_rate": 2.9920000000000005e-05, "loss": 1.8311, "step": 755 }, { "epoch": 6.048, "grad_norm": 37.87995529174805, "learning_rate": 2.9959999999999998e-05, "loss": 2.6857, "step": 756 }, { "epoch": 6.056, "grad_norm": 49.072513580322266, "learning_rate": 3e-05, "loss": 1.7276, "step": 757 }, { "epoch": 6.064, "grad_norm": 97.03428649902344, "learning_rate": 3.004e-05, "loss": 2.475, "step": 758 }, { "epoch": 6.072, "grad_norm": 281.4495849609375, "learning_rate": 3.0080000000000003e-05, "loss": 1.7587, "step": 759 }, { "epoch": 6.08, "grad_norm": 55.782588958740234, "learning_rate": 3.0120000000000003e-05, "loss": 1.8349, "step": 760 }, { "epoch": 6.088, "grad_norm": 65.4983139038086, "learning_rate": 3.016e-05, "loss": 2.2351, "step": 761 }, { "epoch": 6.096, "grad_norm": 33.71940612792969, "learning_rate": 3.02e-05, "loss": 1.9379, "step": 762 }, { "epoch": 6.104, "grad_norm": 98.54595947265625, "learning_rate": 3.0240000000000002e-05, "loss": 1.9802, "step": 763 }, { "epoch": 6.112, "grad_norm": 29.977455139160156, "learning_rate": 3.028e-05, "loss": 1.8764, "step": 764 }, { "epoch": 6.12, "grad_norm": 
28.744855880737305, "learning_rate": 3.0320000000000004e-05, "loss": 2.1889, "step": 765 }, { "epoch": 6.128, "grad_norm": 56.34071350097656, "learning_rate": 3.036e-05, "loss": 2.0035, "step": 766 }, { "epoch": 6.136, "grad_norm": 50.181121826171875, "learning_rate": 3.04e-05, "loss": 1.3447, "step": 767 }, { "epoch": 6.144, "grad_norm": 48.741355895996094, "learning_rate": 3.0440000000000003e-05, "loss": 1.8007, "step": 768 }, { "epoch": 6.152, "grad_norm": 36.77521514892578, "learning_rate": 3.0480000000000003e-05, "loss": 1.6063, "step": 769 }, { "epoch": 6.16, "grad_norm": 61.04232406616211, "learning_rate": 3.0520000000000006e-05, "loss": 1.627, "step": 770 }, { "epoch": 6.168, "grad_norm": 110.06324005126953, "learning_rate": 3.056e-05, "loss": 2.396, "step": 771 }, { "epoch": 6.176, "grad_norm": 51.019752502441406, "learning_rate": 3.06e-05, "loss": 2.4565, "step": 772 }, { "epoch": 6.184, "grad_norm": 29.793045043945312, "learning_rate": 3.0640000000000005e-05, "loss": 1.68, "step": 773 }, { "epoch": 6.192, "grad_norm": 37.41502380371094, "learning_rate": 3.0680000000000004e-05, "loss": 2.2311, "step": 774 }, { "epoch": 6.2, "grad_norm": 56.29119873046875, "learning_rate": 3.072e-05, "loss": 1.9617, "step": 775 }, { "epoch": 6.208, "grad_norm": 50.467586517333984, "learning_rate": 3.076e-05, "loss": 1.73, "step": 776 }, { "epoch": 6.216, "grad_norm": 24.65096092224121, "learning_rate": 3.08e-05, "loss": 1.7147, "step": 777 }, { "epoch": 6.224, "grad_norm": 38.175254821777344, "learning_rate": 3.084e-05, "loss": 1.7508, "step": 778 }, { "epoch": 6.232, "grad_norm": 44.5760498046875, "learning_rate": 3.088e-05, "loss": 1.9436, "step": 779 }, { "epoch": 6.24, "grad_norm": 34.32352828979492, "learning_rate": 3.092e-05, "loss": 2.0105, "step": 780 }, { "epoch": 6.248, "grad_norm": 44.269920349121094, "learning_rate": 3.096e-05, "loss": 2.397, "step": 781 }, { "epoch": 6.256, "grad_norm": 27.11491584777832, "learning_rate": 3.1e-05, "loss": 1.5246, "step": 782 
}, { "epoch": 6.264, "grad_norm": 33.385040283203125, "learning_rate": 3.104e-05, "loss": 1.8861, "step": 783 }, { "epoch": 6.272, "grad_norm": 27.721155166625977, "learning_rate": 3.108e-05, "loss": 2.3294, "step": 784 }, { "epoch": 6.28, "grad_norm": 74.89378356933594, "learning_rate": 3.112e-05, "loss": 1.3986, "step": 785 }, { "epoch": 6.288, "grad_norm": 30.124258041381836, "learning_rate": 3.116e-05, "loss": 2.2774, "step": 786 }, { "epoch": 6.296, "grad_norm": 104.47352600097656, "learning_rate": 3.12e-05, "loss": 1.6925, "step": 787 }, { "epoch": 6.304, "grad_norm": 28.549802780151367, "learning_rate": 3.1240000000000006e-05, "loss": 1.5174, "step": 788 }, { "epoch": 6.312, "grad_norm": 53.36457061767578, "learning_rate": 3.1280000000000005e-05, "loss": 1.8814, "step": 789 }, { "epoch": 6.32, "grad_norm": 38.45892333984375, "learning_rate": 3.132e-05, "loss": 1.975, "step": 790 }, { "epoch": 6.328, "grad_norm": 40.72148513793945, "learning_rate": 3.136e-05, "loss": 1.8055, "step": 791 }, { "epoch": 6.336, "grad_norm": 229.41111755371094, "learning_rate": 3.1400000000000004e-05, "loss": 1.9735, "step": 792 }, { "epoch": 6.344, "grad_norm": 31.23679542541504, "learning_rate": 3.1440000000000004e-05, "loss": 2.0317, "step": 793 }, { "epoch": 6.352, "grad_norm": 35.87202835083008, "learning_rate": 3.1480000000000004e-05, "loss": 1.428, "step": 794 }, { "epoch": 6.36, "grad_norm": 39.77350616455078, "learning_rate": 3.1519999999999996e-05, "loss": 2.0026, "step": 795 }, { "epoch": 6.368, "grad_norm": 69.70037078857422, "learning_rate": 3.156e-05, "loss": 2.272, "step": 796 }, { "epoch": 6.376, "grad_norm": 394.16070556640625, "learning_rate": 3.16e-05, "loss": 1.4847, "step": 797 }, { "epoch": 6.384, "grad_norm": 49.553985595703125, "learning_rate": 3.164e-05, "loss": 1.4156, "step": 798 }, { "epoch": 6.392, "grad_norm": 53.36570358276367, "learning_rate": 3.168e-05, "loss": 1.9614, "step": 799 }, { "epoch": 6.4, "grad_norm": 158.03587341308594, "learning_rate": 
3.172e-05, "loss": 1.6053, "step": 800 }, { "epoch": 6.408, "grad_norm": 57.58442306518555, "learning_rate": 3.176e-05, "loss": 2.2114, "step": 801 }, { "epoch": 6.416, "grad_norm": 47.22075271606445, "learning_rate": 3.18e-05, "loss": 1.6455, "step": 802 }, { "epoch": 6.424, "grad_norm": 35.765098571777344, "learning_rate": 3.184e-05, "loss": 1.7163, "step": 803 }, { "epoch": 6.432, "grad_norm": 73.9415283203125, "learning_rate": 3.188e-05, "loss": 2.0281, "step": 804 }, { "epoch": 6.44, "grad_norm": 45.32304763793945, "learning_rate": 3.192e-05, "loss": 1.7758, "step": 805 }, { "epoch": 6.448, "grad_norm": 33.15341567993164, "learning_rate": 3.196e-05, "loss": 1.4819, "step": 806 }, { "epoch": 6.456, "grad_norm": 47.914363861083984, "learning_rate": 3.2000000000000005e-05, "loss": 1.7034, "step": 807 }, { "epoch": 6.464, "grad_norm": 47.035884857177734, "learning_rate": 3.2040000000000005e-05, "loss": 1.5323, "step": 808 }, { "epoch": 6.4719999999999995, "grad_norm": 50.204402923583984, "learning_rate": 3.208e-05, "loss": 2.0916, "step": 809 }, { "epoch": 6.48, "grad_norm": 78.0243911743164, "learning_rate": 3.212e-05, "loss": 2.3924, "step": 810 }, { "epoch": 6.4879999999999995, "grad_norm": 40.25130844116211, "learning_rate": 3.2160000000000004e-05, "loss": 1.7525, "step": 811 }, { "epoch": 6.496, "grad_norm": 280.83184814453125, "learning_rate": 3.2200000000000003e-05, "loss": 1.6169, "step": 812 }, { "epoch": 6.504, "grad_norm": 33.60234832763672, "learning_rate": 3.224e-05, "loss": 1.6261, "step": 813 }, { "epoch": 6.5120000000000005, "grad_norm": 70.78062438964844, "learning_rate": 3.2279999999999996e-05, "loss": 2.0529, "step": 814 }, { "epoch": 6.52, "grad_norm": 104.69824981689453, "learning_rate": 3.232e-05, "loss": 1.8286, "step": 815 }, { "epoch": 6.5280000000000005, "grad_norm": 39.926513671875, "learning_rate": 3.236e-05, "loss": 2.8486, "step": 816 }, { "epoch": 6.536, "grad_norm": 24.085594177246094, "learning_rate": 3.24e-05, "loss": 1.8675, 
"step": 817 }, { "epoch": 6.5440000000000005, "grad_norm": 27.58967399597168, "learning_rate": 3.244e-05, "loss": 1.4647, "step": 818 }, { "epoch": 6.552, "grad_norm": 36.66757583618164, "learning_rate": 3.248e-05, "loss": 1.2768, "step": 819 }, { "epoch": 6.5600000000000005, "grad_norm": 346.6940612792969, "learning_rate": 3.252e-05, "loss": 1.9605, "step": 820 }, { "epoch": 6.568, "grad_norm": 68.52937316894531, "learning_rate": 3.256e-05, "loss": 1.4437, "step": 821 }, { "epoch": 6.576, "grad_norm": 24.546598434448242, "learning_rate": 3.26e-05, "loss": 1.5018, "step": 822 }, { "epoch": 6.584, "grad_norm": 25.794708251953125, "learning_rate": 3.2640000000000006e-05, "loss": 1.6156, "step": 823 }, { "epoch": 6.592, "grad_norm": 27.41962242126465, "learning_rate": 3.268e-05, "loss": 1.5854, "step": 824 }, { "epoch": 6.6, "grad_norm": 34.994625091552734, "learning_rate": 3.272e-05, "loss": 2.2185, "step": 825 }, { "epoch": 6.608, "grad_norm": 23.28485107421875, "learning_rate": 3.2760000000000005e-05, "loss": 2.2038, "step": 826 }, { "epoch": 6.616, "grad_norm": 48.07870864868164, "learning_rate": 3.2800000000000004e-05, "loss": 1.2786, "step": 827 }, { "epoch": 6.624, "grad_norm": 32.225921630859375, "learning_rate": 3.2840000000000004e-05, "loss": 1.9265, "step": 828 }, { "epoch": 6.632, "grad_norm": 180.84640502929688, "learning_rate": 3.288e-05, "loss": 1.7042, "step": 829 }, { "epoch": 6.64, "grad_norm": 34.063072204589844, "learning_rate": 3.292e-05, "loss": 1.9538, "step": 830 }, { "epoch": 6.648, "grad_norm": 34.278587341308594, "learning_rate": 3.296e-05, "loss": 2.0022, "step": 831 }, { "epoch": 6.656, "grad_norm": 34.92667007446289, "learning_rate": 3.3e-05, "loss": 1.4918, "step": 832 }, { "epoch": 6.664, "grad_norm": 67.8975601196289, "learning_rate": 3.304e-05, "loss": 1.2802, "step": 833 }, { "epoch": 6.672, "grad_norm": 38.56691360473633, "learning_rate": 3.308e-05, "loss": 1.5629, "step": 834 }, { "epoch": 6.68, "grad_norm": 359.6405029296875, 
"learning_rate": 3.312e-05, "loss": 1.6529, "step": 835 }, { "epoch": 6.688, "grad_norm": 50.31883239746094, "learning_rate": 3.316e-05, "loss": 1.6835, "step": 836 }, { "epoch": 6.696, "grad_norm": 68.89288330078125, "learning_rate": 3.32e-05, "loss": 1.3653, "step": 837 }, { "epoch": 6.704, "grad_norm": 33.226131439208984, "learning_rate": 3.324e-05, "loss": 1.3256, "step": 838 }, { "epoch": 6.712, "grad_norm": 30.462921142578125, "learning_rate": 3.328e-05, "loss": 1.6727, "step": 839 }, { "epoch": 6.72, "grad_norm": 36.80095672607422, "learning_rate": 3.332e-05, "loss": 1.6279, "step": 840 }, { "epoch": 6.728, "grad_norm": 45.80922317504883, "learning_rate": 3.336e-05, "loss": 1.6932, "step": 841 }, { "epoch": 6.736, "grad_norm": 52.63370895385742, "learning_rate": 3.3400000000000005e-05, "loss": 1.528, "step": 842 }, { "epoch": 6.744, "grad_norm": 22.091928482055664, "learning_rate": 3.344e-05, "loss": 1.8878, "step": 843 }, { "epoch": 6.752, "grad_norm": 78.81912231445312, "learning_rate": 3.348e-05, "loss": 1.8102, "step": 844 }, { "epoch": 6.76, "grad_norm": 26.308622360229492, "learning_rate": 3.3520000000000004e-05, "loss": 1.4495, "step": 845 }, { "epoch": 6.768, "grad_norm": 70.02446746826172, "learning_rate": 3.3560000000000004e-05, "loss": 1.9649, "step": 846 }, { "epoch": 6.776, "grad_norm": 48.36985778808594, "learning_rate": 3.3600000000000004e-05, "loss": 2.4729, "step": 847 }, { "epoch": 6.784, "grad_norm": 64.89358520507812, "learning_rate": 3.3639999999999996e-05, "loss": 1.7615, "step": 848 }, { "epoch": 6.792, "grad_norm": 103.97611999511719, "learning_rate": 3.368e-05, "loss": 2.6963, "step": 849 }, { "epoch": 6.8, "grad_norm": 78.02941131591797, "learning_rate": 3.372e-05, "loss": 1.9794, "step": 850 }, { "epoch": 6.808, "grad_norm": 46.15909194946289, "learning_rate": 3.376e-05, "loss": 1.5916, "step": 851 }, { "epoch": 6.816, "grad_norm": 59.04781723022461, "learning_rate": 3.38e-05, "loss": 1.5902, "step": 852 }, { "epoch": 6.824, 
"grad_norm": 41.23713302612305, "learning_rate": 3.384e-05, "loss": 1.8113, "step": 853 }, { "epoch": 6.832, "grad_norm": 202.64974975585938, "learning_rate": 3.388e-05, "loss": 2.3346, "step": 854 }, { "epoch": 6.84, "grad_norm": 30.949831008911133, "learning_rate": 3.392e-05, "loss": 2.0405, "step": 855 }, { "epoch": 6.848, "grad_norm": 177.3353271484375, "learning_rate": 3.396e-05, "loss": 1.9104, "step": 856 }, { "epoch": 6.856, "grad_norm": 19.615352630615234, "learning_rate": 3.4000000000000007e-05, "loss": 1.6045, "step": 857 }, { "epoch": 6.864, "grad_norm": 59.836631774902344, "learning_rate": 3.404e-05, "loss": 1.3134, "step": 858 }, { "epoch": 6.872, "grad_norm": 18.840330123901367, "learning_rate": 3.408e-05, "loss": 2.4407, "step": 859 }, { "epoch": 6.88, "grad_norm": 37.49502944946289, "learning_rate": 3.412e-05, "loss": 1.5375, "step": 860 }, { "epoch": 6.888, "grad_norm": 46.22768020629883, "learning_rate": 3.4160000000000005e-05, "loss": 1.7155, "step": 861 }, { "epoch": 6.896, "grad_norm": 105.55973815917969, "learning_rate": 3.4200000000000005e-05, "loss": 2.0019, "step": 862 }, { "epoch": 6.904, "grad_norm": 20.27342987060547, "learning_rate": 3.424e-05, "loss": 2.1291, "step": 863 }, { "epoch": 6.912, "grad_norm": 49.382083892822266, "learning_rate": 3.4280000000000004e-05, "loss": 3.1574, "step": 864 }, { "epoch": 6.92, "grad_norm": 136.2052459716797, "learning_rate": 3.4320000000000003e-05, "loss": 1.6212, "step": 865 }, { "epoch": 6.928, "grad_norm": 33.64103698730469, "learning_rate": 3.436e-05, "loss": 1.7119, "step": 866 }, { "epoch": 6.936, "grad_norm": 27.418832778930664, "learning_rate": 3.4399999999999996e-05, "loss": 2.354, "step": 867 }, { "epoch": 6.944, "grad_norm": 20.91653823852539, "learning_rate": 3.444e-05, "loss": 1.6524, "step": 868 }, { "epoch": 6.952, "grad_norm": 32.768394470214844, "learning_rate": 3.448e-05, "loss": 1.733, "step": 869 }, { "epoch": 6.96, "grad_norm": 34.04643630981445, "learning_rate": 3.452e-05, 
"loss": 1.4773, "step": 870 }, { "epoch": 6.968, "grad_norm": 58.492759704589844, "learning_rate": 3.456e-05, "loss": 3.3553, "step": 871 }, { "epoch": 6.976, "grad_norm": 69.8660659790039, "learning_rate": 3.46e-05, "loss": 1.6898, "step": 872 }, { "epoch": 6.984, "grad_norm": 27.034568786621094, "learning_rate": 3.464e-05, "loss": 1.773, "step": 873 }, { "epoch": 6.992, "grad_norm": 134.58984375, "learning_rate": 3.468e-05, "loss": 1.4996, "step": 874 }, { "epoch": 7.0, "grad_norm": 30.29158592224121, "learning_rate": 3.472e-05, "loss": 1.2908, "step": 875 }, { "epoch": 7.0, "eval_loss": 1.8998314142227173, "eval_map": 0.0885, "eval_map_50": 0.1926, "eval_map_75": 0.0654, "eval_map_Coverall": 0.3367, "eval_map_Face_Shield": 0.0, "eval_map_Gloves": 0.0621, "eval_map_Goggles": 0.0, "eval_map_Mask": 0.0437, "eval_map_large": 0.1175, "eval_map_medium": 0.0298, "eval_map_small": 0.0263, "eval_mar_1": 0.0956, "eval_mar_10": 0.2123, "eval_mar_100": 0.2234, "eval_mar_100_Coverall": 0.6356, "eval_mar_100_Face_Shield": 0.0, "eval_mar_100_Gloves": 0.2393, "eval_mar_100_Goggles": 0.0, "eval_mar_100_Mask": 0.2423, "eval_mar_large": 0.207, "eval_mar_medium": 0.1367, "eval_mar_small": 0.0373, "eval_runtime": 2.4632, "eval_samples_per_second": 11.773, "eval_steps_per_second": 0.812, "step": 875 }, { "epoch": 7.008, "grad_norm": 47.4344482421875, "learning_rate": 3.4760000000000006e-05, "loss": 1.8925, "step": 876 }, { "epoch": 7.016, "grad_norm": 44.47873306274414, "learning_rate": 3.48e-05, "loss": 1.8632, "step": 877 }, { "epoch": 7.024, "grad_norm": 20.894041061401367, "learning_rate": 3.484e-05, "loss": 1.6381, "step": 878 }, { "epoch": 7.032, "grad_norm": 74.47354125976562, "learning_rate": 3.4880000000000005e-05, "loss": 1.4072, "step": 879 }, { "epoch": 7.04, "grad_norm": 23.064950942993164, "learning_rate": 3.4920000000000004e-05, "loss": 1.6069, "step": 880 }, { "epoch": 7.048, "grad_norm": 30.225820541381836, "learning_rate": 3.4960000000000004e-05, "loss": 1.7162, 
"step": 881 }, { "epoch": 7.056, "grad_norm": 55.147727966308594, "learning_rate": 3.5e-05, "loss": 1.5473, "step": 882 }, { "epoch": 7.064, "grad_norm": 39.45255661010742, "learning_rate": 3.504e-05, "loss": 1.5799, "step": 883 }, { "epoch": 7.072, "grad_norm": 45.714759826660156, "learning_rate": 3.508e-05, "loss": 2.0089, "step": 884 }, { "epoch": 7.08, "grad_norm": 82.72161865234375, "learning_rate": 3.512e-05, "loss": 1.5701, "step": 885 }, { "epoch": 7.088, "grad_norm": 42.97380828857422, "learning_rate": 3.516e-05, "loss": 1.6005, "step": 886 }, { "epoch": 7.096, "grad_norm": 30.271081924438477, "learning_rate": 3.52e-05, "loss": 1.5783, "step": 887 }, { "epoch": 7.104, "grad_norm": 33.89234161376953, "learning_rate": 3.524e-05, "loss": 1.8445, "step": 888 }, { "epoch": 7.112, "grad_norm": 32.68315505981445, "learning_rate": 3.528e-05, "loss": 1.4487, "step": 889 }, { "epoch": 7.12, "grad_norm": 60.22013473510742, "learning_rate": 3.532e-05, "loss": 1.4212, "step": 890 }, { "epoch": 7.128, "grad_norm": 18.583972930908203, "learning_rate": 3.536000000000001e-05, "loss": 1.4401, "step": 891 }, { "epoch": 7.136, "grad_norm": 29.127405166625977, "learning_rate": 3.54e-05, "loss": 1.462, "step": 892 }, { "epoch": 7.144, "grad_norm": 207.1803436279297, "learning_rate": 3.544e-05, "loss": 1.6891, "step": 893 }, { "epoch": 7.152, "grad_norm": 38.94334411621094, "learning_rate": 3.548e-05, "loss": 1.3524, "step": 894 }, { "epoch": 7.16, "grad_norm": 40.51328659057617, "learning_rate": 3.5520000000000006e-05, "loss": 1.4459, "step": 895 }, { "epoch": 7.168, "grad_norm": 48.84016799926758, "learning_rate": 3.5560000000000005e-05, "loss": 2.0899, "step": 896 }, { "epoch": 7.176, "grad_norm": 18.950902938842773, "learning_rate": 3.56e-05, "loss": 1.9922, "step": 897 }, { "epoch": 7.184, "grad_norm": 49.916847229003906, "learning_rate": 3.5640000000000004e-05, "loss": 1.626, "step": 898 }, { "epoch": 7.192, "grad_norm": 39.90169906616211, "learning_rate": 
3.5680000000000004e-05, "loss": 1.6655, "step": 899 }, { "epoch": 7.2, "grad_norm": 30.10798454284668, "learning_rate": 3.5720000000000004e-05, "loss": 1.5768, "step": 900 }, { "epoch": 7.208, "grad_norm": 54.10240936279297, "learning_rate": 3.5759999999999996e-05, "loss": 1.6214, "step": 901 }, { "epoch": 7.216, "grad_norm": 105.29471588134766, "learning_rate": 3.58e-05, "loss": 1.9208, "step": 902 }, { "epoch": 7.224, "grad_norm": 35.83061981201172, "learning_rate": 3.584e-05, "loss": 1.8139, "step": 903 }, { "epoch": 7.232, "grad_norm": 40.128684997558594, "learning_rate": 3.588e-05, "loss": 1.7973, "step": 904 }, { "epoch": 7.24, "grad_norm": 72.54622650146484, "learning_rate": 3.592e-05, "loss": 1.656, "step": 905 }, { "epoch": 7.248, "grad_norm": 48.57894515991211, "learning_rate": 3.596e-05, "loss": 1.732, "step": 906 }, { "epoch": 7.256, "grad_norm": 34.783878326416016, "learning_rate": 3.6e-05, "loss": 1.9715, "step": 907 }, { "epoch": 7.264, "grad_norm": 45.945770263671875, "learning_rate": 3.604e-05, "loss": 1.712, "step": 908 }, { "epoch": 7.272, "grad_norm": 67.51863861083984, "learning_rate": 3.608e-05, "loss": 2.3981, "step": 909 }, { "epoch": 7.28, "grad_norm": 41.718101501464844, "learning_rate": 3.6120000000000007e-05, "loss": 1.5854, "step": 910 }, { "epoch": 7.288, "grad_norm": 32.507537841796875, "learning_rate": 3.616e-05, "loss": 1.1995, "step": 911 }, { "epoch": 7.296, "grad_norm": 27.511390686035156, "learning_rate": 3.62e-05, "loss": 2.0798, "step": 912 }, { "epoch": 7.304, "grad_norm": 151.48312377929688, "learning_rate": 3.624e-05, "loss": 1.9064, "step": 913 }, { "epoch": 7.312, "grad_norm": 59.865848541259766, "learning_rate": 3.6280000000000005e-05, "loss": 1.6076, "step": 914 }, { "epoch": 7.32, "grad_norm": 38.97804641723633, "learning_rate": 3.6320000000000005e-05, "loss": 1.3557, "step": 915 }, { "epoch": 7.328, "grad_norm": 40.548221588134766, "learning_rate": 3.636e-05, "loss": 1.756, "step": 916 }, { "epoch": 7.336, 
"grad_norm": 38.4267463684082, "learning_rate": 3.6400000000000004e-05, "loss": 1.847, "step": 917 }, { "epoch": 7.344, "grad_norm": 39.472679138183594, "learning_rate": 3.6440000000000003e-05, "loss": 1.2061, "step": 918 }, { "epoch": 7.352, "grad_norm": 38.166629791259766, "learning_rate": 3.648e-05, "loss": 1.429, "step": 919 }, { "epoch": 7.36, "grad_norm": 149.19168090820312, "learning_rate": 3.652e-05, "loss": 1.6558, "step": 920 }, { "epoch": 7.368, "grad_norm": 30.753847122192383, "learning_rate": 3.656e-05, "loss": 1.4747, "step": 921 }, { "epoch": 7.376, "grad_norm": 29.177013397216797, "learning_rate": 3.66e-05, "loss": 1.5601, "step": 922 }, { "epoch": 7.384, "grad_norm": 27.800640106201172, "learning_rate": 3.664e-05, "loss": 1.5839, "step": 923 }, { "epoch": 7.392, "grad_norm": 31.303089141845703, "learning_rate": 3.668e-05, "loss": 1.6412, "step": 924 }, { "epoch": 7.4, "grad_norm": 42.72087860107422, "learning_rate": 3.672000000000001e-05, "loss": 1.6152, "step": 925 }, { "epoch": 7.408, "grad_norm": 43.43278121948242, "learning_rate": 3.676e-05, "loss": 1.5797, "step": 926 }, { "epoch": 7.416, "grad_norm": 25.667875289916992, "learning_rate": 3.68e-05, "loss": 1.9665, "step": 927 }, { "epoch": 7.424, "grad_norm": 42.89405059814453, "learning_rate": 3.684e-05, "loss": 1.6918, "step": 928 }, { "epoch": 7.432, "grad_norm": 26.142980575561523, "learning_rate": 3.6880000000000006e-05, "loss": 1.1909, "step": 929 }, { "epoch": 7.44, "grad_norm": 28.227840423583984, "learning_rate": 3.692e-05, "loss": 1.3053, "step": 930 }, { "epoch": 7.448, "grad_norm": 42.96062469482422, "learning_rate": 3.696e-05, "loss": 1.6565, "step": 931 }, { "epoch": 7.456, "grad_norm": 43.218082427978516, "learning_rate": 3.7e-05, "loss": 1.98, "step": 932 }, { "epoch": 7.464, "grad_norm": 31.887794494628906, "learning_rate": 3.7040000000000005e-05, "loss": 2.0422, "step": 933 }, { "epoch": 7.4719999999999995, "grad_norm": 57.45805358886719, "learning_rate": 
3.7080000000000004e-05, "loss": 1.8979, "step": 934 }, { "epoch": 7.48, "grad_norm": 27.358945846557617, "learning_rate": 3.712e-05, "loss": 1.9795, "step": 935 }, { "epoch": 7.4879999999999995, "grad_norm": 53.63346862792969, "learning_rate": 3.716e-05, "loss": 2.0296, "step": 936 }, { "epoch": 7.496, "grad_norm": 30.836729049682617, "learning_rate": 3.72e-05, "loss": 1.955, "step": 937 }, { "epoch": 7.504, "grad_norm": 102.89934539794922, "learning_rate": 3.724e-05, "loss": 1.5264, "step": 938 }, { "epoch": 7.5120000000000005, "grad_norm": 18.538991928100586, "learning_rate": 3.728e-05, "loss": 1.7551, "step": 939 }, { "epoch": 7.52, "grad_norm": 41.52400207519531, "learning_rate": 3.732e-05, "loss": 1.6328, "step": 940 }, { "epoch": 7.5280000000000005, "grad_norm": 27.784481048583984, "learning_rate": 3.736e-05, "loss": 2.3019, "step": 941 }, { "epoch": 7.536, "grad_norm": 67.23844146728516, "learning_rate": 3.74e-05, "loss": 1.5731, "step": 942 }, { "epoch": 7.5440000000000005, "grad_norm": 52.992610931396484, "learning_rate": 3.744e-05, "loss": 1.7094, "step": 943 }, { "epoch": 7.552, "grad_norm": 62.55741500854492, "learning_rate": 3.748000000000001e-05, "loss": 3.5473, "step": 944 }, { "epoch": 7.5600000000000005, "grad_norm": 134.02951049804688, "learning_rate": 3.752e-05, "loss": 1.7355, "step": 945 }, { "epoch": 7.568, "grad_norm": 79.98090362548828, "learning_rate": 3.756e-05, "loss": 1.7182, "step": 946 }, { "epoch": 7.576, "grad_norm": 135.75270080566406, "learning_rate": 3.76e-05, "loss": 2.1224, "step": 947 }, { "epoch": 7.584, "grad_norm": 29.803491592407227, "learning_rate": 3.7640000000000006e-05, "loss": 1.7856, "step": 948 }, { "epoch": 7.592, "grad_norm": 38.162662506103516, "learning_rate": 3.7680000000000005e-05, "loss": 1.7637, "step": 949 }, { "epoch": 7.6, "grad_norm": 56.51226043701172, "learning_rate": 3.772e-05, "loss": 1.4497, "step": 950 }, { "epoch": 7.608, "grad_norm": 58.04060363769531, "learning_rate": 3.776e-05, "loss": 2.0482, 
"step": 951 }, { "epoch": 7.616, "grad_norm": 35.6201171875, "learning_rate": 3.7800000000000004e-05, "loss": 1.4509, "step": 952 }, { "epoch": 7.624, "grad_norm": 59.43657684326172, "learning_rate": 3.7840000000000004e-05, "loss": 2.7095, "step": 953 }, { "epoch": 7.632, "grad_norm": 38.30772018432617, "learning_rate": 3.788e-05, "loss": 1.4304, "step": 954 }, { "epoch": 7.64, "grad_norm": 31.413480758666992, "learning_rate": 3.792e-05, "loss": 3.0652, "step": 955 }, { "epoch": 7.648, "grad_norm": 67.28840637207031, "learning_rate": 3.796e-05, "loss": 1.7567, "step": 956 }, { "epoch": 7.656, "grad_norm": 32.37002182006836, "learning_rate": 3.8e-05, "loss": 1.7667, "step": 957 }, { "epoch": 7.664, "grad_norm": 40.286285400390625, "learning_rate": 3.804e-05, "loss": 1.5812, "step": 958 }, { "epoch": 7.672, "grad_norm": 45.39670944213867, "learning_rate": 3.808e-05, "loss": 1.713, "step": 959 }, { "epoch": 7.68, "grad_norm": 55.57414627075195, "learning_rate": 3.812e-05, "loss": 1.9777, "step": 960 }, { "epoch": 7.688, "grad_norm": 30.062345504760742, "learning_rate": 3.816e-05, "loss": 1.9371, "step": 961 }, { "epoch": 7.696, "grad_norm": 91.90543365478516, "learning_rate": 3.82e-05, "loss": 1.9719, "step": 962 }, { "epoch": 7.704, "grad_norm": 25.928958892822266, "learning_rate": 3.8240000000000007e-05, "loss": 2.0121, "step": 963 }, { "epoch": 7.712, "grad_norm": 77.87445068359375, "learning_rate": 3.828e-05, "loss": 1.3824, "step": 964 }, { "epoch": 7.72, "grad_norm": 28.16839027404785, "learning_rate": 3.832e-05, "loss": 1.727, "step": 965 }, { "epoch": 7.728, "grad_norm": 81.16009521484375, "learning_rate": 3.836e-05, "loss": 1.6444, "step": 966 }, { "epoch": 7.736, "grad_norm": 17.24000358581543, "learning_rate": 3.8400000000000005e-05, "loss": 2.176, "step": 967 }, { "epoch": 7.744, "grad_norm": 20.20825958251953, "learning_rate": 3.8440000000000005e-05, "loss": 1.1214, "step": 968 }, { "epoch": 7.752, "grad_norm": 23.37070655822754, "learning_rate": 
3.848e-05, "loss": 2.0379, "step": 969 }, { "epoch": 7.76, "grad_norm": 42.037906646728516, "learning_rate": 3.8520000000000004e-05, "loss": 1.7764, "step": 970 }, { "epoch": 7.768, "grad_norm": 183.8042755126953, "learning_rate": 3.8560000000000004e-05, "loss": 1.5349, "step": 971 }, { "epoch": 7.776, "grad_norm": 51.63947677612305, "learning_rate": 3.86e-05, "loss": 1.6337, "step": 972 }, { "epoch": 7.784, "grad_norm": 87.78170013427734, "learning_rate": 3.864e-05, "loss": 1.85, "step": 973 }, { "epoch": 7.792, "grad_norm": 155.0977020263672, "learning_rate": 3.868e-05, "loss": 1.8272, "step": 974 }, { "epoch": 7.8, "grad_norm": 34.60445022583008, "learning_rate": 3.872e-05, "loss": 2.3702, "step": 975 }, { "epoch": 7.808, "grad_norm": 30.64291763305664, "learning_rate": 3.876e-05, "loss": 2.0267, "step": 976 }, { "epoch": 7.816, "grad_norm": 357.1103515625, "learning_rate": 3.88e-05, "loss": 1.6919, "step": 977 }, { "epoch": 7.824, "grad_norm": 35.580204010009766, "learning_rate": 3.884e-05, "loss": 1.5908, "step": 978 }, { "epoch": 7.832, "grad_norm": 72.58586883544922, "learning_rate": 3.888e-05, "loss": 1.4529, "step": 979 }, { "epoch": 7.84, "grad_norm": 268.4779052734375, "learning_rate": 3.892e-05, "loss": 1.5971, "step": 980 }, { "epoch": 7.848, "grad_norm": 54.2479133605957, "learning_rate": 3.896e-05, "loss": 1.5486, "step": 981 }, { "epoch": 7.856, "grad_norm": 38.66508865356445, "learning_rate": 3.9000000000000006e-05, "loss": 1.9034, "step": 982 }, { "epoch": 7.864, "grad_norm": 43.80899429321289, "learning_rate": 3.9040000000000006e-05, "loss": 1.6559, "step": 983 }, { "epoch": 7.872, "grad_norm": 33.006134033203125, "learning_rate": 3.908e-05, "loss": 1.8262, "step": 984 }, { "epoch": 7.88, "grad_norm": 44.47646713256836, "learning_rate": 3.912e-05, "loss": 1.8179, "step": 985 }, { "epoch": 7.888, "grad_norm": 37.435707092285156, "learning_rate": 3.9160000000000005e-05, "loss": 2.0815, "step": 986 }, { "epoch": 7.896, "grad_norm": 
30.157251358032227, "learning_rate": 3.9200000000000004e-05, "loss": 1.7394, "step": 987 }, { "epoch": 7.904, "grad_norm": 45.19995880126953, "learning_rate": 3.9240000000000004e-05, "loss": 1.5378, "step": 988 }, { "epoch": 7.912, "grad_norm": 28.39035987854004, "learning_rate": 3.9280000000000003e-05, "loss": 1.7125, "step": 989 }, { "epoch": 7.92, "grad_norm": 57.91436004638672, "learning_rate": 3.932e-05, "loss": 1.7837, "step": 990 }, { "epoch": 7.928, "grad_norm": 61.79312515258789, "learning_rate": 3.936e-05, "loss": 1.9813, "step": 991 }, { "epoch": 7.936, "grad_norm": 58.35183334350586, "learning_rate": 3.94e-05, "loss": 1.3195, "step": 992 }, { "epoch": 7.944, "grad_norm": 52.107757568359375, "learning_rate": 3.944e-05, "loss": 2.3043, "step": 993 }, { "epoch": 7.952, "grad_norm": 30.14992904663086, "learning_rate": 3.948e-05, "loss": 1.6567, "step": 994 }, { "epoch": 7.96, "grad_norm": 33.026695251464844, "learning_rate": 3.952e-05, "loss": 1.4411, "step": 995 }, { "epoch": 7.968, "grad_norm": 35.157867431640625, "learning_rate": 3.956e-05, "loss": 1.4638, "step": 996 }, { "epoch": 7.976, "grad_norm": 43.6325798034668, "learning_rate": 3.960000000000001e-05, "loss": 2.0347, "step": 997 }, { "epoch": 7.984, "grad_norm": 35.92350387573242, "learning_rate": 3.964e-05, "loss": 1.673, "step": 998 }, { "epoch": 7.992, "grad_norm": 54.867923736572266, "learning_rate": 3.968e-05, "loss": 1.7327, "step": 999 }, { "epoch": 8.0, "grad_norm": 36.39165496826172, "learning_rate": 3.972e-05, "loss": 1.5749, "step": 1000 }, { "epoch": 8.0, "eval_loss": 1.7741632461547852, "eval_map": 0.1502, "eval_map_50": 0.3005, "eval_map_75": 0.1163, "eval_map_Coverall": 0.4634, "eval_map_Face_Shield": 0.1045, "eval_map_Gloves": 0.0908, "eval_map_Goggles": 0.0, "eval_map_Mask": 0.0925, "eval_map_large": 0.1845, "eval_map_medium": 0.0453, "eval_map_small": 0.0383, "eval_mar_1": 0.1326, "eval_mar_10": 0.2584, "eval_mar_100": 0.2748, "eval_mar_100_Coverall": 0.7133, 
"eval_mar_100_Face_Shield": 0.1412, "eval_mar_100_Gloves": 0.2656, "eval_mar_100_Goggles": 0.0, "eval_mar_100_Mask": 0.2538, "eval_mar_large": 0.2813, "eval_mar_medium": 0.1249, "eval_mar_small": 0.0945, "eval_runtime": 2.4683, "eval_samples_per_second": 11.749, "eval_steps_per_second": 0.81, "step": 1000 }, { "epoch": 8.008, "grad_norm": 66.6650161743164, "learning_rate": 3.9760000000000006e-05, "loss": 1.7974, "step": 1001 }, { "epoch": 8.016, "grad_norm": 24.069700241088867, "learning_rate": 3.9800000000000005e-05, "loss": 1.9413, "step": 1002 }, { "epoch": 8.024, "grad_norm": 528.5872192382812, "learning_rate": 3.984e-05, "loss": 1.6176, "step": 1003 }, { "epoch": 8.032, "grad_norm": 35.021385192871094, "learning_rate": 3.988e-05, "loss": 2.3321, "step": 1004 }, { "epoch": 8.04, "grad_norm": 39.26355743408203, "learning_rate": 3.9920000000000004e-05, "loss": 1.767, "step": 1005 }, { "epoch": 8.048, "grad_norm": 73.69499206542969, "learning_rate": 3.9960000000000004e-05, "loss": 1.9372, "step": 1006 }, { "epoch": 8.056, "grad_norm": 53.97069549560547, "learning_rate": 4e-05, "loss": 1.5553, "step": 1007 }, { "epoch": 8.064, "grad_norm": 32.645179748535156, "learning_rate": 4.004e-05, "loss": 1.7968, "step": 1008 }, { "epoch": 8.072, "grad_norm": 47.23263168334961, "learning_rate": 4.008e-05, "loss": 1.8452, "step": 1009 }, { "epoch": 8.08, "grad_norm": 20.459716796875, "learning_rate": 4.012e-05, "loss": 1.7278, "step": 1010 }, { "epoch": 8.088, "grad_norm": 93.96044921875, "learning_rate": 4.016e-05, "loss": 1.9987, "step": 1011 }, { "epoch": 8.096, "grad_norm": 41.28712844848633, "learning_rate": 4.02e-05, "loss": 1.4535, "step": 1012 }, { "epoch": 8.104, "grad_norm": 40.5728645324707, "learning_rate": 4.024e-05, "loss": 1.6078, "step": 1013 }, { "epoch": 8.112, "grad_norm": 32.94222640991211, "learning_rate": 4.028e-05, "loss": 1.6518, "step": 1014 }, { "epoch": 8.12, "grad_norm": 33.99615478515625, "learning_rate": 4.032e-05, "loss": 1.6329, "step": 1015 }, 
{ "epoch": 8.128, "grad_norm": 37.05217361450195, "learning_rate": 4.0360000000000007e-05, "loss": 1.944, "step": 1016 }, { "epoch": 8.136, "grad_norm": 30.24152946472168, "learning_rate": 4.0400000000000006e-05, "loss": 1.9372, "step": 1017 }, { "epoch": 8.144, "grad_norm": 28.103172302246094, "learning_rate": 4.044e-05, "loss": 1.7581, "step": 1018 }, { "epoch": 8.152, "grad_norm": 30.190780639648438, "learning_rate": 4.048e-05, "loss": 1.7373, "step": 1019 }, { "epoch": 8.16, "grad_norm": 41.4472541809082, "learning_rate": 4.0520000000000005e-05, "loss": 1.7203, "step": 1020 }, { "epoch": 8.168, "grad_norm": 24.095380783081055, "learning_rate": 4.0560000000000005e-05, "loss": 1.5679, "step": 1021 }, { "epoch": 8.176, "grad_norm": 54.51499557495117, "learning_rate": 4.0600000000000004e-05, "loss": 1.9151, "step": 1022 }, { "epoch": 8.184, "grad_norm": 35.895442962646484, "learning_rate": 4.064e-05, "loss": 1.467, "step": 1023 }, { "epoch": 8.192, "grad_norm": 33.756134033203125, "learning_rate": 4.0680000000000004e-05, "loss": 1.9084, "step": 1024 }, { "epoch": 8.2, "grad_norm": 131.79624938964844, "learning_rate": 4.072e-05, "loss": 1.5833, "step": 1025 }, { "epoch": 8.208, "grad_norm": 25.940467834472656, "learning_rate": 4.076e-05, "loss": 1.2165, "step": 1026 }, { "epoch": 8.216, "grad_norm": 27.924327850341797, "learning_rate": 4.08e-05, "loss": 1.4168, "step": 1027 }, { "epoch": 8.224, "grad_norm": 22.37205696105957, "learning_rate": 4.084e-05, "loss": 1.6689, "step": 1028 }, { "epoch": 8.232, "grad_norm": 33.517059326171875, "learning_rate": 4.088e-05, "loss": 1.7017, "step": 1029 }, { "epoch": 8.24, "grad_norm": 101.80235290527344, "learning_rate": 4.092e-05, "loss": 2.2809, "step": 1030 }, { "epoch": 8.248, "grad_norm": 32.00394058227539, "learning_rate": 4.096e-05, "loss": 1.5009, "step": 1031 }, { "epoch": 8.256, "grad_norm": 52.849586486816406, "learning_rate": 4.1e-05, "loss": 1.7388, "step": 1032 }, { "epoch": 8.264, "grad_norm": 24.23366928100586, 
"learning_rate": 4.104e-05, "loss": 1.6594, "step": 1033 }, { "epoch": 8.272, "grad_norm": 70.96336364746094, "learning_rate": 4.108e-05, "loss": 1.8525, "step": 1034 }, { "epoch": 8.28, "grad_norm": 35.937740325927734, "learning_rate": 4.1120000000000006e-05, "loss": 1.6464, "step": 1035 }, { "epoch": 8.288, "grad_norm": 30.451641082763672, "learning_rate": 4.1160000000000006e-05, "loss": 1.7638, "step": 1036 }, { "epoch": 8.296, "grad_norm": 40.47056579589844, "learning_rate": 4.12e-05, "loss": 2.1393, "step": 1037 }, { "epoch": 8.304, "grad_norm": 56.09366226196289, "learning_rate": 4.124e-05, "loss": 1.6239, "step": 1038 }, { "epoch": 8.312, "grad_norm": 23.95860481262207, "learning_rate": 4.1280000000000005e-05, "loss": 1.8853, "step": 1039 }, { "epoch": 8.32, "grad_norm": 46.34944534301758, "learning_rate": 4.1320000000000004e-05, "loss": 1.6069, "step": 1040 }, { "epoch": 8.328, "grad_norm": 46.233970642089844, "learning_rate": 4.1360000000000004e-05, "loss": 1.4775, "step": 1041 }, { "epoch": 8.336, "grad_norm": 37.54155731201172, "learning_rate": 4.14e-05, "loss": 2.1485, "step": 1042 }, { "epoch": 8.344, "grad_norm": 25.361961364746094, "learning_rate": 4.144e-05, "loss": 1.7033, "step": 1043 }, { "epoch": 8.352, "grad_norm": 25.03550148010254, "learning_rate": 4.148e-05, "loss": 1.5888, "step": 1044 }, { "epoch": 8.36, "grad_norm": 34.25558090209961, "learning_rate": 4.152e-05, "loss": 1.5767, "step": 1045 }, { "epoch": 8.368, "grad_norm": 55.45250701904297, "learning_rate": 4.156e-05, "loss": 1.39, "step": 1046 }, { "epoch": 8.376, "grad_norm": 22.730791091918945, "learning_rate": 4.16e-05, "loss": 2.0768, "step": 1047 }, { "epoch": 8.384, "grad_norm": 24.38344383239746, "learning_rate": 4.164e-05, "loss": 1.4756, "step": 1048 }, { "epoch": 8.392, "grad_norm": 82.42999267578125, "learning_rate": 4.168e-05, "loss": 1.2946, "step": 1049 }, { "epoch": 8.4, "grad_norm": 66.61372375488281, "learning_rate": 4.172e-05, "loss": 1.427, "step": 1050 }, { "epoch": 
8.408, "grad_norm": 20.414756774902344, "learning_rate": 4.176000000000001e-05, "loss": 1.4824, "step": 1051 }, { "epoch": 8.416, "grad_norm": 46.838348388671875, "learning_rate": 4.18e-05, "loss": 1.414, "step": 1052 }, { "epoch": 8.424, "grad_norm": 16.47621726989746, "learning_rate": 4.184e-05, "loss": 1.3917, "step": 1053 }, { "epoch": 8.432, "grad_norm": 24.33968734741211, "learning_rate": 4.1880000000000006e-05, "loss": 1.7481, "step": 1054 }, { "epoch": 8.44, "grad_norm": 65.33405303955078, "learning_rate": 4.1920000000000005e-05, "loss": 1.9861, "step": 1055 }, { "epoch": 8.448, "grad_norm": 32.70745849609375, "learning_rate": 4.196e-05, "loss": 1.8059, "step": 1056 }, { "epoch": 8.456, "grad_norm": 31.917753219604492, "learning_rate": 4.2e-05, "loss": 1.6062, "step": 1057 }, { "epoch": 8.464, "grad_norm": 44.54636001586914, "learning_rate": 4.2040000000000004e-05, "loss": 1.516, "step": 1058 }, { "epoch": 8.472, "grad_norm": 51.77375411987305, "learning_rate": 4.2080000000000004e-05, "loss": 1.4032, "step": 1059 }, { "epoch": 8.48, "grad_norm": 30.577342987060547, "learning_rate": 4.212e-05, "loss": 1.6965, "step": 1060 }, { "epoch": 8.488, "grad_norm": 36.7684440612793, "learning_rate": 4.2159999999999996e-05, "loss": 1.1752, "step": 1061 }, { "epoch": 8.496, "grad_norm": 34.93931579589844, "learning_rate": 4.22e-05, "loss": 1.6112, "step": 1062 }, { "epoch": 8.504, "grad_norm": 26.884471893310547, "learning_rate": 4.224e-05, "loss": 1.6921, "step": 1063 }, { "epoch": 8.512, "grad_norm": 42.603515625, "learning_rate": 4.228e-05, "loss": 1.3449, "step": 1064 }, { "epoch": 8.52, "grad_norm": 48.5390625, "learning_rate": 4.232e-05, "loss": 2.4894, "step": 1065 }, { "epoch": 8.528, "grad_norm": 30.596040725708008, "learning_rate": 4.236e-05, "loss": 1.2284, "step": 1066 }, { "epoch": 8.536, "grad_norm": 47.68702697753906, "learning_rate": 4.24e-05, "loss": 1.4915, "step": 1067 }, { "epoch": 8.544, "grad_norm": 66.66346740722656, "learning_rate": 4.244e-05, 
"loss": 1.246, "step": 1068 }, { "epoch": 8.552, "grad_norm": 24.765411376953125, "learning_rate": 4.248e-05, "loss": 1.6113, "step": 1069 }, { "epoch": 8.56, "grad_norm": 58.8661994934082, "learning_rate": 4.2520000000000006e-05, "loss": 1.2473, "step": 1070 }, { "epoch": 8.568, "grad_norm": 28.36956024169922, "learning_rate": 4.256e-05, "loss": 1.5678, "step": 1071 }, { "epoch": 8.576, "grad_norm": 31.3658447265625, "learning_rate": 4.26e-05, "loss": 1.7041, "step": 1072 }, { "epoch": 8.584, "grad_norm": 44.01059341430664, "learning_rate": 4.2640000000000005e-05, "loss": 1.4021, "step": 1073 }, { "epoch": 8.592, "grad_norm": 68.8801040649414, "learning_rate": 4.2680000000000005e-05, "loss": 1.4278, "step": 1074 }, { "epoch": 8.6, "grad_norm": 82.04277038574219, "learning_rate": 4.2720000000000004e-05, "loss": 2.2082, "step": 1075 }, { "epoch": 8.608, "grad_norm": 22.641529083251953, "learning_rate": 4.276e-05, "loss": 2.0108, "step": 1076 }, { "epoch": 8.616, "grad_norm": 149.35231018066406, "learning_rate": 4.2800000000000004e-05, "loss": 1.3935, "step": 1077 }, { "epoch": 8.624, "grad_norm": 58.1551628112793, "learning_rate": 4.284e-05, "loss": 1.2403, "step": 1078 }, { "epoch": 8.632, "grad_norm": 32.97579574584961, "learning_rate": 4.288e-05, "loss": 1.5694, "step": 1079 }, { "epoch": 8.64, "grad_norm": 34.0920295715332, "learning_rate": 4.292e-05, "loss": 1.3486, "step": 1080 }, { "epoch": 8.648, "grad_norm": 28.659469604492188, "learning_rate": 4.296e-05, "loss": 1.5512, "step": 1081 }, { "epoch": 8.656, "grad_norm": 87.00048828125, "learning_rate": 4.3e-05, "loss": 2.1739, "step": 1082 }, { "epoch": 8.664, "grad_norm": 179.03179931640625, "learning_rate": 4.304e-05, "loss": 1.3304, "step": 1083 }, { "epoch": 8.672, "grad_norm": 33.203941345214844, "learning_rate": 4.308e-05, "loss": 1.8258, "step": 1084 }, { "epoch": 8.68, "grad_norm": 45.28661346435547, "learning_rate": 4.312000000000001e-05, "loss": 1.9004, "step": 1085 }, { "epoch": 8.688, "grad_norm": 
63.150569915771484, "learning_rate": 4.316e-05, "loss": 1.8893, "step": 1086 }, { "epoch": 8.696, "grad_norm": 79.56739807128906, "learning_rate": 4.32e-05, "loss": 1.5202, "step": 1087 }, { "epoch": 8.704, "grad_norm": 27.167613983154297, "learning_rate": 4.324e-05, "loss": 1.8696, "step": 1088 }, { "epoch": 8.712, "grad_norm": 17.22864532470703, "learning_rate": 4.3280000000000006e-05, "loss": 1.6379, "step": 1089 }, { "epoch": 8.72, "grad_norm": 47.094966888427734, "learning_rate": 4.332e-05, "loss": 1.567, "step": 1090 }, { "epoch": 8.728, "grad_norm": 39.89628219604492, "learning_rate": 4.336e-05, "loss": 1.6262, "step": 1091 }, { "epoch": 8.736, "grad_norm": 25.6459903717041, "learning_rate": 4.3400000000000005e-05, "loss": 1.553, "step": 1092 }, { "epoch": 8.744, "grad_norm": 24.423330307006836, "learning_rate": 4.3440000000000004e-05, "loss": 1.6298, "step": 1093 }, { "epoch": 8.752, "grad_norm": 68.6763687133789, "learning_rate": 4.3480000000000004e-05, "loss": 1.4901, "step": 1094 }, { "epoch": 8.76, "grad_norm": 28.19183921813965, "learning_rate": 4.352e-05, "loss": 1.5415, "step": 1095 }, { "epoch": 8.768, "grad_norm": 26.14792251586914, "learning_rate": 4.356e-05, "loss": 2.4294, "step": 1096 }, { "epoch": 8.776, "grad_norm": 57.289581298828125, "learning_rate": 4.36e-05, "loss": 1.8867, "step": 1097 }, { "epoch": 8.784, "grad_norm": 27.538223266601562, "learning_rate": 4.364e-05, "loss": 1.2994, "step": 1098 }, { "epoch": 8.792, "grad_norm": 77.57711791992188, "learning_rate": 4.368e-05, "loss": 1.4106, "step": 1099 }, { "epoch": 8.8, "grad_norm": 24.999685287475586, "learning_rate": 4.372e-05, "loss": 1.2019, "step": 1100 }, { "epoch": 8.808, "grad_norm": 30.831369400024414, "learning_rate": 4.376e-05, "loss": 1.5888, "step": 1101 }, { "epoch": 8.816, "grad_norm": 87.1764907836914, "learning_rate": 4.38e-05, "loss": 1.8202, "step": 1102 }, { "epoch": 8.824, "grad_norm": 24.261428833007812, "learning_rate": 4.384e-05, "loss": 1.4268, "step": 1103 }, { 
"epoch": 8.832, "grad_norm": 107.11934661865234, "learning_rate": 4.388000000000001e-05, "loss": 1.7108, "step": 1104 }, { "epoch": 8.84, "grad_norm": 27.530845642089844, "learning_rate": 4.392e-05, "loss": 1.4198, "step": 1105 }, { "epoch": 8.848, "grad_norm": 31.605060577392578, "learning_rate": 4.396e-05, "loss": 2.965, "step": 1106 }, { "epoch": 8.856, "grad_norm": 31.422739028930664, "learning_rate": 4.4000000000000006e-05, "loss": 1.2453, "step": 1107 }, { "epoch": 8.864, "grad_norm": 16.70525550842285, "learning_rate": 4.4040000000000005e-05, "loss": 1.5469, "step": 1108 }, { "epoch": 8.872, "grad_norm": 33.1476936340332, "learning_rate": 4.4080000000000005e-05, "loss": 1.7837, "step": 1109 }, { "epoch": 8.88, "grad_norm": 28.167198181152344, "learning_rate": 4.412e-05, "loss": 1.338, "step": 1110 }, { "epoch": 8.888, "grad_norm": 19.688508987426758, "learning_rate": 4.4160000000000004e-05, "loss": 1.5323, "step": 1111 }, { "epoch": 8.896, "grad_norm": 41.49324417114258, "learning_rate": 4.4200000000000004e-05, "loss": 1.53, "step": 1112 }, { "epoch": 8.904, "grad_norm": 58.993595123291016, "learning_rate": 4.424e-05, "loss": 3.049, "step": 1113 }, { "epoch": 8.912, "grad_norm": 22.446685791015625, "learning_rate": 4.428e-05, "loss": 1.9352, "step": 1114 }, { "epoch": 8.92, "grad_norm": 170.25782775878906, "learning_rate": 4.432e-05, "loss": 1.6336, "step": 1115 }, { "epoch": 8.928, "grad_norm": 43.216400146484375, "learning_rate": 4.436e-05, "loss": 1.7982, "step": 1116 }, { "epoch": 8.936, "grad_norm": 66.08413696289062, "learning_rate": 4.44e-05, "loss": 1.9078, "step": 1117 }, { "epoch": 8.943999999999999, "grad_norm": 48.8390998840332, "learning_rate": 4.444e-05, "loss": 1.6426, "step": 1118 }, { "epoch": 8.952, "grad_norm": 55.04386901855469, "learning_rate": 4.448e-05, "loss": 1.6606, "step": 1119 }, { "epoch": 8.96, "grad_norm": 64.84349060058594, "learning_rate": 4.452e-05, "loss": 1.4536, "step": 1120 }, { "epoch": 8.968, "grad_norm": 
91.57598876953125, "learning_rate": 4.456e-05, "loss": 1.7039, "step": 1121 }, { "epoch": 8.975999999999999, "grad_norm": 35.76445770263672, "learning_rate": 4.46e-05, "loss": 2.4835, "step": 1122 }, { "epoch": 8.984, "grad_norm": 72.70732116699219, "learning_rate": 4.4640000000000006e-05, "loss": 1.2273, "step": 1123 }, { "epoch": 8.992, "grad_norm": 52.42277145385742, "learning_rate": 4.468e-05, "loss": 1.7246, "step": 1124 }, { "epoch": 9.0, "grad_norm": 34.653568267822266, "learning_rate": 4.472e-05, "loss": 1.2428, "step": 1125 }, { "epoch": 9.0, "eval_loss": 1.793586254119873, "eval_map": 0.1445, "eval_map_50": 0.3254, "eval_map_75": 0.1044, "eval_map_Coverall": 0.375, "eval_map_Face_Shield": 0.0882, "eval_map_Gloves": 0.0875, "eval_map_Goggles": 0.0, "eval_map_Mask": 0.1719, "eval_map_large": 0.1765, "eval_map_medium": 0.0754, "eval_map_small": 0.0504, "eval_mar_1": 0.1592, "eval_mar_10": 0.2824, "eval_mar_100": 0.2948, "eval_mar_100_Coverall": 0.6578, "eval_mar_100_Face_Shield": 0.2765, "eval_mar_100_Gloves": 0.2475, "eval_mar_100_Goggles": 0.0, "eval_mar_100_Mask": 0.2923, "eval_mar_large": 0.3837, "eval_mar_medium": 0.1495, "eval_mar_small": 0.0686, "eval_runtime": 2.4568, "eval_samples_per_second": 11.804, "eval_steps_per_second": 0.814, "step": 1125 }, { "epoch": 9.008, "grad_norm": 43.769866943359375, "learning_rate": 4.4760000000000005e-05, "loss": 1.6026, "step": 1126 }, { "epoch": 9.016, "grad_norm": 28.596309661865234, "learning_rate": 4.4800000000000005e-05, "loss": 1.5903, "step": 1127 }, { "epoch": 9.024, "grad_norm": 32.6689567565918, "learning_rate": 4.4840000000000004e-05, "loss": 1.3519, "step": 1128 }, { "epoch": 9.032, "grad_norm": 106.22418212890625, "learning_rate": 4.488e-05, "loss": 1.9343, "step": 1129 }, { "epoch": 9.04, "grad_norm": 31.662519454956055, "learning_rate": 4.4920000000000004e-05, "loss": 1.9338, "step": 1130 }, { "epoch": 9.048, "grad_norm": 33.53565216064453, "learning_rate": 4.496e-05, "loss": 1.3943, "step": 1131 }, 
{ "epoch": 9.056, "grad_norm": 41.16272735595703, "learning_rate": 4.5e-05, "loss": 2.0855, "step": 1132 }, { "epoch": 9.064, "grad_norm": 121.68843078613281, "learning_rate": 4.504e-05, "loss": 1.596, "step": 1133 }, { "epoch": 9.072, "grad_norm": 40.7050895690918, "learning_rate": 4.508e-05, "loss": 1.7486, "step": 1134 }, { "epoch": 9.08, "grad_norm": 55.394676208496094, "learning_rate": 4.512e-05, "loss": 1.9617, "step": 1135 }, { "epoch": 9.088, "grad_norm": 49.2972526550293, "learning_rate": 4.516e-05, "loss": 1.5633, "step": 1136 }, { "epoch": 9.096, "grad_norm": 34.75592803955078, "learning_rate": 4.52e-05, "loss": 1.6376, "step": 1137 }, { "epoch": 9.104, "grad_norm": 34.551509857177734, "learning_rate": 4.524000000000001e-05, "loss": 1.5779, "step": 1138 }, { "epoch": 9.112, "grad_norm": 65.46943664550781, "learning_rate": 4.528e-05, "loss": 1.6601, "step": 1139 }, { "epoch": 9.12, "grad_norm": 26.064285278320312, "learning_rate": 4.532e-05, "loss": 1.4285, "step": 1140 }, { "epoch": 9.128, "grad_norm": 229.73361206054688, "learning_rate": 4.536e-05, "loss": 1.5799, "step": 1141 }, { "epoch": 9.136, "grad_norm": 52.155006408691406, "learning_rate": 4.5400000000000006e-05, "loss": 1.2966, "step": 1142 }, { "epoch": 9.144, "grad_norm": 28.098772048950195, "learning_rate": 4.5440000000000005e-05, "loss": 1.8627, "step": 1143 }, { "epoch": 9.152, "grad_norm": 65.13367462158203, "learning_rate": 4.548e-05, "loss": 1.6407, "step": 1144 }, { "epoch": 9.16, "grad_norm": 77.72471618652344, "learning_rate": 4.5520000000000005e-05, "loss": 1.3917, "step": 1145 }, { "epoch": 9.168, "grad_norm": 30.34126091003418, "learning_rate": 4.5560000000000004e-05, "loss": 1.371, "step": 1146 }, { "epoch": 9.176, "grad_norm": 30.02289390563965, "learning_rate": 4.5600000000000004e-05, "loss": 1.2368, "step": 1147 }, { "epoch": 9.184, "grad_norm": 38.85751724243164, "learning_rate": 4.564e-05, "loss": 1.6429, "step": 1148 }, { "epoch": 9.192, "grad_norm": 37.68151092529297, 
"learning_rate": 4.568e-05, "loss": 1.7985, "step": 1149 }, { "epoch": 9.2, "grad_norm": 45.32236099243164, "learning_rate": 4.572e-05, "loss": 2.1263, "step": 1150 }, { "epoch": 9.208, "grad_norm": 31.35352325439453, "learning_rate": 4.576e-05, "loss": 1.4453, "step": 1151 }, { "epoch": 9.216, "grad_norm": 28.456159591674805, "learning_rate": 4.58e-05, "loss": 1.1699, "step": 1152 }, { "epoch": 9.224, "grad_norm": 68.10751342773438, "learning_rate": 4.584e-05, "loss": 1.5303, "step": 1153 }, { "epoch": 9.232, "grad_norm": 83.67204284667969, "learning_rate": 4.588e-05, "loss": 1.3408, "step": 1154 }, { "epoch": 9.24, "grad_norm": 37.69438552856445, "learning_rate": 4.592e-05, "loss": 1.9127, "step": 1155 }, { "epoch": 9.248, "grad_norm": 35.708229064941406, "learning_rate": 4.596e-05, "loss": 1.3243, "step": 1156 }, { "epoch": 9.256, "grad_norm": 24.54587745666504, "learning_rate": 4.600000000000001e-05, "loss": 1.398, "step": 1157 }, { "epoch": 9.264, "grad_norm": 77.52049255371094, "learning_rate": 4.604e-05, "loss": 1.9321, "step": 1158 }, { "epoch": 9.272, "grad_norm": 769.1162719726562, "learning_rate": 4.608e-05, "loss": 1.3672, "step": 1159 }, { "epoch": 9.28, "grad_norm": 68.36162567138672, "learning_rate": 4.612e-05, "loss": 1.1434, "step": 1160 }, { "epoch": 9.288, "grad_norm": 34.8567008972168, "learning_rate": 4.6160000000000005e-05, "loss": 1.2903, "step": 1161 }, { "epoch": 9.296, "grad_norm": 36.38835525512695, "learning_rate": 4.6200000000000005e-05, "loss": 1.9448, "step": 1162 }, { "epoch": 9.304, "grad_norm": 62.81006622314453, "learning_rate": 4.624e-05, "loss": 1.7873, "step": 1163 }, { "epoch": 9.312, "grad_norm": 28.95639419555664, "learning_rate": 4.6280000000000004e-05, "loss": 1.6532, "step": 1164 }, { "epoch": 9.32, "grad_norm": 67.7918930053711, "learning_rate": 4.6320000000000004e-05, "loss": 1.3912, "step": 1165 }, { "epoch": 9.328, "grad_norm": 65.89665985107422, "learning_rate": 4.636e-05, "loss": 1.8173, "step": 1166 }, { "epoch": 
9.336, "grad_norm": 25.937631607055664, "learning_rate": 4.64e-05, "loss": 2.1452, "step": 1167 }, { "epoch": 9.344, "grad_norm": 48.43488311767578, "learning_rate": 4.644e-05, "loss": 2.276, "step": 1168 }, { "epoch": 9.352, "grad_norm": 49.78904724121094, "learning_rate": 4.648e-05, "loss": 1.1046, "step": 1169 }, { "epoch": 9.36, "grad_norm": 22.104991912841797, "learning_rate": 4.652e-05, "loss": 1.7428, "step": 1170 }, { "epoch": 9.368, "grad_norm": 23.655973434448242, "learning_rate": 4.656e-05, "loss": 2.0832, "step": 1171 }, { "epoch": 9.376, "grad_norm": 71.95767974853516, "learning_rate": 4.660000000000001e-05, "loss": 1.6312, "step": 1172 }, { "epoch": 9.384, "grad_norm": 72.10039520263672, "learning_rate": 4.664e-05, "loss": 1.4099, "step": 1173 }, { "epoch": 9.392, "grad_norm": 54.76110076904297, "learning_rate": 4.668e-05, "loss": 1.5134, "step": 1174 }, { "epoch": 9.4, "grad_norm": 30.848848342895508, "learning_rate": 4.672e-05, "loss": 1.3475, "step": 1175 }, { "epoch": 9.408, "grad_norm": 32.732120513916016, "learning_rate": 4.6760000000000006e-05, "loss": 1.3776, "step": 1176 }, { "epoch": 9.416, "grad_norm": 37.831180572509766, "learning_rate": 4.6800000000000006e-05, "loss": 1.3603, "step": 1177 }, { "epoch": 9.424, "grad_norm": 42.051239013671875, "learning_rate": 4.684e-05, "loss": 2.1165, "step": 1178 }, { "epoch": 9.432, "grad_norm": 29.240646362304688, "learning_rate": 4.688e-05, "loss": 1.5438, "step": 1179 }, { "epoch": 9.44, "grad_norm": 36.10321807861328, "learning_rate": 4.6920000000000005e-05, "loss": 1.667, "step": 1180 }, { "epoch": 9.448, "grad_norm": 25.1098690032959, "learning_rate": 4.6960000000000004e-05, "loss": 1.4075, "step": 1181 }, { "epoch": 9.456, "grad_norm": 97.07207489013672, "learning_rate": 4.7e-05, "loss": 1.5636, "step": 1182 }, { "epoch": 9.464, "grad_norm": 25.30231285095215, "learning_rate": 4.7040000000000004e-05, "loss": 1.6506, "step": 1183 }, { "epoch": 9.472, "grad_norm": 39.09775161743164, 
"learning_rate": 4.708e-05, "loss": 1.736, "step": 1184 }, { "epoch": 9.48, "grad_norm": 27.613128662109375, "learning_rate": 4.712e-05, "loss": 1.8124, "step": 1185 }, { "epoch": 9.488, "grad_norm": 43.04789733886719, "learning_rate": 4.716e-05, "loss": 1.5306, "step": 1186 }, { "epoch": 9.496, "grad_norm": 69.62361907958984, "learning_rate": 4.72e-05, "loss": 2.0104, "step": 1187 }, { "epoch": 9.504, "grad_norm": 110.43602752685547, "learning_rate": 4.724e-05, "loss": 1.7238, "step": 1188 }, { "epoch": 9.512, "grad_norm": 36.49103927612305, "learning_rate": 4.728e-05, "loss": 1.6405, "step": 1189 }, { "epoch": 9.52, "grad_norm": 31.574094772338867, "learning_rate": 4.732e-05, "loss": 1.5222, "step": 1190 }, { "epoch": 9.528, "grad_norm": 47.5517692565918, "learning_rate": 4.736000000000001e-05, "loss": 1.7976, "step": 1191 }, { "epoch": 9.536, "grad_norm": 119.22277069091797, "learning_rate": 4.74e-05, "loss": 2.1677, "step": 1192 }, { "epoch": 9.544, "grad_norm": 40.22673034667969, "learning_rate": 4.744e-05, "loss": 1.7268, "step": 1193 }, { "epoch": 9.552, "grad_norm": 33.564701080322266, "learning_rate": 4.748e-05, "loss": 1.5517, "step": 1194 }, { "epoch": 9.56, "grad_norm": 37.162166595458984, "learning_rate": 4.7520000000000006e-05, "loss": 1.6256, "step": 1195 }, { "epoch": 9.568, "grad_norm": 44.43046951293945, "learning_rate": 4.7560000000000005e-05, "loss": 1.4364, "step": 1196 }, { "epoch": 9.576, "grad_norm": 205.66708374023438, "learning_rate": 4.76e-05, "loss": 1.8824, "step": 1197 }, { "epoch": 9.584, "grad_norm": 75.77014923095703, "learning_rate": 4.7640000000000005e-05, "loss": 1.5575, "step": 1198 }, { "epoch": 9.592, "grad_norm": 33.05168151855469, "learning_rate": 4.7680000000000004e-05, "loss": 1.6067, "step": 1199 }, { "epoch": 9.6, "grad_norm": 69.47019958496094, "learning_rate": 4.7720000000000004e-05, "loss": 1.7618, "step": 1200 }, { "epoch": 9.608, "grad_norm": 131.83450317382812, "learning_rate": 4.7760000000000004e-05, "loss": 
1.8812, "step": 1201 }, { "epoch": 9.616, "grad_norm": 90.1714859008789, "learning_rate": 4.78e-05, "loss": 1.9008, "step": 1202 }, { "epoch": 9.624, "grad_norm": 66.16236114501953, "learning_rate": 4.784e-05, "loss": 3.3674, "step": 1203 }, { "epoch": 9.632, "grad_norm": 45.53603744506836, "learning_rate": 4.788e-05, "loss": 2.0894, "step": 1204 }, { "epoch": 9.64, "grad_norm": 29.080001831054688, "learning_rate": 4.792e-05, "loss": 1.8269, "step": 1205 }, { "epoch": 9.648, "grad_norm": 48.125450134277344, "learning_rate": 4.796e-05, "loss": 1.5531, "step": 1206 }, { "epoch": 9.656, "grad_norm": 29.70354652404785, "learning_rate": 4.8e-05, "loss": 1.7262, "step": 1207 }, { "epoch": 9.664, "grad_norm": 42.73003005981445, "learning_rate": 4.804e-05, "loss": 1.5813, "step": 1208 }, { "epoch": 9.672, "grad_norm": 43.962039947509766, "learning_rate": 4.808e-05, "loss": 1.8105, "step": 1209 }, { "epoch": 9.68, "grad_norm": 36.06578826904297, "learning_rate": 4.812000000000001e-05, "loss": 1.6574, "step": 1210 }, { "epoch": 9.688, "grad_norm": 249.21298217773438, "learning_rate": 4.816e-05, "loss": 1.6234, "step": 1211 }, { "epoch": 9.696, "grad_norm": 44.33867645263672, "learning_rate": 4.82e-05, "loss": 1.8742, "step": 1212 }, { "epoch": 9.704, "grad_norm": 36.68864822387695, "learning_rate": 4.824e-05, "loss": 1.7099, "step": 1213 }, { "epoch": 9.712, "grad_norm": 32.42331314086914, "learning_rate": 4.8280000000000005e-05, "loss": 1.4917, "step": 1214 }, { "epoch": 9.72, "grad_norm": 90.9063720703125, "learning_rate": 4.8320000000000005e-05, "loss": 2.5452, "step": 1215 }, { "epoch": 9.728, "grad_norm": 39.939552307128906, "learning_rate": 4.836e-05, "loss": 1.5595, "step": 1216 }, { "epoch": 9.736, "grad_norm": 51.26702880859375, "learning_rate": 4.8400000000000004e-05, "loss": 1.4912, "step": 1217 }, { "epoch": 9.744, "grad_norm": 27.53841209411621, "learning_rate": 4.8440000000000004e-05, "loss": 1.5979, "step": 1218 }, { "epoch": 9.752, "grad_norm": 
55.835784912109375, "learning_rate": 4.8480000000000003e-05, "loss": 1.6483, "step": 1219 }, { "epoch": 9.76, "grad_norm": 56.53507614135742, "learning_rate": 4.852e-05, "loss": 1.3578, "step": 1220 }, { "epoch": 9.768, "grad_norm": 200.9369354248047, "learning_rate": 4.856e-05, "loss": 1.3233, "step": 1221 }, { "epoch": 9.776, "grad_norm": 38.89691162109375, "learning_rate": 4.86e-05, "loss": 1.2175, "step": 1222 }, { "epoch": 9.784, "grad_norm": 26.65430450439453, "learning_rate": 4.864e-05, "loss": 1.4487, "step": 1223 }, { "epoch": 9.792, "grad_norm": 23.850017547607422, "learning_rate": 4.868e-05, "loss": 1.3213, "step": 1224 }, { "epoch": 9.8, "grad_norm": 44.25813293457031, "learning_rate": 4.872000000000001e-05, "loss": 2.0092, "step": 1225 }, { "epoch": 9.808, "grad_norm": 29.481464385986328, "learning_rate": 4.876e-05, "loss": 1.5658, "step": 1226 }, { "epoch": 9.816, "grad_norm": 38.0639762878418, "learning_rate": 4.88e-05, "loss": 1.7378, "step": 1227 }, { "epoch": 9.824, "grad_norm": 124.99840545654297, "learning_rate": 4.884e-05, "loss": 1.4363, "step": 1228 }, { "epoch": 9.832, "grad_norm": 38.168304443359375, "learning_rate": 4.8880000000000006e-05, "loss": 1.6264, "step": 1229 }, { "epoch": 9.84, "grad_norm": 25.367855072021484, "learning_rate": 4.8920000000000006e-05, "loss": 2.1324, "step": 1230 }, { "epoch": 9.848, "grad_norm": 36.316139221191406, "learning_rate": 4.896e-05, "loss": 1.1356, "step": 1231 }, { "epoch": 9.856, "grad_norm": 26.51616859436035, "learning_rate": 4.9e-05, "loss": 1.7667, "step": 1232 }, { "epoch": 9.864, "grad_norm": 48.68702697753906, "learning_rate": 4.9040000000000005e-05, "loss": 1.2496, "step": 1233 }, { "epoch": 9.872, "grad_norm": 26.97207260131836, "learning_rate": 4.9080000000000004e-05, "loss": 3.5286, "step": 1234 }, { "epoch": 9.88, "grad_norm": 93.05331420898438, "learning_rate": 4.9120000000000004e-05, "loss": 1.369, "step": 1235 }, { "epoch": 9.888, "grad_norm": 34.017555236816406, "learning_rate": 
4.9160000000000004e-05, "loss": 1.4692, "step": 1236 }, { "epoch": 9.896, "grad_norm": 58.98887634277344, "learning_rate": 4.92e-05, "loss": 1.4661, "step": 1237 }, { "epoch": 9.904, "grad_norm": 30.69717788696289, "learning_rate": 4.924e-05, "loss": 1.128, "step": 1238 }, { "epoch": 9.912, "grad_norm": 26.50019645690918, "learning_rate": 4.928e-05, "loss": 1.4349, "step": 1239 }, { "epoch": 9.92, "grad_norm": 25.90105628967285, "learning_rate": 4.932e-05, "loss": 1.3591, "step": 1240 }, { "epoch": 9.928, "grad_norm": 472.3667907714844, "learning_rate": 4.936e-05, "loss": 2.2662, "step": 1241 }, { "epoch": 9.936, "grad_norm": 143.93865966796875, "learning_rate": 4.94e-05, "loss": 1.5256, "step": 1242 }, { "epoch": 9.943999999999999, "grad_norm": 40.69548797607422, "learning_rate": 4.944e-05, "loss": 1.8076, "step": 1243 }, { "epoch": 9.952, "grad_norm": 76.54237365722656, "learning_rate": 4.948000000000001e-05, "loss": 1.4924, "step": 1244 }, { "epoch": 9.96, "grad_norm": 29.390634536743164, "learning_rate": 4.952e-05, "loss": 1.2733, "step": 1245 }, { "epoch": 9.968, "grad_norm": 35.993106842041016, "learning_rate": 4.956e-05, "loss": 1.5338, "step": 1246 }, { "epoch": 9.975999999999999, "grad_norm": 44.64107894897461, "learning_rate": 4.96e-05, "loss": 1.6782, "step": 1247 }, { "epoch": 9.984, "grad_norm": 36.70760726928711, "learning_rate": 4.9640000000000006e-05, "loss": 1.921, "step": 1248 }, { "epoch": 9.992, "grad_norm": 38.18316650390625, "learning_rate": 4.9680000000000005e-05, "loss": 1.4717, "step": 1249 }, { "epoch": 10.0, "grad_norm": 31.457767486572266, "learning_rate": 4.972e-05, "loss": 1.6719, "step": 1250 }, { "epoch": 10.0, "eval_loss": 1.701431155204773, "eval_map": 0.1615, "eval_map_50": 0.3473, "eval_map_75": 0.1144, "eval_map_Coverall": 0.4379, "eval_map_Face_Shield": 0.0927, "eval_map_Gloves": 0.1082, "eval_map_Goggles": 0.0, "eval_map_Mask": 0.1689, "eval_map_large": 0.1763, "eval_map_medium": 0.0959, "eval_map_small": 0.0505, "eval_mar_1": 
0.1684, "eval_mar_10": 0.2952, "eval_mar_100": 0.3131, "eval_mar_100_Coverall": 0.6156, "eval_mar_100_Face_Shield": 0.3176, "eval_mar_100_Gloves": 0.2787, "eval_mar_100_Goggles": 0.0, "eval_mar_100_Mask": 0.3538, "eval_mar_large": 0.3635, "eval_mar_medium": 0.2299, "eval_mar_small": 0.0952, "eval_runtime": 2.5679, "eval_samples_per_second": 11.293, "eval_steps_per_second": 0.779, "step": 1250 }, { "epoch": 10.008, "grad_norm": 26.28108024597168, "learning_rate": 4.976e-05, "loss": 1.3309, "step": 1251 }, { "epoch": 10.016, "grad_norm": 312.1358642578125, "learning_rate": 4.9800000000000004e-05, "loss": 1.7683, "step": 1252 }, { "epoch": 10.024, "grad_norm": 31.767045974731445, "learning_rate": 4.9840000000000004e-05, "loss": 1.7762, "step": 1253 }, { "epoch": 10.032, "grad_norm": 87.77415466308594, "learning_rate": 4.9880000000000004e-05, "loss": 1.5946, "step": 1254 }, { "epoch": 10.04, "grad_norm": 94.14738464355469, "learning_rate": 4.992e-05, "loss": 1.8982, "step": 1255 }, { "epoch": 10.048, "grad_norm": 45.06526565551758, "learning_rate": 4.996e-05, "loss": 1.5962, "step": 1256 }, { "epoch": 10.056, "grad_norm": 51.99589538574219, "learning_rate": 5e-05, "loss": 1.7479, "step": 1257 }, { "epoch": 10.064, "grad_norm": 110.78372192382812, "learning_rate": 4.999555555555556e-05, "loss": 1.8083, "step": 1258 }, { "epoch": 10.072, "grad_norm": 31.169227600097656, "learning_rate": 4.999111111111111e-05, "loss": 1.726, "step": 1259 }, { "epoch": 10.08, "grad_norm": 29.31698226928711, "learning_rate": 4.9986666666666674e-05, "loss": 1.6532, "step": 1260 }, { "epoch": 10.088, "grad_norm": 63.45892333984375, "learning_rate": 4.998222222222222e-05, "loss": 1.6657, "step": 1261 }, { "epoch": 10.096, "grad_norm": 22.021238327026367, "learning_rate": 4.997777777777778e-05, "loss": 1.3631, "step": 1262 }, { "epoch": 10.104, "grad_norm": 32.234954833984375, "learning_rate": 4.997333333333333e-05, "loss": 1.6019, "step": 1263 }, { "epoch": 10.112, "grad_norm": 
30.342721939086914, "learning_rate": 4.996888888888889e-05, "loss": 2.0397, "step": 1264 }, { "epoch": 10.12, "grad_norm": 28.344493865966797, "learning_rate": 4.996444444444445e-05, "loss": 1.6199, "step": 1265 }, { "epoch": 10.128, "grad_norm": 38.5944709777832, "learning_rate": 4.996e-05, "loss": 1.5811, "step": 1266 }, { "epoch": 10.136, "grad_norm": 77.1325454711914, "learning_rate": 4.995555555555556e-05, "loss": 1.3273, "step": 1267 }, { "epoch": 10.144, "grad_norm": 81.08718872070312, "learning_rate": 4.995111111111111e-05, "loss": 1.5723, "step": 1268 }, { "epoch": 10.152, "grad_norm": 29.655155181884766, "learning_rate": 4.994666666666667e-05, "loss": 1.6696, "step": 1269 }, { "epoch": 10.16, "grad_norm": 43.077030181884766, "learning_rate": 4.994222222222222e-05, "loss": 1.2063, "step": 1270 }, { "epoch": 10.168, "grad_norm": 33.29952621459961, "learning_rate": 4.993777777777778e-05, "loss": 1.4054, "step": 1271 }, { "epoch": 10.176, "grad_norm": 28.39236831665039, "learning_rate": 4.993333333333334e-05, "loss": 1.7636, "step": 1272 }, { "epoch": 10.184, "grad_norm": 40.06325149536133, "learning_rate": 4.9928888888888893e-05, "loss": 1.617, "step": 1273 }, { "epoch": 10.192, "grad_norm": 29.089736938476562, "learning_rate": 4.992444444444445e-05, "loss": 1.5006, "step": 1274 }, { "epoch": 10.2, "grad_norm": 30.26708221435547, "learning_rate": 4.992e-05, "loss": 2.0417, "step": 1275 }, { "epoch": 10.208, "grad_norm": 27.85426902770996, "learning_rate": 4.991555555555556e-05, "loss": 1.1668, "step": 1276 }, { "epoch": 10.216, "grad_norm": 57.03729248046875, "learning_rate": 4.991111111111111e-05, "loss": 3.3846, "step": 1277 }, { "epoch": 10.224, "grad_norm": 39.3064079284668, "learning_rate": 4.990666666666667e-05, "loss": 1.4342, "step": 1278 }, { "epoch": 10.232, "grad_norm": 21.944520950317383, "learning_rate": 4.990222222222222e-05, "loss": 1.5266, "step": 1279 }, { "epoch": 10.24, "grad_norm": 89.94463348388672, "learning_rate": 
4.9897777777777784e-05, "loss": 1.549, "step": 1280 }, { "epoch": 10.248, "grad_norm": 35.39846420288086, "learning_rate": 4.989333333333334e-05, "loss": 2.9046, "step": 1281 }, { "epoch": 10.256, "grad_norm": 17.033910751342773, "learning_rate": 4.9888888888888894e-05, "loss": 1.6599, "step": 1282 }, { "epoch": 10.264, "grad_norm": 41.21014404296875, "learning_rate": 4.988444444444444e-05, "loss": 1.196, "step": 1283 }, { "epoch": 10.272, "grad_norm": 28.811582565307617, "learning_rate": 4.9880000000000004e-05, "loss": 1.2208, "step": 1284 }, { "epoch": 10.28, "grad_norm": 52.59545135498047, "learning_rate": 4.987555555555556e-05, "loss": 1.8858, "step": 1285 }, { "epoch": 10.288, "grad_norm": 27.307754516601562, "learning_rate": 4.987111111111111e-05, "loss": 1.7305, "step": 1286 }, { "epoch": 10.296, "grad_norm": 46.8963623046875, "learning_rate": 4.986666666666667e-05, "loss": 1.4442, "step": 1287 }, { "epoch": 10.304, "grad_norm": 20.368301391601562, "learning_rate": 4.986222222222223e-05, "loss": 1.3958, "step": 1288 }, { "epoch": 10.312, "grad_norm": 36.945838928222656, "learning_rate": 4.985777777777778e-05, "loss": 1.344, "step": 1289 }, { "epoch": 10.32, "grad_norm": 37.49739456176758, "learning_rate": 4.985333333333333e-05, "loss": 1.3122, "step": 1290 }, { "epoch": 10.328, "grad_norm": 36.787322998046875, "learning_rate": 4.984888888888889e-05, "loss": 1.2615, "step": 1291 }, { "epoch": 10.336, "grad_norm": 35.35281753540039, "learning_rate": 4.984444444444445e-05, "loss": 1.7986, "step": 1292 }, { "epoch": 10.344, "grad_norm": 33.296756744384766, "learning_rate": 4.9840000000000004e-05, "loss": 1.456, "step": 1293 }, { "epoch": 10.352, "grad_norm": 19.74073600769043, "learning_rate": 4.983555555555556e-05, "loss": 1.698, "step": 1294 }, { "epoch": 10.36, "grad_norm": 68.77185821533203, "learning_rate": 4.9831111111111114e-05, "loss": 1.6628, "step": 1295 }, { "epoch": 10.368, "grad_norm": 26.92515754699707, "learning_rate": 4.982666666666667e-05, 
"loss": 1.6237, "step": 1296 }, { "epoch": 10.376, "grad_norm": 34.19001007080078, "learning_rate": 4.982222222222222e-05, "loss": 2.5966, "step": 1297 }, { "epoch": 10.384, "grad_norm": 46.39704132080078, "learning_rate": 4.981777777777778e-05, "loss": 2.4234, "step": 1298 }, { "epoch": 10.392, "grad_norm": 43.196414947509766, "learning_rate": 4.981333333333333e-05, "loss": 1.4004, "step": 1299 }, { "epoch": 10.4, "grad_norm": 47.37616729736328, "learning_rate": 4.9808888888888895e-05, "loss": 1.6325, "step": 1300 }, { "epoch": 10.408, "grad_norm": 38.126953125, "learning_rate": 4.980444444444445e-05, "loss": 1.0819, "step": 1301 }, { "epoch": 10.416, "grad_norm": 29.94117546081543, "learning_rate": 4.9800000000000004e-05, "loss": 1.524, "step": 1302 }, { "epoch": 10.424, "grad_norm": 28.295982360839844, "learning_rate": 4.979555555555556e-05, "loss": 1.5872, "step": 1303 }, { "epoch": 10.432, "grad_norm": 36.897212982177734, "learning_rate": 4.9791111111111114e-05, "loss": 1.3295, "step": 1304 }, { "epoch": 10.44, "grad_norm": 32.979496002197266, "learning_rate": 4.978666666666667e-05, "loss": 1.5799, "step": 1305 }, { "epoch": 10.448, "grad_norm": 63.2793083190918, "learning_rate": 4.9782222222222224e-05, "loss": 1.5955, "step": 1306 }, { "epoch": 10.456, "grad_norm": 62.010528564453125, "learning_rate": 4.977777777777778e-05, "loss": 1.5415, "step": 1307 }, { "epoch": 10.464, "grad_norm": 36.92646026611328, "learning_rate": 4.977333333333334e-05, "loss": 1.2912, "step": 1308 }, { "epoch": 10.472, "grad_norm": 26.198081970214844, "learning_rate": 4.9768888888888895e-05, "loss": 1.1728, "step": 1309 }, { "epoch": 10.48, "grad_norm": 42.78683853149414, "learning_rate": 4.976444444444445e-05, "loss": 1.3227, "step": 1310 }, { "epoch": 10.488, "grad_norm": 69.25888061523438, "learning_rate": 4.976e-05, "loss": 1.3153, "step": 1311 }, { "epoch": 10.496, "grad_norm": 58.258548736572266, "learning_rate": 4.975555555555555e-05, "loss": 1.4614, "step": 1312 }, { "epoch": 
10.504, "grad_norm": 27.93482208251953, "learning_rate": 4.9751111111111114e-05, "loss": 1.7081, "step": 1313 }, { "epoch": 10.512, "grad_norm": 60.80561065673828, "learning_rate": 4.974666666666667e-05, "loss": 1.4804, "step": 1314 }, { "epoch": 10.52, "grad_norm": 81.0887451171875, "learning_rate": 4.9742222222222224e-05, "loss": 1.3318, "step": 1315 }, { "epoch": 10.528, "grad_norm": 41.46981430053711, "learning_rate": 4.973777777777778e-05, "loss": 1.6251, "step": 1316 }, { "epoch": 10.536, "grad_norm": 24.550296783447266, "learning_rate": 4.973333333333334e-05, "loss": 1.5092, "step": 1317 }, { "epoch": 10.544, "grad_norm": 34.15863800048828, "learning_rate": 4.972888888888889e-05, "loss": 0.9524, "step": 1318 }, { "epoch": 10.552, "grad_norm": 34.18900680541992, "learning_rate": 4.9724444444444443e-05, "loss": 1.3796, "step": 1319 }, { "epoch": 10.56, "grad_norm": 195.18858337402344, "learning_rate": 4.972e-05, "loss": 1.8284, "step": 1320 }, { "epoch": 10.568, "grad_norm": 27.89995002746582, "learning_rate": 4.971555555555556e-05, "loss": 1.6652, "step": 1321 }, { "epoch": 10.576, "grad_norm": 25.236284255981445, "learning_rate": 4.9711111111111115e-05, "loss": 1.5417, "step": 1322 }, { "epoch": 10.584, "grad_norm": 55.419700622558594, "learning_rate": 4.970666666666667e-05, "loss": 1.4304, "step": 1323 }, { "epoch": 10.592, "grad_norm": 22.82628059387207, "learning_rate": 4.9702222222222224e-05, "loss": 1.5273, "step": 1324 }, { "epoch": 10.6, "grad_norm": 18.201154708862305, "learning_rate": 4.969777777777778e-05, "loss": 1.3544, "step": 1325 }, { "epoch": 10.608, "grad_norm": 24.476383209228516, "learning_rate": 4.9693333333333334e-05, "loss": 2.2347, "step": 1326 }, { "epoch": 10.616, "grad_norm": 40.73676300048828, "learning_rate": 4.968888888888889e-05, "loss": 1.6885, "step": 1327 }, { "epoch": 10.624, "grad_norm": 33.41622543334961, "learning_rate": 4.9684444444444444e-05, "loss": 1.5572, "step": 1328 }, { "epoch": 10.632, "grad_norm": 
46.05109786987305, "learning_rate": 4.9680000000000005e-05, "loss": 1.5068, "step": 1329 }, { "epoch": 10.64, "grad_norm": 30.77289581298828, "learning_rate": 4.967555555555556e-05, "loss": 1.7126, "step": 1330 }, { "epoch": 10.648, "grad_norm": 26.621286392211914, "learning_rate": 4.9671111111111115e-05, "loss": 1.2048, "step": 1331 }, { "epoch": 10.656, "grad_norm": 36.782588958740234, "learning_rate": 4.966666666666667e-05, "loss": 1.854, "step": 1332 }, { "epoch": 10.664, "grad_norm": 112.96279907226562, "learning_rate": 4.9662222222222225e-05, "loss": 1.4033, "step": 1333 }, { "epoch": 10.672, "grad_norm": 27.966630935668945, "learning_rate": 4.965777777777778e-05, "loss": 1.3869, "step": 1334 }, { "epoch": 10.68, "grad_norm": 25.132038116455078, "learning_rate": 4.9653333333333335e-05, "loss": 1.2688, "step": 1335 }, { "epoch": 10.688, "grad_norm": 31.35779571533203, "learning_rate": 4.964888888888889e-05, "loss": 1.453, "step": 1336 }, { "epoch": 10.696, "grad_norm": 21.762691497802734, "learning_rate": 4.964444444444445e-05, "loss": 1.3779, "step": 1337 }, { "epoch": 10.704, "grad_norm": 26.28360366821289, "learning_rate": 4.9640000000000006e-05, "loss": 1.3903, "step": 1338 }, { "epoch": 10.712, "grad_norm": 32.52082824707031, "learning_rate": 4.963555555555556e-05, "loss": 1.4273, "step": 1339 }, { "epoch": 10.72, "grad_norm": 54.13837814331055, "learning_rate": 4.963111111111111e-05, "loss": 1.5396, "step": 1340 }, { "epoch": 10.728, "grad_norm": 29.16974449157715, "learning_rate": 4.962666666666667e-05, "loss": 1.3555, "step": 1341 }, { "epoch": 10.736, "grad_norm": 59.25050354003906, "learning_rate": 4.9622222222222225e-05, "loss": 2.0851, "step": 1342 }, { "epoch": 10.744, "grad_norm": 57.004215240478516, "learning_rate": 4.961777777777778e-05, "loss": 1.3389, "step": 1343 }, { "epoch": 10.752, "grad_norm": 61.82130432128906, "learning_rate": 4.9613333333333335e-05, "loss": 1.6303, "step": 1344 }, { "epoch": 10.76, "grad_norm": 35.13813781738281, 
"learning_rate": 4.9608888888888897e-05, "loss": 1.7078, "step": 1345 }, { "epoch": 10.768, "grad_norm": 27.84050178527832, "learning_rate": 4.9604444444444445e-05, "loss": 1.641, "step": 1346 }, { "epoch": 10.776, "grad_norm": 46.35945510864258, "learning_rate": 4.96e-05, "loss": 2.2397, "step": 1347 }, { "epoch": 10.784, "grad_norm": 29.201814651489258, "learning_rate": 4.9595555555555554e-05, "loss": 1.4384, "step": 1348 }, { "epoch": 10.792, "grad_norm": 174.75106811523438, "learning_rate": 4.9591111111111116e-05, "loss": 1.9408, "step": 1349 }, { "epoch": 10.8, "grad_norm": 26.07990074157715, "learning_rate": 4.958666666666667e-05, "loss": 1.5709, "step": 1350 }, { "epoch": 10.808, "grad_norm": 25.013492584228516, "learning_rate": 4.9582222222222226e-05, "loss": 1.3574, "step": 1351 }, { "epoch": 10.816, "grad_norm": 33.89535140991211, "learning_rate": 4.957777777777778e-05, "loss": 1.7641, "step": 1352 }, { "epoch": 10.824, "grad_norm": 24.817052841186523, "learning_rate": 4.9573333333333335e-05, "loss": 1.857, "step": 1353 }, { "epoch": 10.832, "grad_norm": 43.47747802734375, "learning_rate": 4.956888888888889e-05, "loss": 1.8727, "step": 1354 }, { "epoch": 10.84, "grad_norm": 30.630903244018555, "learning_rate": 4.9564444444444445e-05, "loss": 1.5006, "step": 1355 }, { "epoch": 10.848, "grad_norm": 49.627201080322266, "learning_rate": 4.956e-05, "loss": 1.6645, "step": 1356 }, { "epoch": 10.856, "grad_norm": 43.03438186645508, "learning_rate": 4.955555555555556e-05, "loss": 1.6837, "step": 1357 }, { "epoch": 10.864, "grad_norm": 123.02262115478516, "learning_rate": 4.9551111111111116e-05, "loss": 1.362, "step": 1358 }, { "epoch": 10.872, "grad_norm": 34.00065231323242, "learning_rate": 4.954666666666667e-05, "loss": 1.3897, "step": 1359 }, { "epoch": 10.88, "grad_norm": 57.6186637878418, "learning_rate": 4.9542222222222226e-05, "loss": 1.6567, "step": 1360 }, { "epoch": 10.888, "grad_norm": 48.50279235839844, "learning_rate": 4.9537777777777774e-05, "loss": 
1.3018, "step": 1361 }, { "epoch": 10.896, "grad_norm": 46.36789321899414, "learning_rate": 4.9533333333333336e-05, "loss": 1.4159, "step": 1362 }, { "epoch": 10.904, "grad_norm": 52.09309387207031, "learning_rate": 4.952888888888889e-05, "loss": 1.5805, "step": 1363 }, { "epoch": 10.912, "grad_norm": 29.042884826660156, "learning_rate": 4.9524444444444445e-05, "loss": 2.1593, "step": 1364 }, { "epoch": 10.92, "grad_norm": 29.634056091308594, "learning_rate": 4.952e-05, "loss": 1.4389, "step": 1365 }, { "epoch": 10.928, "grad_norm": 59.22450637817383, "learning_rate": 4.951555555555556e-05, "loss": 1.3781, "step": 1366 }, { "epoch": 10.936, "grad_norm": 65.6045913696289, "learning_rate": 4.951111111111112e-05, "loss": 1.458, "step": 1367 }, { "epoch": 10.943999999999999, "grad_norm": 40.17841720581055, "learning_rate": 4.9506666666666665e-05, "loss": 1.752, "step": 1368 }, { "epoch": 10.952, "grad_norm": 26.562210083007812, "learning_rate": 4.950222222222222e-05, "loss": 1.6765, "step": 1369 }, { "epoch": 10.96, "grad_norm": 53.741519927978516, "learning_rate": 4.949777777777778e-05, "loss": 1.8841, "step": 1370 }, { "epoch": 10.968, "grad_norm": 81.42264556884766, "learning_rate": 4.9493333333333336e-05, "loss": 1.3266, "step": 1371 }, { "epoch": 10.975999999999999, "grad_norm": 56.75428009033203, "learning_rate": 4.948888888888889e-05, "loss": 2.1478, "step": 1372 }, { "epoch": 10.984, "grad_norm": 31.6486873626709, "learning_rate": 4.9484444444444446e-05, "loss": 1.6514, "step": 1373 }, { "epoch": 10.992, "grad_norm": 59.99362564086914, "learning_rate": 4.948000000000001e-05, "loss": 2.7345, "step": 1374 }, { "epoch": 11.0, "grad_norm": 67.24237060546875, "learning_rate": 4.9475555555555555e-05, "loss": 1.7047, "step": 1375 }, { "epoch": 11.0, "eval_loss": 1.6398189067840576, "eval_map": 0.168, "eval_map_50": 0.3738, "eval_map_75": 0.1218, "eval_map_Coverall": 0.4209, "eval_map_Face_Shield": 0.1223, "eval_map_Gloves": 0.0862, "eval_map_Goggles": 0.0095, 
"eval_map_Mask": 0.2009, "eval_map_large": 0.2401, "eval_map_medium": 0.0917, "eval_map_small": 0.0938, "eval_mar_1": 0.157, "eval_mar_10": 0.3158, "eval_mar_100": 0.347, "eval_mar_100_Coverall": 0.6533, "eval_mar_100_Face_Shield": 0.3882, "eval_mar_100_Gloves": 0.2721, "eval_mar_100_Goggles": 0.0406, "eval_mar_100_Mask": 0.3808, "eval_mar_large": 0.457, "eval_mar_medium": 0.2779, "eval_mar_small": 0.1468, "eval_runtime": 2.5309, "eval_samples_per_second": 11.459, "eval_steps_per_second": 0.79, "step": 1375 }, { "epoch": 11.008, "grad_norm": 32.967647552490234, "learning_rate": 4.947111111111111e-05, "loss": 1.4624, "step": 1376 }, { "epoch": 11.016, "grad_norm": 53.62716293334961, "learning_rate": 4.9466666666666665e-05, "loss": 1.8353, "step": 1377 }, { "epoch": 11.024, "grad_norm": 58.461387634277344, "learning_rate": 4.946222222222223e-05, "loss": 1.7171, "step": 1378 }, { "epoch": 11.032, "grad_norm": 35.35238265991211, "learning_rate": 4.945777777777778e-05, "loss": 1.9522, "step": 1379 }, { "epoch": 11.04, "grad_norm": 34.10546112060547, "learning_rate": 4.9453333333333336e-05, "loss": 1.759, "step": 1380 }, { "epoch": 11.048, "grad_norm": 51.50071716308594, "learning_rate": 4.944888888888889e-05, "loss": 3.237, "step": 1381 }, { "epoch": 11.056, "grad_norm": 38.1030387878418, "learning_rate": 4.9444444444444446e-05, "loss": 2.2511, "step": 1382 }, { "epoch": 11.064, "grad_norm": 24.257244110107422, "learning_rate": 4.944e-05, "loss": 1.5375, "step": 1383 }, { "epoch": 11.072, "grad_norm": 41.71057891845703, "learning_rate": 4.9435555555555556e-05, "loss": 1.4586, "step": 1384 }, { "epoch": 11.08, "grad_norm": 23.718490600585938, "learning_rate": 4.943111111111111e-05, "loss": 1.4647, "step": 1385 }, { "epoch": 11.088, "grad_norm": 207.77474975585938, "learning_rate": 4.942666666666667e-05, "loss": 1.7047, "step": 1386 }, { "epoch": 11.096, "grad_norm": 52.20748519897461, "learning_rate": 4.942222222222223e-05, "loss": 1.2716, "step": 1387 }, { "epoch": 
11.104, "grad_norm": 19.905315399169922, "learning_rate": 4.941777777777778e-05, "loss": 1.4687, "step": 1388 }, { "epoch": 11.112, "grad_norm": 45.92137908935547, "learning_rate": 4.941333333333334e-05, "loss": 2.5656, "step": 1389 }, { "epoch": 11.12, "grad_norm": 251.602294921875, "learning_rate": 4.940888888888889e-05, "loss": 1.4908, "step": 1390 }, { "epoch": 11.128, "grad_norm": 26.412832260131836, "learning_rate": 4.9404444444444447e-05, "loss": 1.4589, "step": 1391 }, { "epoch": 11.136, "grad_norm": 68.6321792602539, "learning_rate": 4.94e-05, "loss": 1.7893, "step": 1392 }, { "epoch": 11.144, "grad_norm": 28.627212524414062, "learning_rate": 4.9395555555555556e-05, "loss": 1.8168, "step": 1393 }, { "epoch": 11.152, "grad_norm": 22.952335357666016, "learning_rate": 4.939111111111112e-05, "loss": 1.3929, "step": 1394 }, { "epoch": 11.16, "grad_norm": 24.049701690673828, "learning_rate": 4.938666666666667e-05, "loss": 2.1225, "step": 1395 }, { "epoch": 11.168, "grad_norm": 368.28466796875, "learning_rate": 4.938222222222223e-05, "loss": 1.951, "step": 1396 }, { "epoch": 11.176, "grad_norm": 389.43548583984375, "learning_rate": 4.9377777777777776e-05, "loss": 1.9448, "step": 1397 }, { "epoch": 11.184, "grad_norm": 44.91801452636719, "learning_rate": 4.937333333333334e-05, "loss": 1.8905, "step": 1398 }, { "epoch": 11.192, "grad_norm": 21.267457962036133, "learning_rate": 4.936888888888889e-05, "loss": 1.7966, "step": 1399 }, { "epoch": 11.2, "grad_norm": 25.486042022705078, "learning_rate": 4.936444444444445e-05, "loss": 1.5335, "step": 1400 }, { "epoch": 11.208, "grad_norm": 35.81385803222656, "learning_rate": 4.936e-05, "loss": 1.4387, "step": 1401 }, { "epoch": 11.216, "grad_norm": 28.90375328063965, "learning_rate": 4.935555555555556e-05, "loss": 1.5113, "step": 1402 }, { "epoch": 11.224, "grad_norm": 25.951940536499023, "learning_rate": 4.935111111111111e-05, "loss": 1.6627, "step": 1403 }, { "epoch": 11.232, "grad_norm": 27.667448043823242, 
"learning_rate": 4.9346666666666666e-05, "loss": 1.2776, "step": 1404 }, { "epoch": 11.24, "grad_norm": 23.488658905029297, "learning_rate": 4.934222222222222e-05, "loss": 1.7906, "step": 1405 }, { "epoch": 11.248, "grad_norm": 53.552955627441406, "learning_rate": 4.933777777777778e-05, "loss": 1.6185, "step": 1406 }, { "epoch": 11.256, "grad_norm": 30.49761199951172, "learning_rate": 4.933333333333334e-05, "loss": 1.471, "step": 1407 }, { "epoch": 11.264, "grad_norm": 28.657718658447266, "learning_rate": 4.932888888888889e-05, "loss": 1.3913, "step": 1408 }, { "epoch": 11.272, "grad_norm": 32.217288970947266, "learning_rate": 4.932444444444445e-05, "loss": 1.3478, "step": 1409 }, { "epoch": 11.28, "grad_norm": 25.66849136352539, "learning_rate": 4.932e-05, "loss": 1.839, "step": 1410 }, { "epoch": 11.288, "grad_norm": 444.56781005859375, "learning_rate": 4.931555555555556e-05, "loss": 1.5625, "step": 1411 }, { "epoch": 11.296, "grad_norm": 56.29027557373047, "learning_rate": 4.931111111111111e-05, "loss": 1.7992, "step": 1412 }, { "epoch": 11.304, "grad_norm": 41.81000518798828, "learning_rate": 4.930666666666667e-05, "loss": 1.4548, "step": 1413 }, { "epoch": 11.312, "grad_norm": 26.56593132019043, "learning_rate": 4.930222222222222e-05, "loss": 1.6892, "step": 1414 }, { "epoch": 11.32, "grad_norm": 26.227924346923828, "learning_rate": 4.929777777777778e-05, "loss": 1.5859, "step": 1415 }, { "epoch": 11.328, "grad_norm": 32.35157012939453, "learning_rate": 4.929333333333334e-05, "loss": 3.4535, "step": 1416 }, { "epoch": 11.336, "grad_norm": 59.76622772216797, "learning_rate": 4.928888888888889e-05, "loss": 1.7784, "step": 1417 }, { "epoch": 11.344, "grad_norm": 48.57483673095703, "learning_rate": 4.928444444444444e-05, "loss": 1.8985, "step": 1418 }, { "epoch": 11.352, "grad_norm": 22.890666961669922, "learning_rate": 4.928e-05, "loss": 1.6533, "step": 1419 }, { "epoch": 11.36, "grad_norm": 43.89756774902344, "learning_rate": 4.927555555555556e-05, "loss": 
1.733, "step": 1420 }, { "epoch": 11.368, "grad_norm": 42.32027816772461, "learning_rate": 4.927111111111111e-05, "loss": 1.4469, "step": 1421 }, { "epoch": 11.376, "grad_norm": 36.236412048339844, "learning_rate": 4.926666666666667e-05, "loss": 1.642, "step": 1422 }, { "epoch": 11.384, "grad_norm": 70.92009735107422, "learning_rate": 4.926222222222223e-05, "loss": 1.5037, "step": 1423 }, { "epoch": 11.392, "grad_norm": 77.42939758300781, "learning_rate": 4.9257777777777784e-05, "loss": 1.7922, "step": 1424 }, { "epoch": 11.4, "grad_norm": 32.716060638427734, "learning_rate": 4.925333333333333e-05, "loss": 1.8963, "step": 1425 }, { "epoch": 11.408, "grad_norm": 83.06886291503906, "learning_rate": 4.9248888888888886e-05, "loss": 2.4442, "step": 1426 }, { "epoch": 11.416, "grad_norm": 35.2176513671875, "learning_rate": 4.924444444444445e-05, "loss": 1.5593, "step": 1427 }, { "epoch": 11.424, "grad_norm": 31.10171890258789, "learning_rate": 4.924e-05, "loss": 1.6068, "step": 1428 }, { "epoch": 11.432, "grad_norm": 64.76337432861328, "learning_rate": 4.923555555555556e-05, "loss": 1.5909, "step": 1429 }, { "epoch": 11.44, "grad_norm": 43.413753509521484, "learning_rate": 4.923111111111111e-05, "loss": 1.5462, "step": 1430 }, { "epoch": 11.448, "grad_norm": 24.148683547973633, "learning_rate": 4.9226666666666674e-05, "loss": 1.7012, "step": 1431 }, { "epoch": 11.456, "grad_norm": 36.05037307739258, "learning_rate": 4.922222222222222e-05, "loss": 1.5607, "step": 1432 }, { "epoch": 11.464, "grad_norm": 24.222150802612305, "learning_rate": 4.921777777777778e-05, "loss": 1.5165, "step": 1433 }, { "epoch": 11.472, "grad_norm": 22.1521053314209, "learning_rate": 4.921333333333333e-05, "loss": 1.3359, "step": 1434 }, { "epoch": 11.48, "grad_norm": 19.584678649902344, "learning_rate": 4.9208888888888894e-05, "loss": 2.7484, "step": 1435 }, { "epoch": 11.488, "grad_norm": 56.141578674316406, "learning_rate": 4.920444444444445e-05, "loss": 1.9354, "step": 1436 }, { "epoch": 
11.496, "grad_norm": 24.48398780822754, "learning_rate": 4.92e-05, "loss": 1.6946, "step": 1437 }, { "epoch": 11.504, "grad_norm": 52.974884033203125, "learning_rate": 4.919555555555556e-05, "loss": 1.6336, "step": 1438 }, { "epoch": 11.512, "grad_norm": 42.58856964111328, "learning_rate": 4.919111111111111e-05, "loss": 1.5425, "step": 1439 }, { "epoch": 11.52, "grad_norm": 48.67479705810547, "learning_rate": 4.918666666666667e-05, "loss": 1.5251, "step": 1440 }, { "epoch": 11.528, "grad_norm": 28.02264404296875, "learning_rate": 4.918222222222222e-05, "loss": 1.7233, "step": 1441 }, { "epoch": 11.536, "grad_norm": 38.225223541259766, "learning_rate": 4.917777777777778e-05, "loss": 1.5024, "step": 1442 }, { "epoch": 11.544, "grad_norm": 62.375614166259766, "learning_rate": 4.917333333333334e-05, "loss": 1.6143, "step": 1443 }, { "epoch": 11.552, "grad_norm": 30.81853675842285, "learning_rate": 4.9168888888888894e-05, "loss": 1.3495, "step": 1444 }, { "epoch": 11.56, "grad_norm": 34.007999420166016, "learning_rate": 4.916444444444445e-05, "loss": 1.6739, "step": 1445 }, { "epoch": 11.568, "grad_norm": 37.225364685058594, "learning_rate": 4.9160000000000004e-05, "loss": 1.4563, "step": 1446 }, { "epoch": 11.576, "grad_norm": 73.48602294921875, "learning_rate": 4.915555555555556e-05, "loss": 1.3155, "step": 1447 }, { "epoch": 11.584, "grad_norm": 44.163230895996094, "learning_rate": 4.915111111111111e-05, "loss": 1.3884, "step": 1448 }, { "epoch": 11.592, "grad_norm": 38.97467803955078, "learning_rate": 4.914666666666667e-05, "loss": 1.1053, "step": 1449 }, { "epoch": 11.6, "grad_norm": 33.998191833496094, "learning_rate": 4.914222222222222e-05, "loss": 1.7863, "step": 1450 }, { "epoch": 11.608, "grad_norm": 51.45821762084961, "learning_rate": 4.9137777777777785e-05, "loss": 1.3894, "step": 1451 }, { "epoch": 11.616, "grad_norm": 27.000385284423828, "learning_rate": 4.913333333333334e-05, "loss": 1.3329, "step": 1452 }, { "epoch": 11.624, "grad_norm": 
21.566133499145508, "learning_rate": 4.912888888888889e-05, "loss": 1.7864, "step": 1453 }, { "epoch": 11.632, "grad_norm": 38.6767463684082, "learning_rate": 4.912444444444444e-05, "loss": 1.3731, "step": 1454 }, { "epoch": 11.64, "grad_norm": 37.365562438964844, "learning_rate": 4.9120000000000004e-05, "loss": 2.0585, "step": 1455 }, { "epoch": 11.648, "grad_norm": 119.91754913330078, "learning_rate": 4.911555555555556e-05, "loss": 1.8212, "step": 1456 }, { "epoch": 11.656, "grad_norm": 54.48188781738281, "learning_rate": 4.9111111111111114e-05, "loss": 1.3464, "step": 1457 }, { "epoch": 11.664, "grad_norm": 29.206451416015625, "learning_rate": 4.910666666666667e-05, "loss": 1.5528, "step": 1458 }, { "epoch": 11.672, "grad_norm": 42.9365119934082, "learning_rate": 4.910222222222223e-05, "loss": 1.6695, "step": 1459 }, { "epoch": 11.68, "grad_norm": 39.29865646362305, "learning_rate": 4.909777777777778e-05, "loss": 1.9065, "step": 1460 }, { "epoch": 11.688, "grad_norm": 25.202367782592773, "learning_rate": 4.909333333333333e-05, "loss": 1.6102, "step": 1461 }, { "epoch": 11.696, "grad_norm": 55.25210189819336, "learning_rate": 4.908888888888889e-05, "loss": 1.4393, "step": 1462 }, { "epoch": 11.704, "grad_norm": 79.27316284179688, "learning_rate": 4.908444444444445e-05, "loss": 1.8604, "step": 1463 }, { "epoch": 11.712, "grad_norm": 30.647672653198242, "learning_rate": 4.9080000000000004e-05, "loss": 1.2619, "step": 1464 }, { "epoch": 11.72, "grad_norm": 42.980716705322266, "learning_rate": 4.907555555555556e-05, "loss": 1.1612, "step": 1465 }, { "epoch": 11.728, "grad_norm": 21.224224090576172, "learning_rate": 4.9071111111111114e-05, "loss": 1.1274, "step": 1466 }, { "epoch": 11.736, "grad_norm": 36.74764633178711, "learning_rate": 4.906666666666667e-05, "loss": 1.3857, "step": 1467 }, { "epoch": 11.744, "grad_norm": 24.692378997802734, "learning_rate": 4.9062222222222224e-05, "loss": 1.6201, "step": 1468 }, { "epoch": 11.752, "grad_norm": 34.4458122253418, 
"learning_rate": 4.905777777777778e-05, "loss": 1.239, "step": 1469 }, { "epoch": 11.76, "grad_norm": 42.06615447998047, "learning_rate": 4.9053333333333333e-05, "loss": 1.4503, "step": 1470 }, { "epoch": 11.768, "grad_norm": 32.10176086425781, "learning_rate": 4.904888888888889e-05, "loss": 1.7337, "step": 1471 }, { "epoch": 11.776, "grad_norm": 34.04774856567383, "learning_rate": 4.904444444444445e-05, "loss": 1.8377, "step": 1472 }, { "epoch": 11.784, "grad_norm": 26.79412841796875, "learning_rate": 4.9040000000000005e-05, "loss": 1.14, "step": 1473 }, { "epoch": 11.792, "grad_norm": 37.609519958496094, "learning_rate": 4.903555555555556e-05, "loss": 1.9658, "step": 1474 }, { "epoch": 11.8, "grad_norm": 47.573707580566406, "learning_rate": 4.903111111111111e-05, "loss": 1.8546, "step": 1475 }, { "epoch": 11.808, "grad_norm": 35.293033599853516, "learning_rate": 4.902666666666667e-05, "loss": 1.439, "step": 1476 }, { "epoch": 11.816, "grad_norm": 58.38722610473633, "learning_rate": 4.9022222222222224e-05, "loss": 1.8217, "step": 1477 }, { "epoch": 11.824, "grad_norm": 61.71272659301758, "learning_rate": 4.901777777777778e-05, "loss": 1.7313, "step": 1478 }, { "epoch": 11.832, "grad_norm": 19.42742156982422, "learning_rate": 4.9013333333333334e-05, "loss": 1.1748, "step": 1479 }, { "epoch": 11.84, "grad_norm": 80.51416015625, "learning_rate": 4.9008888888888896e-05, "loss": 1.1844, "step": 1480 }, { "epoch": 11.848, "grad_norm": 42.267704010009766, "learning_rate": 4.900444444444445e-05, "loss": 1.7362, "step": 1481 }, { "epoch": 11.856, "grad_norm": 154.80323791503906, "learning_rate": 4.9e-05, "loss": 1.7754, "step": 1482 }, { "epoch": 11.864, "grad_norm": 110.03594207763672, "learning_rate": 4.899555555555555e-05, "loss": 1.2871, "step": 1483 }, { "epoch": 11.872, "grad_norm": 126.62879943847656, "learning_rate": 4.8991111111111115e-05, "loss": 1.8799, "step": 1484 }, { "epoch": 11.88, "grad_norm": 35.757362365722656, "learning_rate": 4.898666666666667e-05, 
"loss": 1.6458, "step": 1485 }, { "epoch": 11.888, "grad_norm": 105.08518981933594, "learning_rate": 4.8982222222222225e-05, "loss": 2.8677, "step": 1486 }, { "epoch": 11.896, "grad_norm": 72.41747283935547, "learning_rate": 4.897777777777778e-05, "loss": 1.5829, "step": 1487 }, { "epoch": 11.904, "grad_norm": 87.82933044433594, "learning_rate": 4.897333333333334e-05, "loss": 1.5585, "step": 1488 }, { "epoch": 11.912, "grad_norm": 108.43446350097656, "learning_rate": 4.896888888888889e-05, "loss": 1.7452, "step": 1489 }, { "epoch": 11.92, "grad_norm": 35.588985443115234, "learning_rate": 4.8964444444444444e-05, "loss": 1.5939, "step": 1490 }, { "epoch": 11.928, "grad_norm": 65.11099243164062, "learning_rate": 4.896e-05, "loss": 1.4262, "step": 1491 }, { "epoch": 11.936, "grad_norm": 47.839622497558594, "learning_rate": 4.895555555555556e-05, "loss": 1.7641, "step": 1492 }, { "epoch": 11.943999999999999, "grad_norm": 36.825836181640625, "learning_rate": 4.8951111111111115e-05, "loss": 1.4258, "step": 1493 }, { "epoch": 11.952, "grad_norm": 34.86724853515625, "learning_rate": 4.894666666666667e-05, "loss": 1.4869, "step": 1494 }, { "epoch": 11.96, "grad_norm": 125.651611328125, "learning_rate": 4.8942222222222225e-05, "loss": 1.5888, "step": 1495 }, { "epoch": 11.968, "grad_norm": 148.2510528564453, "learning_rate": 4.893777777777778e-05, "loss": 1.615, "step": 1496 }, { "epoch": 11.975999999999999, "grad_norm": 52.805870056152344, "learning_rate": 4.8933333333333335e-05, "loss": 1.2695, "step": 1497 }, { "epoch": 11.984, "grad_norm": 24.431133270263672, "learning_rate": 4.892888888888889e-05, "loss": 1.2172, "step": 1498 }, { "epoch": 11.992, "grad_norm": 42.26841735839844, "learning_rate": 4.8924444444444444e-05, "loss": 1.5386, "step": 1499 }, { "epoch": 12.0, "grad_norm": 25.232501983642578, "learning_rate": 4.8920000000000006e-05, "loss": 1.3811, "step": 1500 }, { "epoch": 12.0, "eval_loss": 1.6186023950576782, "eval_map": 0.2065, "eval_map_50": 0.3833, 
"eval_map_75": 0.1859, "eval_map_Coverall": 0.5278, "eval_map_Face_Shield": 0.1069, "eval_map_Gloves": 0.119, "eval_map_Goggles": 0.0213, "eval_map_Mask": 0.2576, "eval_map_large": 0.3094, "eval_map_medium": 0.123, "eval_map_small": 0.1817, "eval_mar_1": 0.2304, "eval_mar_10": 0.4163, "eval_mar_100": 0.4234, "eval_mar_100_Coverall": 0.7111, "eval_mar_100_Face_Shield": 0.5412, "eval_mar_100_Gloves": 0.2803, "eval_mar_100_Goggles": 0.1688, "eval_mar_100_Mask": 0.4154, "eval_mar_large": 0.4933, "eval_mar_medium": 0.2872, "eval_mar_small": 0.2391, "eval_runtime": 2.7186, "eval_samples_per_second": 10.667, "eval_steps_per_second": 0.736, "step": 1500 }, { "epoch": 12.008, "grad_norm": 23.168590545654297, "learning_rate": 4.891555555555556e-05, "loss": 1.6506, "step": 1501 }, { "epoch": 12.016, "grad_norm": 39.048343658447266, "learning_rate": 4.8911111111111116e-05, "loss": 1.422, "step": 1502 }, { "epoch": 12.024, "grad_norm": 69.68650817871094, "learning_rate": 4.890666666666667e-05, "loss": 1.6156, "step": 1503 }, { "epoch": 12.032, "grad_norm": 43.18834686279297, "learning_rate": 4.8902222222222225e-05, "loss": 1.6234, "step": 1504 }, { "epoch": 12.04, "grad_norm": 37.66554641723633, "learning_rate": 4.889777777777778e-05, "loss": 1.2066, "step": 1505 }, { "epoch": 12.048, "grad_norm": 31.733657836914062, "learning_rate": 4.8893333333333335e-05, "loss": 1.5758, "step": 1506 }, { "epoch": 12.056, "grad_norm": 66.21111297607422, "learning_rate": 4.888888888888889e-05, "loss": 1.7277, "step": 1507 }, { "epoch": 12.064, "grad_norm": 38.565391540527344, "learning_rate": 4.888444444444445e-05, "loss": 1.6439, "step": 1508 }, { "epoch": 12.072, "grad_norm": 33.50730895996094, "learning_rate": 4.8880000000000006e-05, "loss": 1.7754, "step": 1509 }, { "epoch": 12.08, "grad_norm": 29.266136169433594, "learning_rate": 4.8875555555555554e-05, "loss": 1.2509, "step": 1510 }, { "epoch": 12.088, "grad_norm": 41.877281188964844, "learning_rate": 4.887111111111111e-05, "loss": 
1.6454, "step": 1511 }, { "epoch": 12.096, "grad_norm": 43.1926383972168, "learning_rate": 4.886666666666667e-05, "loss": 1.8011, "step": 1512 }, { "epoch": 12.104, "grad_norm": 30.70949363708496, "learning_rate": 4.8862222222222226e-05, "loss": 2.131, "step": 1513 }, { "epoch": 12.112, "grad_norm": 26.718965530395508, "learning_rate": 4.885777777777778e-05, "loss": 1.7657, "step": 1514 }, { "epoch": 12.12, "grad_norm": 69.09099578857422, "learning_rate": 4.8853333333333335e-05, "loss": 1.9251, "step": 1515 }, { "epoch": 12.128, "grad_norm": 51.333534240722656, "learning_rate": 4.884888888888889e-05, "loss": 1.4553, "step": 1516 }, { "epoch": 12.136, "grad_norm": 254.43777465820312, "learning_rate": 4.8844444444444445e-05, "loss": 1.361, "step": 1517 }, { "epoch": 12.144, "grad_norm": 37.495243072509766, "learning_rate": 4.884e-05, "loss": 2.1063, "step": 1518 }, { "epoch": 12.152, "grad_norm": 31.06138038635254, "learning_rate": 4.8835555555555555e-05, "loss": 1.3348, "step": 1519 }, { "epoch": 12.16, "grad_norm": 26.896949768066406, "learning_rate": 4.883111111111111e-05, "loss": 1.4889, "step": 1520 }, { "epoch": 12.168, "grad_norm": 49.75918960571289, "learning_rate": 4.882666666666667e-05, "loss": 1.765, "step": 1521 }, { "epoch": 12.176, "grad_norm": 26.259113311767578, "learning_rate": 4.8822222222222226e-05, "loss": 1.4919, "step": 1522 }, { "epoch": 12.184, "grad_norm": 47.17731857299805, "learning_rate": 4.881777777777778e-05, "loss": 2.5044, "step": 1523 }, { "epoch": 12.192, "grad_norm": 74.69692993164062, "learning_rate": 4.8813333333333336e-05, "loss": 1.8105, "step": 1524 }, { "epoch": 12.2, "grad_norm": 76.14169311523438, "learning_rate": 4.880888888888889e-05, "loss": 2.0083, "step": 1525 }, { "epoch": 12.208, "grad_norm": 82.32415008544922, "learning_rate": 4.8804444444444445e-05, "loss": 1.4537, "step": 1526 }, { "epoch": 12.216, "grad_norm": 93.12499237060547, "learning_rate": 4.88e-05, "loss": 1.5202, "step": 1527 }, { "epoch": 12.224, 
"grad_norm": 43.49966049194336, "learning_rate": 4.8795555555555555e-05, "loss": 1.5177, "step": 1528 }, { "epoch": 12.232, "grad_norm": 33.4676399230957, "learning_rate": 4.879111111111112e-05, "loss": 1.6158, "step": 1529 }, { "epoch": 12.24, "grad_norm": 61.083160400390625, "learning_rate": 4.878666666666667e-05, "loss": 1.5626, "step": 1530 }, { "epoch": 12.248, "grad_norm": 68.8067855834961, "learning_rate": 4.8782222222222226e-05, "loss": 1.6504, "step": 1531 }, { "epoch": 12.256, "grad_norm": 30.79984474182129, "learning_rate": 4.8777777777777775e-05, "loss": 1.9935, "step": 1532 }, { "epoch": 12.264, "grad_norm": 26.221717834472656, "learning_rate": 4.8773333333333336e-05, "loss": 1.868, "step": 1533 }, { "epoch": 12.272, "grad_norm": 80.66954803466797, "learning_rate": 4.876888888888889e-05, "loss": 1.7951, "step": 1534 }, { "epoch": 12.28, "grad_norm": 23.555269241333008, "learning_rate": 4.8764444444444446e-05, "loss": 1.8365, "step": 1535 }, { "epoch": 12.288, "grad_norm": 39.451480865478516, "learning_rate": 4.876e-05, "loss": 2.4429, "step": 1536 }, { "epoch": 12.296, "grad_norm": 57.74699020385742, "learning_rate": 4.875555555555556e-05, "loss": 1.6952, "step": 1537 }, { "epoch": 12.304, "grad_norm": 40.0276985168457, "learning_rate": 4.875111111111112e-05, "loss": 1.2445, "step": 1538 }, { "epoch": 12.312, "grad_norm": 37.49695587158203, "learning_rate": 4.8746666666666665e-05, "loss": 1.7837, "step": 1539 }, { "epoch": 12.32, "grad_norm": 116.8416748046875, "learning_rate": 4.874222222222222e-05, "loss": 1.9674, "step": 1540 }, { "epoch": 12.328, "grad_norm": 27.486581802368164, "learning_rate": 4.873777777777778e-05, "loss": 1.373, "step": 1541 }, { "epoch": 12.336, "grad_norm": 43.08470916748047, "learning_rate": 4.8733333333333337e-05, "loss": 1.6803, "step": 1542 }, { "epoch": 12.344, "grad_norm": 48.31136703491211, "learning_rate": 4.872888888888889e-05, "loss": 1.8714, "step": 1543 }, { "epoch": 12.352, "grad_norm": 314.6670227050781, 
"learning_rate": 4.8724444444444446e-05, "loss": 1.7965, "step": 1544 }, { "epoch": 12.36, "grad_norm": 48.02731704711914, "learning_rate": 4.872000000000001e-05, "loss": 1.3125, "step": 1545 }, { "epoch": 12.368, "grad_norm": 41.0709114074707, "learning_rate": 4.8715555555555556e-05, "loss": 2.0084, "step": 1546 }, { "epoch": 12.376, "grad_norm": 31.731592178344727, "learning_rate": 4.871111111111111e-05, "loss": 1.9937, "step": 1547 }, { "epoch": 12.384, "grad_norm": 174.5908966064453, "learning_rate": 4.8706666666666666e-05, "loss": 1.7261, "step": 1548 }, { "epoch": 12.392, "grad_norm": 130.61087036132812, "learning_rate": 4.870222222222223e-05, "loss": 1.5556, "step": 1549 }, { "epoch": 12.4, "grad_norm": 43.47474670410156, "learning_rate": 4.869777777777778e-05, "loss": 1.1857, "step": 1550 }, { "epoch": 12.408, "grad_norm": 159.7042694091797, "learning_rate": 4.869333333333334e-05, "loss": 1.5104, "step": 1551 }, { "epoch": 12.416, "grad_norm": 65.83192443847656, "learning_rate": 4.868888888888889e-05, "loss": 1.6723, "step": 1552 }, { "epoch": 12.424, "grad_norm": 60.747833251953125, "learning_rate": 4.868444444444445e-05, "loss": 1.6204, "step": 1553 }, { "epoch": 12.432, "grad_norm": 36.43121337890625, "learning_rate": 4.868e-05, "loss": 1.7976, "step": 1554 }, { "epoch": 12.44, "grad_norm": 67.05439758300781, "learning_rate": 4.8675555555555556e-05, "loss": 1.4098, "step": 1555 }, { "epoch": 12.448, "grad_norm": 24.644227981567383, "learning_rate": 4.867111111111111e-05, "loss": 1.6957, "step": 1556 }, { "epoch": 12.456, "grad_norm": 46.278228759765625, "learning_rate": 4.866666666666667e-05, "loss": 1.1876, "step": 1557 }, { "epoch": 12.464, "grad_norm": 39.466548919677734, "learning_rate": 4.866222222222223e-05, "loss": 1.6675, "step": 1558 }, { "epoch": 12.472, "grad_norm": 51.8426399230957, "learning_rate": 4.865777777777778e-05, "loss": 1.8563, "step": 1559 }, { "epoch": 12.48, "grad_norm": 53.37568283081055, "learning_rate": 4.865333333333334e-05, 
"loss": 1.5264, "step": 1560 }, { "epoch": 12.488, "grad_norm": 39.678897857666016, "learning_rate": 4.864888888888889e-05, "loss": 1.3205, "step": 1561 }, { "epoch": 12.496, "grad_norm": 29.30678367614746, "learning_rate": 4.864444444444445e-05, "loss": 1.5932, "step": 1562 }, { "epoch": 12.504, "grad_norm": 29.876901626586914, "learning_rate": 4.864e-05, "loss": 1.889, "step": 1563 }, { "epoch": 12.512, "grad_norm": 40.97187042236328, "learning_rate": 4.863555555555556e-05, "loss": 2.8041, "step": 1564 }, { "epoch": 12.52, "grad_norm": 29.288118362426758, "learning_rate": 4.863111111111112e-05, "loss": 2.1056, "step": 1565 }, { "epoch": 12.528, "grad_norm": 60.67669677734375, "learning_rate": 4.862666666666667e-05, "loss": 2.2139, "step": 1566 }, { "epoch": 12.536, "grad_norm": 28.878990173339844, "learning_rate": 4.862222222222222e-05, "loss": 1.2244, "step": 1567 }, { "epoch": 12.544, "grad_norm": 62.119136810302734, "learning_rate": 4.8617777777777776e-05, "loss": 1.2723, "step": 1568 }, { "epoch": 12.552, "grad_norm": 50.40243911743164, "learning_rate": 4.861333333333333e-05, "loss": 1.53, "step": 1569 }, { "epoch": 12.56, "grad_norm": 25.254005432128906, "learning_rate": 4.860888888888889e-05, "loss": 1.5376, "step": 1570 }, { "epoch": 12.568, "grad_norm": 33.354736328125, "learning_rate": 4.860444444444445e-05, "loss": 1.7443, "step": 1571 }, { "epoch": 12.576, "grad_norm": 31.052030563354492, "learning_rate": 4.86e-05, "loss": 1.446, "step": 1572 }, { "epoch": 12.584, "grad_norm": 34.65858459472656, "learning_rate": 4.859555555555556e-05, "loss": 1.6002, "step": 1573 }, { "epoch": 12.592, "grad_norm": 31.164566040039062, "learning_rate": 4.859111111111111e-05, "loss": 1.5237, "step": 1574 }, { "epoch": 12.6, "grad_norm": 29.190078735351562, "learning_rate": 4.858666666666667e-05, "loss": 1.5744, "step": 1575 }, { "epoch": 12.608, "grad_norm": 18.736042022705078, "learning_rate": 4.858222222222222e-05, "loss": 1.421, "step": 1576 }, { "epoch": 12.616, 
"grad_norm": 30.79888916015625, "learning_rate": 4.8577777777777776e-05, "loss": 1.5875, "step": 1577 }, { "epoch": 12.624, "grad_norm": 31.974958419799805, "learning_rate": 4.857333333333334e-05, "loss": 1.208, "step": 1578 }, { "epoch": 12.632, "grad_norm": 126.59354400634766, "learning_rate": 4.856888888888889e-05, "loss": 1.4626, "step": 1579 }, { "epoch": 12.64, "grad_norm": 28.430614471435547, "learning_rate": 4.856444444444445e-05, "loss": 1.3589, "step": 1580 }, { "epoch": 12.648, "grad_norm": 33.24238967895508, "learning_rate": 4.856e-05, "loss": 1.6569, "step": 1581 }, { "epoch": 12.656, "grad_norm": 37.70143127441406, "learning_rate": 4.855555555555556e-05, "loss": 1.5201, "step": 1582 }, { "epoch": 12.664, "grad_norm": 54.56144332885742, "learning_rate": 4.855111111111111e-05, "loss": 1.6838, "step": 1583 }, { "epoch": 12.672, "grad_norm": 24.919221878051758, "learning_rate": 4.854666666666667e-05, "loss": 2.1972, "step": 1584 }, { "epoch": 12.68, "grad_norm": 27.43465805053711, "learning_rate": 4.854222222222222e-05, "loss": 1.8217, "step": 1585 }, { "epoch": 12.688, "grad_norm": 20.11979866027832, "learning_rate": 4.8537777777777784e-05, "loss": 3.3402, "step": 1586 }, { "epoch": 12.696, "grad_norm": 44.63079071044922, "learning_rate": 4.853333333333334e-05, "loss": 1.5488, "step": 1587 }, { "epoch": 12.704, "grad_norm": 38.37836837768555, "learning_rate": 4.852888888888889e-05, "loss": 1.9041, "step": 1588 }, { "epoch": 12.712, "grad_norm": 43.6832389831543, "learning_rate": 4.852444444444444e-05, "loss": 1.6531, "step": 1589 }, { "epoch": 12.72, "grad_norm": 34.9025764465332, "learning_rate": 4.852e-05, "loss": 1.3937, "step": 1590 }, { "epoch": 12.728, "grad_norm": 110.5636215209961, "learning_rate": 4.851555555555556e-05, "loss": 2.5628, "step": 1591 }, { "epoch": 12.736, "grad_norm": 172.65628051757812, "learning_rate": 4.851111111111111e-05, "loss": 1.5375, "step": 1592 }, { "epoch": 12.744, "grad_norm": 72.83882141113281, "learning_rate": 
4.850666666666667e-05, "loss": 1.7167, "step": 1593 }, { "epoch": 12.752, "grad_norm": 80.27349853515625, "learning_rate": 4.850222222222223e-05, "loss": 1.5024, "step": 1594 }, { "epoch": 12.76, "grad_norm": 27.96427345275879, "learning_rate": 4.8497777777777784e-05, "loss": 1.5841, "step": 1595 }, { "epoch": 12.768, "grad_norm": 28.983638763427734, "learning_rate": 4.849333333333333e-05, "loss": 2.0008, "step": 1596 }, { "epoch": 12.776, "grad_norm": 23.005142211914062, "learning_rate": 4.848888888888889e-05, "loss": 1.3811, "step": 1597 }, { "epoch": 12.784, "grad_norm": 22.8389835357666, "learning_rate": 4.848444444444445e-05, "loss": 1.5, "step": 1598 }, { "epoch": 12.792, "grad_norm": 27.945785522460938, "learning_rate": 4.8480000000000003e-05, "loss": 1.6337, "step": 1599 }, { "epoch": 12.8, "grad_norm": 29.70779800415039, "learning_rate": 4.847555555555556e-05, "loss": 1.1559, "step": 1600 }, { "epoch": 12.808, "grad_norm": 28.592899322509766, "learning_rate": 4.847111111111111e-05, "loss": 1.8823, "step": 1601 }, { "epoch": 12.816, "grad_norm": 30.68102264404297, "learning_rate": 4.8466666666666675e-05, "loss": 1.484, "step": 1602 }, { "epoch": 12.824, "grad_norm": 26.364891052246094, "learning_rate": 4.846222222222222e-05, "loss": 1.7617, "step": 1603 }, { "epoch": 12.832, "grad_norm": 34.65618133544922, "learning_rate": 4.845777777777778e-05, "loss": 1.6318, "step": 1604 }, { "epoch": 12.84, "grad_norm": 34.213321685791016, "learning_rate": 4.845333333333333e-05, "loss": 1.3303, "step": 1605 }, { "epoch": 12.848, "grad_norm": 43.37835693359375, "learning_rate": 4.8448888888888894e-05, "loss": 1.2391, "step": 1606 }, { "epoch": 12.856, "grad_norm": 27.17245864868164, "learning_rate": 4.844444444444445e-05, "loss": 1.6391, "step": 1607 }, { "epoch": 12.864, "grad_norm": 32.84590530395508, "learning_rate": 4.8440000000000004e-05, "loss": 1.5443, "step": 1608 }, { "epoch": 12.872, "grad_norm": 40.75654602050781, "learning_rate": 4.843555555555556e-05, 
"loss": 1.7122, "step": 1609 }, { "epoch": 12.88, "grad_norm": 34.62400817871094, "learning_rate": 4.8431111111111113e-05, "loss": 1.0966, "step": 1610 }, { "epoch": 12.888, "grad_norm": 23.866859436035156, "learning_rate": 4.842666666666667e-05, "loss": 1.3764, "step": 1611 }, { "epoch": 12.896, "grad_norm": 29.718366622924805, "learning_rate": 4.842222222222222e-05, "loss": 1.3077, "step": 1612 }, { "epoch": 12.904, "grad_norm": 32.09710693359375, "learning_rate": 4.841777777777778e-05, "loss": 1.3118, "step": 1613 }, { "epoch": 12.912, "grad_norm": 43.02253341674805, "learning_rate": 4.841333333333334e-05, "loss": 1.2138, "step": 1614 }, { "epoch": 12.92, "grad_norm": 42.241554260253906, "learning_rate": 4.8408888888888894e-05, "loss": 1.4116, "step": 1615 }, { "epoch": 12.928, "grad_norm": 83.4549789428711, "learning_rate": 4.840444444444445e-05, "loss": 2.3464, "step": 1616 }, { "epoch": 12.936, "grad_norm": 52.65144348144531, "learning_rate": 4.8400000000000004e-05, "loss": 1.4709, "step": 1617 }, { "epoch": 12.943999999999999, "grad_norm": 20.15341567993164, "learning_rate": 4.839555555555556e-05, "loss": 1.455, "step": 1618 }, { "epoch": 12.952, "grad_norm": 73.11742401123047, "learning_rate": 4.8391111111111114e-05, "loss": 1.3371, "step": 1619 }, { "epoch": 12.96, "grad_norm": 28.011749267578125, "learning_rate": 4.838666666666667e-05, "loss": 1.3039, "step": 1620 }, { "epoch": 12.968, "grad_norm": 31.393781661987305, "learning_rate": 4.8382222222222224e-05, "loss": 1.3109, "step": 1621 }, { "epoch": 12.975999999999999, "grad_norm": 29.577198028564453, "learning_rate": 4.837777777777778e-05, "loss": 1.1238, "step": 1622 }, { "epoch": 12.984, "grad_norm": 47.09393310546875, "learning_rate": 4.837333333333334e-05, "loss": 1.2008, "step": 1623 }, { "epoch": 12.992, "grad_norm": 23.713180541992188, "learning_rate": 4.836888888888889e-05, "loss": 1.4196, "step": 1624 }, { "epoch": 13.0, "grad_norm": 30.57044219970703, "learning_rate": 4.836444444444444e-05, 
"loss": 1.4551, "step": 1625 }, { "epoch": 13.0, "eval_loss": 1.5274765491485596, "eval_map": 0.1849, "eval_map_50": 0.4027, "eval_map_75": 0.1399, "eval_map_Coverall": 0.4948, "eval_map_Face_Shield": 0.126, "eval_map_Gloves": 0.126, "eval_map_Goggles": 0.0508, "eval_map_Mask": 0.1267, "eval_map_large": 0.2794, "eval_map_medium": 0.1251, "eval_map_small": 0.0842, "eval_mar_1": 0.2018, "eval_mar_10": 0.4068, "eval_mar_100": 0.424, "eval_mar_100_Coverall": 0.7311, "eval_mar_100_Face_Shield": 0.5353, "eval_mar_100_Gloves": 0.2525, "eval_mar_100_Goggles": 0.3125, "eval_mar_100_Mask": 0.2885, "eval_mar_large": 0.5881, "eval_mar_medium": 0.3069, "eval_mar_small": 0.1329, "eval_runtime": 2.4945, "eval_samples_per_second": 11.625, "eval_steps_per_second": 0.802, "step": 1625 }, { "epoch": 13.008, "grad_norm": 117.69596862792969, "learning_rate": 4.836e-05, "loss": 1.3604, "step": 1626 }, { "epoch": 13.016, "grad_norm": 37.04279708862305, "learning_rate": 4.835555555555556e-05, "loss": 1.2694, "step": 1627 }, { "epoch": 13.024, "grad_norm": 37.7051887512207, "learning_rate": 4.8351111111111114e-05, "loss": 1.1947, "step": 1628 }, { "epoch": 13.032, "grad_norm": 52.2422981262207, "learning_rate": 4.834666666666667e-05, "loss": 1.7146, "step": 1629 }, { "epoch": 13.04, "grad_norm": 28.35936164855957, "learning_rate": 4.8342222222222224e-05, "loss": 1.3535, "step": 1630 }, { "epoch": 13.048, "grad_norm": 63.156654357910156, "learning_rate": 4.833777777777778e-05, "loss": 1.1327, "step": 1631 }, { "epoch": 13.056, "grad_norm": 34.68851089477539, "learning_rate": 4.8333333333333334e-05, "loss": 1.4553, "step": 1632 }, { "epoch": 13.064, "grad_norm": 29.031015396118164, "learning_rate": 4.832888888888889e-05, "loss": 1.2449, "step": 1633 }, { "epoch": 13.072, "grad_norm": 37.12959671020508, "learning_rate": 4.832444444444444e-05, "loss": 1.8276, "step": 1634 }, { "epoch": 13.08, "grad_norm": 17.20656394958496, "learning_rate": 4.8320000000000005e-05, "loss": 1.5738, "step": 1635 
}, { "epoch": 13.088, "grad_norm": 44.239681243896484, "learning_rate": 4.831555555555556e-05, "loss": 1.4978, "step": 1636 }, { "epoch": 13.096, "grad_norm": 39.9005241394043, "learning_rate": 4.8311111111111115e-05, "loss": 1.432, "step": 1637 }, { "epoch": 13.104, "grad_norm": 25.369693756103516, "learning_rate": 4.830666666666667e-05, "loss": 1.2691, "step": 1638 }, { "epoch": 13.112, "grad_norm": 38.23689651489258, "learning_rate": 4.8302222222222224e-05, "loss": 1.3618, "step": 1639 }, { "epoch": 13.12, "grad_norm": 48.00990676879883, "learning_rate": 4.829777777777778e-05, "loss": 1.4294, "step": 1640 }, { "epoch": 13.128, "grad_norm": 154.47840881347656, "learning_rate": 4.8293333333333334e-05, "loss": 1.1939, "step": 1641 }, { "epoch": 13.136, "grad_norm": 33.5096321105957, "learning_rate": 4.828888888888889e-05, "loss": 1.6728, "step": 1642 }, { "epoch": 13.144, "grad_norm": 21.528362274169922, "learning_rate": 4.828444444444445e-05, "loss": 1.2844, "step": 1643 }, { "epoch": 13.152, "grad_norm": 68.63953399658203, "learning_rate": 4.8280000000000005e-05, "loss": 1.2027, "step": 1644 }, { "epoch": 13.16, "grad_norm": 174.36895751953125, "learning_rate": 4.827555555555556e-05, "loss": 1.6804, "step": 1645 }, { "epoch": 13.168, "grad_norm": 119.14818572998047, "learning_rate": 4.827111111111111e-05, "loss": 1.4987, "step": 1646 }, { "epoch": 13.176, "grad_norm": 31.494951248168945, "learning_rate": 4.826666666666667e-05, "loss": 1.4116, "step": 1647 }, { "epoch": 13.184, "grad_norm": 43.167545318603516, "learning_rate": 4.8262222222222225e-05, "loss": 1.4676, "step": 1648 }, { "epoch": 13.192, "grad_norm": 61.773990631103516, "learning_rate": 4.825777777777778e-05, "loss": 1.2478, "step": 1649 }, { "epoch": 13.2, "grad_norm": 57.51377487182617, "learning_rate": 4.8253333333333334e-05, "loss": 1.6405, "step": 1650 }, { "epoch": 13.208, "grad_norm": 137.13671875, "learning_rate": 4.8248888888888896e-05, "loss": 1.903, "step": 1651 }, { "epoch": 13.216, 
"grad_norm": 35.82402420043945, "learning_rate": 4.824444444444445e-05, "loss": 1.4671, "step": 1652 }, { "epoch": 13.224, "grad_norm": 27.736406326293945, "learning_rate": 4.824e-05, "loss": 0.974, "step": 1653 }, { "epoch": 13.232, "grad_norm": 32.68211364746094, "learning_rate": 4.8235555555555554e-05, "loss": 1.2994, "step": 1654 }, { "epoch": 13.24, "grad_norm": 35.780033111572266, "learning_rate": 4.8231111111111115e-05, "loss": 0.9833, "step": 1655 }, { "epoch": 13.248, "grad_norm": 30.694067001342773, "learning_rate": 4.822666666666667e-05, "loss": 2.1475, "step": 1656 }, { "epoch": 13.256, "grad_norm": 17.492719650268555, "learning_rate": 4.8222222222222225e-05, "loss": 1.1124, "step": 1657 }, { "epoch": 13.264, "grad_norm": 28.84161949157715, "learning_rate": 4.821777777777778e-05, "loss": 1.1699, "step": 1658 }, { "epoch": 13.272, "grad_norm": 37.46064376831055, "learning_rate": 4.8213333333333335e-05, "loss": 1.1138, "step": 1659 }, { "epoch": 13.28, "grad_norm": 20.144588470458984, "learning_rate": 4.820888888888889e-05, "loss": 1.3862, "step": 1660 }, { "epoch": 13.288, "grad_norm": 39.07893371582031, "learning_rate": 4.8204444444444444e-05, "loss": 2.4144, "step": 1661 }, { "epoch": 13.296, "grad_norm": 22.14678382873535, "learning_rate": 4.82e-05, "loss": 1.5836, "step": 1662 }, { "epoch": 13.304, "grad_norm": 30.904993057250977, "learning_rate": 4.819555555555556e-05, "loss": 1.2812, "step": 1663 }, { "epoch": 13.312, "grad_norm": 50.85624313354492, "learning_rate": 4.8191111111111116e-05, "loss": 1.9092, "step": 1664 }, { "epoch": 13.32, "grad_norm": 41.945125579833984, "learning_rate": 4.818666666666667e-05, "loss": 1.2352, "step": 1665 }, { "epoch": 13.328, "grad_norm": 32.267581939697266, "learning_rate": 4.8182222222222225e-05, "loss": 1.0112, "step": 1666 }, { "epoch": 13.336, "grad_norm": 42.12167739868164, "learning_rate": 4.817777777777778e-05, "loss": 1.8994, "step": 1667 }, { "epoch": 13.344, "grad_norm": 63.09218215942383, 
"learning_rate": 4.8173333333333335e-05, "loss": 1.4543, "step": 1668 }, { "epoch": 13.352, "grad_norm": 18.7454776763916, "learning_rate": 4.816888888888889e-05, "loss": 1.2923, "step": 1669 }, { "epoch": 13.36, "grad_norm": 61.79342269897461, "learning_rate": 4.8164444444444445e-05, "loss": 1.2718, "step": 1670 }, { "epoch": 13.368, "grad_norm": 23.67089080810547, "learning_rate": 4.816e-05, "loss": 2.1815, "step": 1671 }, { "epoch": 13.376, "grad_norm": 21.612102508544922, "learning_rate": 4.815555555555556e-05, "loss": 0.9903, "step": 1672 }, { "epoch": 13.384, "grad_norm": 34.921566009521484, "learning_rate": 4.8151111111111116e-05, "loss": 1.8794, "step": 1673 }, { "epoch": 13.392, "grad_norm": 64.19129943847656, "learning_rate": 4.814666666666667e-05, "loss": 1.5359, "step": 1674 }, { "epoch": 13.4, "grad_norm": 35.16444778442383, "learning_rate": 4.814222222222222e-05, "loss": 1.2162, "step": 1675 }, { "epoch": 13.408, "grad_norm": 20.983930587768555, "learning_rate": 4.813777777777778e-05, "loss": 1.7423, "step": 1676 }, { "epoch": 13.416, "grad_norm": 80.36578369140625, "learning_rate": 4.8133333333333336e-05, "loss": 1.2709, "step": 1677 }, { "epoch": 13.424, "grad_norm": 312.67724609375, "learning_rate": 4.812888888888889e-05, "loss": 2.8645, "step": 1678 }, { "epoch": 13.432, "grad_norm": 35.45650863647461, "learning_rate": 4.8124444444444445e-05, "loss": 1.5578, "step": 1679 }, { "epoch": 13.44, "grad_norm": 32.03900146484375, "learning_rate": 4.812000000000001e-05, "loss": 1.191, "step": 1680 }, { "epoch": 13.448, "grad_norm": 49.747859954833984, "learning_rate": 4.8115555555555555e-05, "loss": 1.1263, "step": 1681 }, { "epoch": 13.456, "grad_norm": 113.69842529296875, "learning_rate": 4.811111111111111e-05, "loss": 2.2116, "step": 1682 }, { "epoch": 13.464, "grad_norm": 94.71424102783203, "learning_rate": 4.8106666666666665e-05, "loss": 1.4966, "step": 1683 }, { "epoch": 13.472, "grad_norm": 19.359590530395508, "learning_rate": 
4.8102222222222226e-05, "loss": 1.0283, "step": 1684 }, { "epoch": 13.48, "grad_norm": 28.71954917907715, "learning_rate": 4.809777777777778e-05, "loss": 1.899, "step": 1685 }, { "epoch": 13.488, "grad_norm": 37.97574234008789, "learning_rate": 4.8093333333333336e-05, "loss": 1.3438, "step": 1686 }, { "epoch": 13.496, "grad_norm": 34.794864654541016, "learning_rate": 4.808888888888889e-05, "loss": 1.4266, "step": 1687 }, { "epoch": 13.504, "grad_norm": 51.38175582885742, "learning_rate": 4.8084444444444446e-05, "loss": 1.1553, "step": 1688 }, { "epoch": 13.512, "grad_norm": 38.431270599365234, "learning_rate": 4.808e-05, "loss": 1.1713, "step": 1689 }, { "epoch": 13.52, "grad_norm": 116.37841033935547, "learning_rate": 4.8075555555555555e-05, "loss": 1.6779, "step": 1690 }, { "epoch": 13.528, "grad_norm": 39.13779830932617, "learning_rate": 4.807111111111111e-05, "loss": 1.4678, "step": 1691 }, { "epoch": 13.536, "grad_norm": 44.3990478515625, "learning_rate": 4.806666666666667e-05, "loss": 1.251, "step": 1692 }, { "epoch": 13.544, "grad_norm": 41.35060501098633, "learning_rate": 4.8062222222222227e-05, "loss": 1.545, "step": 1693 }, { "epoch": 13.552, "grad_norm": 32.87495422363281, "learning_rate": 4.805777777777778e-05, "loss": 1.6143, "step": 1694 }, { "epoch": 13.56, "grad_norm": 31.451509475708008, "learning_rate": 4.8053333333333336e-05, "loss": 1.6091, "step": 1695 }, { "epoch": 13.568, "grad_norm": 73.0757064819336, "learning_rate": 4.804888888888889e-05, "loss": 3.0907, "step": 1696 }, { "epoch": 13.576, "grad_norm": 54.99778366088867, "learning_rate": 4.8044444444444446e-05, "loss": 1.2643, "step": 1697 }, { "epoch": 13.584, "grad_norm": 41.0217170715332, "learning_rate": 4.804e-05, "loss": 1.6758, "step": 1698 }, { "epoch": 13.592, "grad_norm": 29.875581741333008, "learning_rate": 4.8035555555555556e-05, "loss": 1.8483, "step": 1699 }, { "epoch": 13.6, "grad_norm": 23.131391525268555, "learning_rate": 4.803111111111112e-05, "loss": 1.3951, "step": 1700 
}, { "epoch": 13.608, "grad_norm": 28.02032470703125, "learning_rate": 4.802666666666667e-05, "loss": 2.215, "step": 1701 }, { "epoch": 13.616, "grad_norm": 25.066680908203125, "learning_rate": 4.802222222222223e-05, "loss": 1.57, "step": 1702 }, { "epoch": 13.624, "grad_norm": 34.320926666259766, "learning_rate": 4.8017777777777775e-05, "loss": 1.8706, "step": 1703 }, { "epoch": 13.632, "grad_norm": 42.65947341918945, "learning_rate": 4.801333333333334e-05, "loss": 1.6803, "step": 1704 }, { "epoch": 13.64, "grad_norm": 133.70260620117188, "learning_rate": 4.800888888888889e-05, "loss": 1.5563, "step": 1705 }, { "epoch": 13.648, "grad_norm": 52.15818405151367, "learning_rate": 4.8004444444444446e-05, "loss": 1.0749, "step": 1706 }, { "epoch": 13.656, "grad_norm": 51.658538818359375, "learning_rate": 4.8e-05, "loss": 1.6634, "step": 1707 }, { "epoch": 13.664, "grad_norm": 26.996610641479492, "learning_rate": 4.799555555555556e-05, "loss": 0.9285, "step": 1708 }, { "epoch": 13.672, "grad_norm": 63.92081832885742, "learning_rate": 4.799111111111112e-05, "loss": 1.2542, "step": 1709 }, { "epoch": 13.68, "grad_norm": 46.89669418334961, "learning_rate": 4.7986666666666666e-05, "loss": 1.2695, "step": 1710 }, { "epoch": 13.688, "grad_norm": 32.33269119262695, "learning_rate": 4.798222222222222e-05, "loss": 1.9386, "step": 1711 }, { "epoch": 13.696, "grad_norm": 28.9832706451416, "learning_rate": 4.797777777777778e-05, "loss": 1.227, "step": 1712 }, { "epoch": 13.704, "grad_norm": 26.0871639251709, "learning_rate": 4.797333333333334e-05, "loss": 1.335, "step": 1713 }, { "epoch": 13.712, "grad_norm": 21.68472671508789, "learning_rate": 4.796888888888889e-05, "loss": 1.5559, "step": 1714 }, { "epoch": 13.72, "grad_norm": 55.038551330566406, "learning_rate": 4.796444444444445e-05, "loss": 1.2816, "step": 1715 }, { "epoch": 13.728, "grad_norm": 24.59486961364746, "learning_rate": 4.796e-05, "loss": 1.7357, "step": 1716 }, { "epoch": 13.736, "grad_norm": 30.64276885986328, 
"learning_rate": 4.7955555555555556e-05, "loss": 1.6698, "step": 1717 }, { "epoch": 13.744, "grad_norm": 40.119293212890625, "learning_rate": 4.795111111111111e-05, "loss": 1.3448, "step": 1718 }, { "epoch": 13.752, "grad_norm": 42.46113204956055, "learning_rate": 4.7946666666666666e-05, "loss": 1.5409, "step": 1719 }, { "epoch": 13.76, "grad_norm": 66.24905395507812, "learning_rate": 4.794222222222223e-05, "loss": 1.1127, "step": 1720 }, { "epoch": 13.768, "grad_norm": 343.10931396484375, "learning_rate": 4.793777777777778e-05, "loss": 1.6015, "step": 1721 }, { "epoch": 13.776, "grad_norm": 34.24440002441406, "learning_rate": 4.793333333333334e-05, "loss": 1.451, "step": 1722 }, { "epoch": 13.784, "grad_norm": 21.393787384033203, "learning_rate": 4.792888888888889e-05, "loss": 1.1674, "step": 1723 }, { "epoch": 13.792, "grad_norm": 20.256549835205078, "learning_rate": 4.792444444444445e-05, "loss": 1.2103, "step": 1724 }, { "epoch": 13.8, "grad_norm": 23.00281524658203, "learning_rate": 4.792e-05, "loss": 1.8653, "step": 1725 }, { "epoch": 13.808, "grad_norm": 30.68799591064453, "learning_rate": 4.791555555555556e-05, "loss": 1.4091, "step": 1726 }, { "epoch": 13.816, "grad_norm": 28.07398796081543, "learning_rate": 4.791111111111111e-05, "loss": 1.291, "step": 1727 }, { "epoch": 13.824, "grad_norm": 39.8812255859375, "learning_rate": 4.7906666666666667e-05, "loss": 1.5346, "step": 1728 }, { "epoch": 13.832, "grad_norm": 26.216766357421875, "learning_rate": 4.790222222222223e-05, "loss": 1.2558, "step": 1729 }, { "epoch": 13.84, "grad_norm": 36.16423797607422, "learning_rate": 4.789777777777778e-05, "loss": 1.7478, "step": 1730 }, { "epoch": 13.848, "grad_norm": 33.316978454589844, "learning_rate": 4.789333333333334e-05, "loss": 1.7753, "step": 1731 }, { "epoch": 13.856, "grad_norm": 38.93878936767578, "learning_rate": 4.7888888888888886e-05, "loss": 1.5244, "step": 1732 }, { "epoch": 13.864, "grad_norm": 33.247989654541016, "learning_rate": 4.788444444444445e-05, 
"loss": 1.6428, "step": 1733 }, { "epoch": 13.872, "grad_norm": 36.40557098388672, "learning_rate": 4.788e-05, "loss": 1.5213, "step": 1734 }, { "epoch": 13.88, "grad_norm": 33.080833435058594, "learning_rate": 4.787555555555556e-05, "loss": 1.5124, "step": 1735 }, { "epoch": 13.888, "grad_norm": 20.405921936035156, "learning_rate": 4.787111111111111e-05, "loss": 1.408, "step": 1736 }, { "epoch": 13.896, "grad_norm": 37.1115837097168, "learning_rate": 4.7866666666666674e-05, "loss": 2.0215, "step": 1737 }, { "epoch": 13.904, "grad_norm": 29.418970108032227, "learning_rate": 4.786222222222222e-05, "loss": 1.593, "step": 1738 }, { "epoch": 13.912, "grad_norm": 59.96388244628906, "learning_rate": 4.7857777777777777e-05, "loss": 1.4959, "step": 1739 }, { "epoch": 13.92, "grad_norm": 32.097408294677734, "learning_rate": 4.785333333333333e-05, "loss": 1.0847, "step": 1740 }, { "epoch": 13.928, "grad_norm": 22.386520385742188, "learning_rate": 4.784888888888889e-05, "loss": 1.7633, "step": 1741 }, { "epoch": 13.936, "grad_norm": 31.49173355102539, "learning_rate": 4.784444444444445e-05, "loss": 1.1058, "step": 1742 }, { "epoch": 13.943999999999999, "grad_norm": 18.426929473876953, "learning_rate": 4.784e-05, "loss": 1.3973, "step": 1743 }, { "epoch": 13.952, "grad_norm": 44.79645538330078, "learning_rate": 4.783555555555556e-05, "loss": 2.4458, "step": 1744 }, { "epoch": 13.96, "grad_norm": 51.03322219848633, "learning_rate": 4.783111111111111e-05, "loss": 1.5168, "step": 1745 }, { "epoch": 13.968, "grad_norm": 48.441097259521484, "learning_rate": 4.782666666666667e-05, "loss": 1.3369, "step": 1746 }, { "epoch": 13.975999999999999, "grad_norm": 42.83855438232422, "learning_rate": 4.782222222222222e-05, "loss": 1.314, "step": 1747 }, { "epoch": 13.984, "grad_norm": 28.40605926513672, "learning_rate": 4.781777777777778e-05, "loss": 1.7463, "step": 1748 }, { "epoch": 13.992, "grad_norm": 35.577850341796875, "learning_rate": 4.781333333333334e-05, "loss": 1.1869, "step": 1749 
}, { "epoch": 14.0, "grad_norm": 47.600250244140625, "learning_rate": 4.7808888888888893e-05, "loss": 1.4029, "step": 1750 }, { "epoch": 14.0, "eval_loss": 1.5050063133239746, "eval_map": 0.2128, "eval_map_50": 0.4475, "eval_map_75": 0.1659, "eval_map_Coverall": 0.4712, "eval_map_Face_Shield": 0.1314, "eval_map_Gloves": 0.1376, "eval_map_Goggles": 0.0687, "eval_map_Mask": 0.2551, "eval_map_large": 0.2442, "eval_map_medium": 0.1483, "eval_map_small": 0.1071, "eval_mar_1": 0.2174, "eval_mar_10": 0.4283, "eval_mar_100": 0.4519, "eval_mar_100_Coverall": 0.6844, "eval_mar_100_Face_Shield": 0.5235, "eval_mar_100_Gloves": 0.2574, "eval_mar_100_Goggles": 0.3844, "eval_mar_100_Mask": 0.4096, "eval_mar_large": 0.5304, "eval_mar_medium": 0.3258, "eval_mar_small": 0.1861, "eval_runtime": 2.4791, "eval_samples_per_second": 11.698, "eval_steps_per_second": 0.807, "step": 1750 }, { "epoch": 14.008, "grad_norm": 26.732858657836914, "learning_rate": 4.780444444444445e-05, "loss": 1.3791, "step": 1751 }, { "epoch": 14.016, "grad_norm": 21.42131996154785, "learning_rate": 4.78e-05, "loss": 1.468, "step": 1752 }, { "epoch": 14.024, "grad_norm": 35.940948486328125, "learning_rate": 4.779555555555556e-05, "loss": 1.4412, "step": 1753 }, { "epoch": 14.032, "grad_norm": 29.50490951538086, "learning_rate": 4.779111111111111e-05, "loss": 1.6694, "step": 1754 }, { "epoch": 14.04, "grad_norm": 34.27052307128906, "learning_rate": 4.778666666666667e-05, "loss": 1.1227, "step": 1755 }, { "epoch": 14.048, "grad_norm": 20.495655059814453, "learning_rate": 4.778222222222222e-05, "loss": 1.5988, "step": 1756 }, { "epoch": 14.056, "grad_norm": 41.75017547607422, "learning_rate": 4.7777777777777784e-05, "loss": 1.2274, "step": 1757 }, { "epoch": 14.064, "grad_norm": 75.93653869628906, "learning_rate": 4.777333333333334e-05, "loss": 1.7702, "step": 1758 }, { "epoch": 14.072, "grad_norm": 36.14499282836914, "learning_rate": 4.7768888888888894e-05, "loss": 1.352, "step": 1759 }, { "epoch": 14.08, 
"grad_norm": 18.484086990356445, "learning_rate": 4.776444444444444e-05, "loss": 1.4277, "step": 1760 }, { "epoch": 14.088, "grad_norm": 42.35851287841797, "learning_rate": 4.7760000000000004e-05, "loss": 1.1978, "step": 1761 }, { "epoch": 14.096, "grad_norm": 28.448150634765625, "learning_rate": 4.775555555555556e-05, "loss": 1.6881, "step": 1762 }, { "epoch": 14.104, "grad_norm": 17.97579002380371, "learning_rate": 4.775111111111111e-05, "loss": 1.43, "step": 1763 }, { "epoch": 14.112, "grad_norm": 26.349641799926758, "learning_rate": 4.774666666666667e-05, "loss": 1.3865, "step": 1764 }, { "epoch": 14.12, "grad_norm": 43.00653839111328, "learning_rate": 4.774222222222223e-05, "loss": 1.233, "step": 1765 }, { "epoch": 14.128, "grad_norm": 30.044464111328125, "learning_rate": 4.7737777777777785e-05, "loss": 1.7376, "step": 1766 }, { "epoch": 14.136, "grad_norm": 39.896976470947266, "learning_rate": 4.773333333333333e-05, "loss": 1.9345, "step": 1767 }, { "epoch": 14.144, "grad_norm": 27.48199462890625, "learning_rate": 4.772888888888889e-05, "loss": 1.4348, "step": 1768 }, { "epoch": 14.152, "grad_norm": 101.60724639892578, "learning_rate": 4.772444444444445e-05, "loss": 1.5073, "step": 1769 }, { "epoch": 14.16, "grad_norm": 30.8208065032959, "learning_rate": 4.7720000000000004e-05, "loss": 1.2728, "step": 1770 }, { "epoch": 14.168, "grad_norm": 29.820528030395508, "learning_rate": 4.771555555555556e-05, "loss": 1.4421, "step": 1771 }, { "epoch": 14.176, "grad_norm": 18.56369972229004, "learning_rate": 4.7711111111111114e-05, "loss": 1.2776, "step": 1772 }, { "epoch": 14.184, "grad_norm": 25.18110466003418, "learning_rate": 4.770666666666667e-05, "loss": 1.3729, "step": 1773 }, { "epoch": 14.192, "grad_norm": 38.26350402832031, "learning_rate": 4.770222222222222e-05, "loss": 1.6573, "step": 1774 }, { "epoch": 14.2, "grad_norm": 16.696102142333984, "learning_rate": 4.769777777777778e-05, "loss": 1.6492, "step": 1775 }, { "epoch": 14.208, "grad_norm": 
24.591102600097656, "learning_rate": 4.769333333333333e-05, "loss": 1.382, "step": 1776 }, { "epoch": 14.216, "grad_norm": 61.41107177734375, "learning_rate": 4.768888888888889e-05, "loss": 1.6045, "step": 1777 }, { "epoch": 14.224, "grad_norm": 31.69384002685547, "learning_rate": 4.768444444444445e-05, "loss": 1.0453, "step": 1778 }, { "epoch": 14.232, "grad_norm": 39.4390983581543, "learning_rate": 4.7680000000000004e-05, "loss": 1.5521, "step": 1779 }, { "epoch": 14.24, "grad_norm": 24.988037109375, "learning_rate": 4.767555555555556e-05, "loss": 1.1081, "step": 1780 }, { "epoch": 14.248, "grad_norm": 50.235321044921875, "learning_rate": 4.7671111111111114e-05, "loss": 1.0969, "step": 1781 }, { "epoch": 14.256, "grad_norm": 41.04383087158203, "learning_rate": 4.766666666666667e-05, "loss": 2.5774, "step": 1782 }, { "epoch": 14.264, "grad_norm": 31.11138916015625, "learning_rate": 4.7662222222222224e-05, "loss": 1.1952, "step": 1783 }, { "epoch": 14.272, "grad_norm": 31.691486358642578, "learning_rate": 4.765777777777778e-05, "loss": 1.8692, "step": 1784 }, { "epoch": 14.28, "grad_norm": 20.66181755065918, "learning_rate": 4.765333333333333e-05, "loss": 1.218, "step": 1785 }, { "epoch": 14.288, "grad_norm": 18.727266311645508, "learning_rate": 4.7648888888888895e-05, "loss": 1.4488, "step": 1786 }, { "epoch": 14.296, "grad_norm": 17.82120132446289, "learning_rate": 4.764444444444445e-05, "loss": 1.3967, "step": 1787 }, { "epoch": 14.304, "grad_norm": 31.512653350830078, "learning_rate": 4.7640000000000005e-05, "loss": 1.4934, "step": 1788 }, { "epoch": 14.312, "grad_norm": 44.41647720336914, "learning_rate": 4.763555555555555e-05, "loss": 1.4573, "step": 1789 }, { "epoch": 14.32, "grad_norm": 20.90293312072754, "learning_rate": 4.7631111111111114e-05, "loss": 1.7709, "step": 1790 }, { "epoch": 14.328, "grad_norm": 46.4466552734375, "learning_rate": 4.762666666666667e-05, "loss": 1.228, "step": 1791 }, { "epoch": 14.336, "grad_norm": 32.725196838378906, 
"learning_rate": 4.7622222222222224e-05, "loss": 1.167, "step": 1792 }, { "epoch": 14.344, "grad_norm": 40.56018829345703, "learning_rate": 4.761777777777778e-05, "loss": 1.1016, "step": 1793 }, { "epoch": 14.352, "grad_norm": 37.5616569519043, "learning_rate": 4.761333333333334e-05, "loss": 1.1832, "step": 1794 }, { "epoch": 14.36, "grad_norm": 26.824554443359375, "learning_rate": 4.760888888888889e-05, "loss": 1.2034, "step": 1795 }, { "epoch": 14.368, "grad_norm": 54.400753021240234, "learning_rate": 4.7604444444444443e-05, "loss": 1.171, "step": 1796 }, { "epoch": 14.376, "grad_norm": 55.76409149169922, "learning_rate": 4.76e-05, "loss": 1.4062, "step": 1797 }, { "epoch": 14.384, "grad_norm": 53.08872604370117, "learning_rate": 4.759555555555556e-05, "loss": 1.6188, "step": 1798 }, { "epoch": 14.392, "grad_norm": 36.29209899902344, "learning_rate": 4.7591111111111115e-05, "loss": 1.2614, "step": 1799 }, { "epoch": 14.4, "grad_norm": 42.61629104614258, "learning_rate": 4.758666666666667e-05, "loss": 1.4367, "step": 1800 }, { "epoch": 14.408, "grad_norm": 77.10282897949219, "learning_rate": 4.7582222222222224e-05, "loss": 1.5477, "step": 1801 }, { "epoch": 14.416, "grad_norm": 40.96506118774414, "learning_rate": 4.757777777777778e-05, "loss": 1.4992, "step": 1802 }, { "epoch": 14.424, "grad_norm": 21.79606056213379, "learning_rate": 4.7573333333333334e-05, "loss": 1.6041, "step": 1803 }, { "epoch": 14.432, "grad_norm": 52.99570083618164, "learning_rate": 4.756888888888889e-05, "loss": 1.4294, "step": 1804 }, { "epoch": 14.44, "grad_norm": 76.87078857421875, "learning_rate": 4.7564444444444444e-05, "loss": 1.4808, "step": 1805 }, { "epoch": 14.448, "grad_norm": 40.63459014892578, "learning_rate": 4.7560000000000005e-05, "loss": 1.4102, "step": 1806 }, { "epoch": 14.456, "grad_norm": 23.722064971923828, "learning_rate": 4.755555555555556e-05, "loss": 1.7479, "step": 1807 }, { "epoch": 14.464, "grad_norm": 50.48766326904297, "learning_rate": 4.7551111111111115e-05, 
"loss": 1.4718, "step": 1808 }, { "epoch": 14.472, "grad_norm": 27.206375122070312, "learning_rate": 4.754666666666667e-05, "loss": 1.1344, "step": 1809 }, { "epoch": 14.48, "grad_norm": 30.985170364379883, "learning_rate": 4.7542222222222225e-05, "loss": 1.1444, "step": 1810 }, { "epoch": 14.488, "grad_norm": 45.58635330200195, "learning_rate": 4.753777777777778e-05, "loss": 1.7299, "step": 1811 }, { "epoch": 14.496, "grad_norm": 23.530561447143555, "learning_rate": 4.7533333333333334e-05, "loss": 1.455, "step": 1812 }, { "epoch": 14.504, "grad_norm": 29.641510009765625, "learning_rate": 4.752888888888889e-05, "loss": 1.4471, "step": 1813 }, { "epoch": 14.512, "grad_norm": 21.98538589477539, "learning_rate": 4.752444444444445e-05, "loss": 3.4771, "step": 1814 }, { "epoch": 14.52, "grad_norm": 39.017982482910156, "learning_rate": 4.7520000000000006e-05, "loss": 1.4721, "step": 1815 }, { "epoch": 14.528, "grad_norm": 24.57597541809082, "learning_rate": 4.751555555555556e-05, "loss": 1.4939, "step": 1816 }, { "epoch": 14.536, "grad_norm": 33.603004455566406, "learning_rate": 4.751111111111111e-05, "loss": 1.5369, "step": 1817 }, { "epoch": 14.544, "grad_norm": 23.777482986450195, "learning_rate": 4.750666666666667e-05, "loss": 1.0569, "step": 1818 }, { "epoch": 14.552, "grad_norm": 45.74203109741211, "learning_rate": 4.7502222222222225e-05, "loss": 0.892, "step": 1819 }, { "epoch": 14.56, "grad_norm": 36.72651672363281, "learning_rate": 4.749777777777778e-05, "loss": 1.5445, "step": 1820 }, { "epoch": 14.568, "grad_norm": 35.781253814697266, "learning_rate": 4.7493333333333335e-05, "loss": 1.3435, "step": 1821 }, { "epoch": 14.576, "grad_norm": 25.0784969329834, "learning_rate": 4.7488888888888897e-05, "loss": 1.2841, "step": 1822 }, { "epoch": 14.584, "grad_norm": 32.749755859375, "learning_rate": 4.748444444444445e-05, "loss": 1.4518, "step": 1823 }, { "epoch": 14.592, "grad_norm": 29.31585121154785, "learning_rate": 4.748e-05, "loss": 1.3363, "step": 1824 }, { 
"epoch": 14.6, "grad_norm": 33.097801208496094, "learning_rate": 4.7475555555555554e-05, "loss": 1.7451, "step": 1825 }, { "epoch": 14.608, "grad_norm": 27.782501220703125, "learning_rate": 4.747111111111111e-05, "loss": 1.3689, "step": 1826 }, { "epoch": 14.616, "grad_norm": 30.67123794555664, "learning_rate": 4.746666666666667e-05, "loss": 1.158, "step": 1827 }, { "epoch": 14.624, "grad_norm": 30.88614845275879, "learning_rate": 4.7462222222222226e-05, "loss": 1.5759, "step": 1828 }, { "epoch": 14.632, "grad_norm": 32.88775634765625, "learning_rate": 4.745777777777778e-05, "loss": 1.4537, "step": 1829 }, { "epoch": 14.64, "grad_norm": 64.63571166992188, "learning_rate": 4.7453333333333335e-05, "loss": 1.5357, "step": 1830 }, { "epoch": 14.648, "grad_norm": 58.794837951660156, "learning_rate": 4.744888888888889e-05, "loss": 2.1455, "step": 1831 }, { "epoch": 14.656, "grad_norm": 26.34687042236328, "learning_rate": 4.7444444444444445e-05, "loss": 1.1216, "step": 1832 }, { "epoch": 14.664, "grad_norm": 151.66908264160156, "learning_rate": 4.744e-05, "loss": 1.1863, "step": 1833 }, { "epoch": 14.672, "grad_norm": 23.405391693115234, "learning_rate": 4.7435555555555555e-05, "loss": 1.6233, "step": 1834 }, { "epoch": 14.68, "grad_norm": 71.15482330322266, "learning_rate": 4.7431111111111116e-05, "loss": 1.645, "step": 1835 }, { "epoch": 14.688, "grad_norm": 23.94234275817871, "learning_rate": 4.742666666666667e-05, "loss": 1.6721, "step": 1836 }, { "epoch": 14.696, "grad_norm": 26.95927619934082, "learning_rate": 4.7422222222222226e-05, "loss": 1.2325, "step": 1837 }, { "epoch": 14.704, "grad_norm": 26.262882232666016, "learning_rate": 4.741777777777778e-05, "loss": 1.4765, "step": 1838 }, { "epoch": 14.712, "grad_norm": 35.909000396728516, "learning_rate": 4.7413333333333336e-05, "loss": 2.115, "step": 1839 }, { "epoch": 14.72, "grad_norm": 22.934677124023438, "learning_rate": 4.740888888888889e-05, "loss": 1.3985, "step": 1840 }, { "epoch": 14.728, "grad_norm": 
78.62682342529297, "learning_rate": 4.7404444444444445e-05, "loss": 1.3348, "step": 1841 }, { "epoch": 14.736, "grad_norm": 27.520366668701172, "learning_rate": 4.74e-05, "loss": 1.3666, "step": 1842 }, { "epoch": 14.744, "grad_norm": 38.49928283691406, "learning_rate": 4.739555555555556e-05, "loss": 1.3929, "step": 1843 }, { "epoch": 14.752, "grad_norm": 24.963743209838867, "learning_rate": 4.739111111111112e-05, "loss": 1.5951, "step": 1844 }, { "epoch": 14.76, "grad_norm": 25.836040496826172, "learning_rate": 4.7386666666666665e-05, "loss": 1.1673, "step": 1845 }, { "epoch": 14.768, "grad_norm": 20.58270835876465, "learning_rate": 4.738222222222222e-05, "loss": 1.395, "step": 1846 }, { "epoch": 14.776, "grad_norm": 121.81427001953125, "learning_rate": 4.737777777777778e-05, "loss": 1.8082, "step": 1847 }, { "epoch": 14.784, "grad_norm": 59.60789108276367, "learning_rate": 4.7373333333333336e-05, "loss": 1.2239, "step": 1848 }, { "epoch": 14.792, "grad_norm": 35.40538787841797, "learning_rate": 4.736888888888889e-05, "loss": 1.1596, "step": 1849 }, { "epoch": 14.8, "grad_norm": 32.59342956542969, "learning_rate": 4.7364444444444446e-05, "loss": 1.1046, "step": 1850 }, { "epoch": 14.808, "grad_norm": 21.72887420654297, "learning_rate": 4.736000000000001e-05, "loss": 1.5406, "step": 1851 }, { "epoch": 14.816, "grad_norm": 96.1569595336914, "learning_rate": 4.7355555555555555e-05, "loss": 1.3783, "step": 1852 }, { "epoch": 14.824, "grad_norm": 25.65863800048828, "learning_rate": 4.735111111111111e-05, "loss": 1.3785, "step": 1853 }, { "epoch": 14.832, "grad_norm": 27.605167388916016, "learning_rate": 4.7346666666666665e-05, "loss": 1.297, "step": 1854 }, { "epoch": 14.84, "grad_norm": 24.357242584228516, "learning_rate": 4.734222222222223e-05, "loss": 1.5303, "step": 1855 }, { "epoch": 14.848, "grad_norm": 32.08120346069336, "learning_rate": 4.733777777777778e-05, "loss": 1.257, "step": 1856 }, { "epoch": 14.856, "grad_norm": 22.66448974609375, "learning_rate": 
4.7333333333333336e-05, "loss": 1.2583, "step": 1857 }, { "epoch": 14.864, "grad_norm": 25.340557098388672, "learning_rate": 4.732888888888889e-05, "loss": 1.0662, "step": 1858 }, { "epoch": 14.872, "grad_norm": 21.60244369506836, "learning_rate": 4.7324444444444446e-05, "loss": 1.4071, "step": 1859 }, { "epoch": 14.88, "grad_norm": 24.708234786987305, "learning_rate": 4.732e-05, "loss": 1.3088, "step": 1860 }, { "epoch": 14.888, "grad_norm": 32.252784729003906, "learning_rate": 4.7315555555555556e-05, "loss": 1.4875, "step": 1861 }, { "epoch": 14.896, "grad_norm": 30.594221115112305, "learning_rate": 4.731111111111111e-05, "loss": 1.2552, "step": 1862 }, { "epoch": 14.904, "grad_norm": 40.249698638916016, "learning_rate": 4.730666666666667e-05, "loss": 1.2739, "step": 1863 }, { "epoch": 14.912, "grad_norm": 36.409217834472656, "learning_rate": 4.730222222222223e-05, "loss": 1.2273, "step": 1864 }, { "epoch": 14.92, "grad_norm": 38.01579666137695, "learning_rate": 4.729777777777778e-05, "loss": 1.1968, "step": 1865 }, { "epoch": 14.928, "grad_norm": 40.87963104248047, "learning_rate": 4.729333333333334e-05, "loss": 1.4619, "step": 1866 }, { "epoch": 14.936, "grad_norm": 23.435361862182617, "learning_rate": 4.728888888888889e-05, "loss": 3.0193, "step": 1867 }, { "epoch": 14.943999999999999, "grad_norm": 32.52157974243164, "learning_rate": 4.7284444444444446e-05, "loss": 2.142, "step": 1868 }, { "epoch": 14.952, "grad_norm": 70.07328796386719, "learning_rate": 4.728e-05, "loss": 1.0747, "step": 1869 }, { "epoch": 14.96, "grad_norm": 33.210941314697266, "learning_rate": 4.7275555555555556e-05, "loss": 1.3792, "step": 1870 }, { "epoch": 14.968, "grad_norm": 17.130428314208984, "learning_rate": 4.727111111111112e-05, "loss": 1.2545, "step": 1871 }, { "epoch": 14.975999999999999, "grad_norm": 23.925312042236328, "learning_rate": 4.726666666666667e-05, "loss": 2.4242, "step": 1872 }, { "epoch": 14.984, "grad_norm": 57.42038345336914, "learning_rate": 
4.726222222222223e-05, "loss": 1.2532, "step": 1873 }, { "epoch": 14.992, "grad_norm": 95.65567016601562, "learning_rate": 4.7257777777777776e-05, "loss": 1.1708, "step": 1874 }, { "epoch": 15.0, "grad_norm": 25.934764862060547, "learning_rate": 4.725333333333334e-05, "loss": 1.5264, "step": 1875 }, { "epoch": 15.0, "eval_loss": 1.468224287033081, "eval_map": 0.2162, "eval_map_50": 0.4191, "eval_map_75": 0.1737, "eval_map_Coverall": 0.5008, "eval_map_Face_Shield": 0.1703, "eval_map_Gloves": 0.1321, "eval_map_Goggles": 0.0622, "eval_map_Mask": 0.2154, "eval_map_large": 0.3101, "eval_map_medium": 0.1395, "eval_map_small": 0.1, "eval_mar_1": 0.2461, "eval_mar_10": 0.4133, "eval_mar_100": 0.433, "eval_mar_100_Coverall": 0.64, "eval_mar_100_Face_Shield": 0.5353, "eval_mar_100_Gloves": 0.3607, "eval_mar_100_Goggles": 0.2656, "eval_mar_100_Mask": 0.3635, "eval_mar_large": 0.5735, "eval_mar_medium": 0.3102, "eval_mar_small": 0.1991, "eval_runtime": 2.5735, "eval_samples_per_second": 11.269, "eval_steps_per_second": 0.777, "step": 1875 }, { "epoch": 15.008, "grad_norm": 21.42755126953125, "learning_rate": 4.724888888888889e-05, "loss": 3.0243, "step": 1876 }, { "epoch": 15.016, "grad_norm": 82.28884887695312, "learning_rate": 4.724444444444445e-05, "loss": 1.2972, "step": 1877 }, { "epoch": 15.024, "grad_norm": 39.804508209228516, "learning_rate": 4.724e-05, "loss": 1.766, "step": 1878 }, { "epoch": 15.032, "grad_norm": 45.73950958251953, "learning_rate": 4.7235555555555557e-05, "loss": 1.1225, "step": 1879 }, { "epoch": 15.04, "grad_norm": 52.87825012207031, "learning_rate": 4.723111111111112e-05, "loss": 1.5055, "step": 1880 }, { "epoch": 15.048, "grad_norm": 32.17901611328125, "learning_rate": 4.7226666666666666e-05, "loss": 1.4125, "step": 1881 }, { "epoch": 15.056, "grad_norm": 32.14524459838867, "learning_rate": 4.722222222222222e-05, "loss": 1.3219, "step": 1882 }, { "epoch": 15.064, "grad_norm": 32.16480255126953, "learning_rate": 4.7217777777777776e-05, "loss": 
1.47, "step": 1883 }, { "epoch": 15.072, "grad_norm": 45.97079086303711, "learning_rate": 4.721333333333334e-05, "loss": 2.3235, "step": 1884 }, { "epoch": 15.08, "grad_norm": 55.33319091796875, "learning_rate": 4.720888888888889e-05, "loss": 1.5758, "step": 1885 }, { "epoch": 15.088, "grad_norm": 78.23754119873047, "learning_rate": 4.720444444444445e-05, "loss": 1.6032, "step": 1886 }, { "epoch": 15.096, "grad_norm": 106.9758529663086, "learning_rate": 4.72e-05, "loss": 1.3708, "step": 1887 }, { "epoch": 15.104, "grad_norm": 26.51177406311035, "learning_rate": 4.719555555555556e-05, "loss": 1.3366, "step": 1888 }, { "epoch": 15.112, "grad_norm": 24.129854202270508, "learning_rate": 4.719111111111111e-05, "loss": 1.1706, "step": 1889 }, { "epoch": 15.12, "grad_norm": 17.621318817138672, "learning_rate": 4.718666666666667e-05, "loss": 1.2592, "step": 1890 }, { "epoch": 15.128, "grad_norm": 37.28101348876953, "learning_rate": 4.718222222222222e-05, "loss": 1.5335, "step": 1891 }, { "epoch": 15.136, "grad_norm": 27.494972229003906, "learning_rate": 4.717777777777778e-05, "loss": 0.9829, "step": 1892 }, { "epoch": 15.144, "grad_norm": 40.967342376708984, "learning_rate": 4.717333333333334e-05, "loss": 1.3918, "step": 1893 }, { "epoch": 15.152, "grad_norm": 53.2779541015625, "learning_rate": 4.716888888888889e-05, "loss": 1.7766, "step": 1894 }, { "epoch": 15.16, "grad_norm": 31.547197341918945, "learning_rate": 4.716444444444445e-05, "loss": 1.2529, "step": 1895 }, { "epoch": 15.168, "grad_norm": 39.689048767089844, "learning_rate": 4.716e-05, "loss": 1.1451, "step": 1896 }, { "epoch": 15.176, "grad_norm": 32.06098937988281, "learning_rate": 4.715555555555556e-05, "loss": 2.039, "step": 1897 }, { "epoch": 15.184, "grad_norm": 25.533641815185547, "learning_rate": 4.715111111111111e-05, "loss": 1.1445, "step": 1898 }, { "epoch": 15.192, "grad_norm": 31.90944480895996, "learning_rate": 4.714666666666667e-05, "loss": 1.3585, "step": 1899 }, { "epoch": 15.2, "grad_norm": 
30.337852478027344, "learning_rate": 4.714222222222223e-05, "loss": 1.0469, "step": 1900 }, { "epoch": 15.208, "grad_norm": 69.83970642089844, "learning_rate": 4.7137777777777783e-05, "loss": 1.434, "step": 1901 }, { "epoch": 15.216, "grad_norm": 26.025238037109375, "learning_rate": 4.713333333333333e-05, "loss": 1.1709, "step": 1902 }, { "epoch": 15.224, "grad_norm": 52.89051055908203, "learning_rate": 4.7128888888888886e-05, "loss": 1.602, "step": 1903 }, { "epoch": 15.232, "grad_norm": 34.346736907958984, "learning_rate": 4.712444444444445e-05, "loss": 1.7124, "step": 1904 }, { "epoch": 15.24, "grad_norm": 42.829627990722656, "learning_rate": 4.712e-05, "loss": 1.1665, "step": 1905 }, { "epoch": 15.248, "grad_norm": 48.72191619873047, "learning_rate": 4.711555555555556e-05, "loss": 1.5411, "step": 1906 }, { "epoch": 15.256, "grad_norm": 26.738248825073242, "learning_rate": 4.711111111111111e-05, "loss": 1.5767, "step": 1907 }, { "epoch": 15.264, "grad_norm": 60.833614349365234, "learning_rate": 4.7106666666666674e-05, "loss": 1.8238, "step": 1908 }, { "epoch": 15.272, "grad_norm": 27.615434646606445, "learning_rate": 4.710222222222222e-05, "loss": 1.4957, "step": 1909 }, { "epoch": 15.28, "grad_norm": 25.38152313232422, "learning_rate": 4.709777777777778e-05, "loss": 1.4396, "step": 1910 }, { "epoch": 15.288, "grad_norm": 53.027740478515625, "learning_rate": 4.709333333333333e-05, "loss": 1.6884, "step": 1911 }, { "epoch": 15.296, "grad_norm": 30.36159324645996, "learning_rate": 4.7088888888888894e-05, "loss": 1.3856, "step": 1912 }, { "epoch": 15.304, "grad_norm": 45.79081726074219, "learning_rate": 4.708444444444445e-05, "loss": 1.6688, "step": 1913 }, { "epoch": 15.312, "grad_norm": 49.738155364990234, "learning_rate": 4.708e-05, "loss": 1.6318, "step": 1914 }, { "epoch": 15.32, "grad_norm": 28.836702346801758, "learning_rate": 4.707555555555556e-05, "loss": 1.4632, "step": 1915 }, { "epoch": 15.328, "grad_norm": 65.53352355957031, "learning_rate": 
4.707111111111111e-05, "loss": 1.4599, "step": 1916 }, { "epoch": 15.336, "grad_norm": 67.98013305664062, "learning_rate": 4.706666666666667e-05, "loss": 1.5476, "step": 1917 }, { "epoch": 15.344, "grad_norm": 45.70663070678711, "learning_rate": 4.706222222222222e-05, "loss": 1.9656, "step": 1918 }, { "epoch": 15.352, "grad_norm": 20.31675148010254, "learning_rate": 4.705777777777778e-05, "loss": 1.4038, "step": 1919 }, { "epoch": 15.36, "grad_norm": 60.90163040161133, "learning_rate": 4.705333333333334e-05, "loss": 1.3996, "step": 1920 }, { "epoch": 15.368, "grad_norm": 47.18254470825195, "learning_rate": 4.7048888888888894e-05, "loss": 1.308, "step": 1921 }, { "epoch": 15.376, "grad_norm": 40.9570198059082, "learning_rate": 4.704444444444445e-05, "loss": 1.7831, "step": 1922 }, { "epoch": 15.384, "grad_norm": 33.18278121948242, "learning_rate": 4.7040000000000004e-05, "loss": 1.1863, "step": 1923 }, { "epoch": 15.392, "grad_norm": 47.5422248840332, "learning_rate": 4.703555555555556e-05, "loss": 1.5371, "step": 1924 }, { "epoch": 15.4, "grad_norm": 35.099822998046875, "learning_rate": 4.703111111111111e-05, "loss": 0.9877, "step": 1925 }, { "epoch": 15.408, "grad_norm": 39.85926818847656, "learning_rate": 4.702666666666667e-05, "loss": 1.0616, "step": 1926 }, { "epoch": 15.416, "grad_norm": 20.98746681213379, "learning_rate": 4.702222222222222e-05, "loss": 1.3074, "step": 1927 }, { "epoch": 15.424, "grad_norm": 37.15505599975586, "learning_rate": 4.701777777777778e-05, "loss": 1.1139, "step": 1928 }, { "epoch": 15.432, "grad_norm": 36.23846435546875, "learning_rate": 4.701333333333334e-05, "loss": 1.4242, "step": 1929 }, { "epoch": 15.44, "grad_norm": 40.55739974975586, "learning_rate": 4.7008888888888894e-05, "loss": 1.5563, "step": 1930 }, { "epoch": 15.448, "grad_norm": 15.130134582519531, "learning_rate": 4.700444444444444e-05, "loss": 1.4282, "step": 1931 }, { "epoch": 15.456, "grad_norm": 242.4135284423828, "learning_rate": 4.7e-05, "loss": 1.7367, "step": 
1932 }, { "epoch": 15.464, "grad_norm": 29.302839279174805, "learning_rate": 4.699555555555556e-05, "loss": 1.1548, "step": 1933 }, { "epoch": 15.472, "grad_norm": 24.461837768554688, "learning_rate": 4.6991111111111114e-05, "loss": 1.3431, "step": 1934 }, { "epoch": 15.48, "grad_norm": 42.36631393432617, "learning_rate": 4.698666666666667e-05, "loss": 1.4283, "step": 1935 }, { "epoch": 15.488, "grad_norm": 28.27139663696289, "learning_rate": 4.6982222222222223e-05, "loss": 1.271, "step": 1936 }, { "epoch": 15.496, "grad_norm": 33.043968200683594, "learning_rate": 4.6977777777777785e-05, "loss": 1.8571, "step": 1937 }, { "epoch": 15.504, "grad_norm": 115.76950073242188, "learning_rate": 4.697333333333333e-05, "loss": 1.4831, "step": 1938 }, { "epoch": 15.512, "grad_norm": 21.303890228271484, "learning_rate": 4.696888888888889e-05, "loss": 1.0648, "step": 1939 }, { "epoch": 15.52, "grad_norm": 408.48736572265625, "learning_rate": 4.696444444444444e-05, "loss": 1.6786, "step": 1940 }, { "epoch": 15.528, "grad_norm": 135.24826049804688, "learning_rate": 4.6960000000000004e-05, "loss": 1.6117, "step": 1941 }, { "epoch": 15.536, "grad_norm": 33.27419662475586, "learning_rate": 4.695555555555556e-05, "loss": 1.1257, "step": 1942 }, { "epoch": 15.544, "grad_norm": 61.132606506347656, "learning_rate": 4.6951111111111114e-05, "loss": 1.4042, "step": 1943 }, { "epoch": 15.552, "grad_norm": 39.06184005737305, "learning_rate": 4.694666666666667e-05, "loss": 1.2817, "step": 1944 }, { "epoch": 15.56, "grad_norm": 25.154130935668945, "learning_rate": 4.6942222222222224e-05, "loss": 1.6558, "step": 1945 }, { "epoch": 15.568, "grad_norm": 21.17144012451172, "learning_rate": 4.693777777777778e-05, "loss": 1.7284, "step": 1946 }, { "epoch": 15.576, "grad_norm": 25.191762924194336, "learning_rate": 4.6933333333333333e-05, "loss": 2.0444, "step": 1947 }, { "epoch": 15.584, "grad_norm": 33.884727478027344, "learning_rate": 4.692888888888889e-05, "loss": 1.2247, "step": 1948 }, { 
"epoch": 15.592, "grad_norm": 22.87972068786621, "learning_rate": 4.692444444444445e-05, "loss": 2.8938, "step": 1949 }, { "epoch": 15.6, "grad_norm": 53.560813903808594, "learning_rate": 4.6920000000000005e-05, "loss": 1.6879, "step": 1950 }, { "epoch": 15.608, "grad_norm": 35.392948150634766, "learning_rate": 4.691555555555556e-05, "loss": 1.2727, "step": 1951 }, { "epoch": 15.616, "grad_norm": 50.21064758300781, "learning_rate": 4.6911111111111114e-05, "loss": 1.4201, "step": 1952 }, { "epoch": 15.624, "grad_norm": 88.343505859375, "learning_rate": 4.690666666666667e-05, "loss": 1.9871, "step": 1953 }, { "epoch": 15.632, "grad_norm": 44.36085510253906, "learning_rate": 4.6902222222222224e-05, "loss": 1.5701, "step": 1954 }, { "epoch": 15.64, "grad_norm": 20.943113327026367, "learning_rate": 4.689777777777778e-05, "loss": 1.3146, "step": 1955 }, { "epoch": 15.648, "grad_norm": 36.03797912597656, "learning_rate": 4.6893333333333334e-05, "loss": 1.9565, "step": 1956 }, { "epoch": 15.656, "grad_norm": 119.6890869140625, "learning_rate": 4.6888888888888895e-05, "loss": 1.2489, "step": 1957 }, { "epoch": 15.664, "grad_norm": 67.83956146240234, "learning_rate": 4.688444444444445e-05, "loss": 1.4199, "step": 1958 }, { "epoch": 15.672, "grad_norm": 58.13846969604492, "learning_rate": 4.688e-05, "loss": 1.703, "step": 1959 }, { "epoch": 15.68, "grad_norm": 47.006534576416016, "learning_rate": 4.687555555555555e-05, "loss": 1.666, "step": 1960 }, { "epoch": 15.688, "grad_norm": 25.111318588256836, "learning_rate": 4.6871111111111115e-05, "loss": 1.473, "step": 1961 }, { "epoch": 15.696, "grad_norm": 32.59654998779297, "learning_rate": 4.686666666666667e-05, "loss": 1.1654, "step": 1962 }, { "epoch": 15.704, "grad_norm": 25.23141098022461, "learning_rate": 4.6862222222222225e-05, "loss": 1.1585, "step": 1963 }, { "epoch": 15.712, "grad_norm": 26.250146865844727, "learning_rate": 4.685777777777778e-05, "loss": 1.8592, "step": 1964 }, { "epoch": 15.72, "grad_norm": 
33.14158630371094, "learning_rate": 4.685333333333334e-05, "loss": 0.8687, "step": 1965 }, { "epoch": 15.728, "grad_norm": 44.146018981933594, "learning_rate": 4.684888888888889e-05, "loss": 1.8348, "step": 1966 }, { "epoch": 15.736, "grad_norm": 50.86652755737305, "learning_rate": 4.6844444444444444e-05, "loss": 1.6191, "step": 1967 }, { "epoch": 15.744, "grad_norm": 59.21434020996094, "learning_rate": 4.684e-05, "loss": 1.3023, "step": 1968 }, { "epoch": 15.752, "grad_norm": 59.75230407714844, "learning_rate": 4.683555555555556e-05, "loss": 1.6344, "step": 1969 }, { "epoch": 15.76, "grad_norm": 49.02849197387695, "learning_rate": 4.6831111111111115e-05, "loss": 2.4352, "step": 1970 }, { "epoch": 15.768, "grad_norm": 42.3071403503418, "learning_rate": 4.682666666666667e-05, "loss": 1.7866, "step": 1971 }, { "epoch": 15.776, "grad_norm": 110.83995819091797, "learning_rate": 4.6822222222222225e-05, "loss": 1.5634, "step": 1972 }, { "epoch": 15.784, "grad_norm": 95.73385620117188, "learning_rate": 4.681777777777778e-05, "loss": 1.311, "step": 1973 }, { "epoch": 15.792, "grad_norm": 53.8675422668457, "learning_rate": 4.6813333333333335e-05, "loss": 1.86, "step": 1974 }, { "epoch": 15.8, "grad_norm": 29.204320907592773, "learning_rate": 4.680888888888889e-05, "loss": 1.9315, "step": 1975 }, { "epoch": 15.808, "grad_norm": 41.8610954284668, "learning_rate": 4.6804444444444444e-05, "loss": 1.3976, "step": 1976 }, { "epoch": 15.816, "grad_norm": 48.008583068847656, "learning_rate": 4.6800000000000006e-05, "loss": 1.2434, "step": 1977 }, { "epoch": 15.824, "grad_norm": 52.74129104614258, "learning_rate": 4.679555555555556e-05, "loss": 1.7497, "step": 1978 }, { "epoch": 15.832, "grad_norm": 39.38664627075195, "learning_rate": 4.6791111111111116e-05, "loss": 1.9839, "step": 1979 }, { "epoch": 15.84, "grad_norm": 26.104084014892578, "learning_rate": 4.678666666666667e-05, "loss": 1.6175, "step": 1980 }, { "epoch": 15.848, "grad_norm": 239.34266662597656, "learning_rate": 
4.678222222222222e-05, "loss": 1.3556, "step": 1981 }, { "epoch": 15.856, "grad_norm": 39.92780685424805, "learning_rate": 4.677777777777778e-05, "loss": 1.5137, "step": 1982 }, { "epoch": 15.864, "grad_norm": 92.13284301757812, "learning_rate": 4.6773333333333335e-05, "loss": 1.437, "step": 1983 }, { "epoch": 15.872, "grad_norm": 41.77146530151367, "learning_rate": 4.676888888888889e-05, "loss": 1.3326, "step": 1984 }, { "epoch": 15.88, "grad_norm": 21.635583877563477, "learning_rate": 4.6764444444444445e-05, "loss": 1.1785, "step": 1985 }, { "epoch": 15.888, "grad_norm": 31.978614807128906, "learning_rate": 4.6760000000000006e-05, "loss": 1.4091, "step": 1986 }, { "epoch": 15.896, "grad_norm": 34.574546813964844, "learning_rate": 4.675555555555556e-05, "loss": 1.3518, "step": 1987 }, { "epoch": 15.904, "grad_norm": 46.668819427490234, "learning_rate": 4.675111111111111e-05, "loss": 1.6659, "step": 1988 }, { "epoch": 15.912, "grad_norm": 38.506988525390625, "learning_rate": 4.6746666666666664e-05, "loss": 1.403, "step": 1989 }, { "epoch": 15.92, "grad_norm": 22.13770866394043, "learning_rate": 4.6742222222222226e-05, "loss": 1.1891, "step": 1990 }, { "epoch": 15.928, "grad_norm": 21.060239791870117, "learning_rate": 4.673777777777778e-05, "loss": 1.4954, "step": 1991 }, { "epoch": 15.936, "grad_norm": 32.979557037353516, "learning_rate": 4.6733333333333335e-05, "loss": 1.7917, "step": 1992 }, { "epoch": 15.943999999999999, "grad_norm": 71.8815689086914, "learning_rate": 4.672888888888889e-05, "loss": 1.6733, "step": 1993 }, { "epoch": 15.952, "grad_norm": 25.117582321166992, "learning_rate": 4.672444444444445e-05, "loss": 1.8159, "step": 1994 }, { "epoch": 15.96, "grad_norm": 53.44597244262695, "learning_rate": 4.672e-05, "loss": 1.0062, "step": 1995 }, { "epoch": 15.968, "grad_norm": 27.510923385620117, "learning_rate": 4.6715555555555555e-05, "loss": 1.1425, "step": 1996 }, { "epoch": 15.975999999999999, "grad_norm": 44.284889221191406, "learning_rate": 
4.671111111111111e-05, "loss": 1.9067, "step": 1997 }, { "epoch": 15.984, "grad_norm": 26.613677978515625, "learning_rate": 4.670666666666667e-05, "loss": 1.2034, "step": 1998 }, { "epoch": 15.992, "grad_norm": 25.58790397644043, "learning_rate": 4.6702222222222226e-05, "loss": 1.2237, "step": 1999 }, { "epoch": 16.0, "grad_norm": 64.11408996582031, "learning_rate": 4.669777777777778e-05, "loss": 1.441, "step": 2000 }, { "epoch": 16.0, "eval_loss": 1.5064910650253296, "eval_map": 0.2131, "eval_map_50": 0.4481, "eval_map_75": 0.1571, "eval_map_Coverall": 0.505, "eval_map_Face_Shield": 0.1106, "eval_map_Gloves": 0.1473, "eval_map_Goggles": 0.0771, "eval_map_Mask": 0.2255, "eval_map_large": 0.3191, "eval_map_medium": 0.1152, "eval_map_small": 0.1481, "eval_mar_1": 0.2389, "eval_mar_10": 0.4093, "eval_mar_100": 0.4344, "eval_mar_100_Coverall": 0.6867, "eval_mar_100_Face_Shield": 0.5059, "eval_mar_100_Gloves": 0.3246, "eval_mar_100_Goggles": 0.3125, "eval_mar_100_Mask": 0.3423, "eval_mar_large": 0.5527, "eval_mar_medium": 0.3081, "eval_mar_small": 0.2371, "eval_runtime": 2.5643, "eval_samples_per_second": 11.309, "eval_steps_per_second": 0.78, "step": 2000 }, { "epoch": 16.008, "grad_norm": 49.25587463378906, "learning_rate": 4.6693333333333336e-05, "loss": 1.2533, "step": 2001 }, { "epoch": 16.016, "grad_norm": 34.290672302246094, "learning_rate": 4.668888888888889e-05, "loss": 1.1459, "step": 2002 }, { "epoch": 16.024, "grad_norm": 43.35966873168945, "learning_rate": 4.6684444444444445e-05, "loss": 1.5417, "step": 2003 }, { "epoch": 16.032, "grad_norm": 35.281612396240234, "learning_rate": 4.668e-05, "loss": 1.9008, "step": 2004 }, { "epoch": 16.04, "grad_norm": 33.8704948425293, "learning_rate": 4.6675555555555555e-05, "loss": 1.4163, "step": 2005 }, { "epoch": 16.048, "grad_norm": 20.158714294433594, "learning_rate": 4.667111111111112e-05, "loss": 1.3736, "step": 2006 }, { "epoch": 16.056, "grad_norm": 31.257862091064453, "learning_rate": 4.666666666666667e-05, 
"loss": 1.0871, "step": 2007 }, { "epoch": 16.064, "grad_norm": 31.850692749023438, "learning_rate": 4.6662222222222226e-05, "loss": 1.4301, "step": 2008 }, { "epoch": 16.072, "grad_norm": 23.55552864074707, "learning_rate": 4.665777777777778e-05, "loss": 1.2705, "step": 2009 }, { "epoch": 16.08, "grad_norm": 15.655172348022461, "learning_rate": 4.6653333333333336e-05, "loss": 1.4045, "step": 2010 }, { "epoch": 16.088, "grad_norm": 33.19257354736328, "learning_rate": 4.664888888888889e-05, "loss": 1.3708, "step": 2011 }, { "epoch": 16.096, "grad_norm": 23.85597801208496, "learning_rate": 4.6644444444444446e-05, "loss": 1.4221, "step": 2012 }, { "epoch": 16.104, "grad_norm": 20.844114303588867, "learning_rate": 4.664e-05, "loss": 1.2582, "step": 2013 }, { "epoch": 16.112, "grad_norm": 23.546031951904297, "learning_rate": 4.663555555555556e-05, "loss": 1.3751, "step": 2014 }, { "epoch": 16.12, "grad_norm": 20.43274688720703, "learning_rate": 4.663111111111112e-05, "loss": 1.3637, "step": 2015 }, { "epoch": 16.128, "grad_norm": 20.8396053314209, "learning_rate": 4.6626666666666665e-05, "loss": 1.0875, "step": 2016 }, { "epoch": 16.136, "grad_norm": 16.327022552490234, "learning_rate": 4.662222222222222e-05, "loss": 1.4048, "step": 2017 }, { "epoch": 16.144, "grad_norm": 31.83888816833496, "learning_rate": 4.661777777777778e-05, "loss": 1.4367, "step": 2018 }, { "epoch": 16.152, "grad_norm": 32.0489616394043, "learning_rate": 4.6613333333333337e-05, "loss": 1.2863, "step": 2019 }, { "epoch": 16.16, "grad_norm": 56.43332290649414, "learning_rate": 4.660888888888889e-05, "loss": 1.4115, "step": 2020 }, { "epoch": 16.168, "grad_norm": 43.75410842895508, "learning_rate": 4.6604444444444446e-05, "loss": 1.4962, "step": 2021 }, { "epoch": 16.176, "grad_norm": 34.103485107421875, "learning_rate": 4.660000000000001e-05, "loss": 1.305, "step": 2022 }, { "epoch": 16.184, "grad_norm": 35.78275680541992, "learning_rate": 4.6595555555555556e-05, "loss": 1.3977, "step": 2023 }, { 
"epoch": 16.192, "grad_norm": 26.15697479248047, "learning_rate": 4.659111111111111e-05, "loss": 1.4542, "step": 2024 }, { "epoch": 16.2, "grad_norm": 172.3627166748047, "learning_rate": 4.6586666666666666e-05, "loss": 1.6253, "step": 2025 }, { "epoch": 16.208, "grad_norm": 43.95888900756836, "learning_rate": 4.658222222222223e-05, "loss": 2.4758, "step": 2026 }, { "epoch": 16.216, "grad_norm": 18.22405242919922, "learning_rate": 4.657777777777778e-05, "loss": 1.1312, "step": 2027 }, { "epoch": 16.224, "grad_norm": 18.257104873657227, "learning_rate": 4.657333333333334e-05, "loss": 1.3303, "step": 2028 }, { "epoch": 16.232, "grad_norm": 33.433616638183594, "learning_rate": 4.656888888888889e-05, "loss": 1.0826, "step": 2029 }, { "epoch": 16.24, "grad_norm": 30.369293212890625, "learning_rate": 4.6564444444444447e-05, "loss": 2.2694, "step": 2030 }, { "epoch": 16.248, "grad_norm": 30.952491760253906, "learning_rate": 4.656e-05, "loss": 1.1469, "step": 2031 }, { "epoch": 16.256, "grad_norm": 24.94999122619629, "learning_rate": 4.6555555555555556e-05, "loss": 1.76, "step": 2032 }, { "epoch": 16.264, "grad_norm": 23.69525146484375, "learning_rate": 4.655111111111111e-05, "loss": 1.4312, "step": 2033 }, { "epoch": 16.272, "grad_norm": 20.955781936645508, "learning_rate": 4.6546666666666666e-05, "loss": 1.7045, "step": 2034 }, { "epoch": 16.28, "grad_norm": 33.90179443359375, "learning_rate": 4.654222222222223e-05, "loss": 0.9593, "step": 2035 }, { "epoch": 16.288, "grad_norm": 29.731170654296875, "learning_rate": 4.653777777777778e-05, "loss": 1.7079, "step": 2036 }, { "epoch": 16.296, "grad_norm": 54.39741897583008, "learning_rate": 4.653333333333334e-05, "loss": 1.2977, "step": 2037 }, { "epoch": 16.304, "grad_norm": 26.419490814208984, "learning_rate": 4.6528888888888885e-05, "loss": 1.6639, "step": 2038 }, { "epoch": 16.312, "grad_norm": 42.083675384521484, "learning_rate": 4.652444444444445e-05, "loss": 0.902, "step": 2039 }, { "epoch": 16.32, "grad_norm": 
28.282840728759766, "learning_rate": 4.652e-05, "loss": 1.2734, "step": 2040 }, { "epoch": 16.328, "grad_norm": 33.21522903442383, "learning_rate": 4.651555555555556e-05, "loss": 1.1955, "step": 2041 }, { "epoch": 16.336, "grad_norm": 16.443822860717773, "learning_rate": 4.651111111111111e-05, "loss": 0.8782, "step": 2042 }, { "epoch": 16.344, "grad_norm": 43.91071319580078, "learning_rate": 4.650666666666667e-05, "loss": 2.1134, "step": 2043 }, { "epoch": 16.352, "grad_norm": 74.40498352050781, "learning_rate": 4.650222222222223e-05, "loss": 1.3172, "step": 2044 }, { "epoch": 16.36, "grad_norm": 18.848222732543945, "learning_rate": 4.6497777777777776e-05, "loss": 1.2895, "step": 2045 }, { "epoch": 16.368, "grad_norm": 24.037960052490234, "learning_rate": 4.649333333333333e-05, "loss": 1.4406, "step": 2046 }, { "epoch": 16.376, "grad_norm": 39.36971664428711, "learning_rate": 4.648888888888889e-05, "loss": 1.4846, "step": 2047 }, { "epoch": 16.384, "grad_norm": 17.410799026489258, "learning_rate": 4.648444444444445e-05, "loss": 1.4693, "step": 2048 }, { "epoch": 16.392, "grad_norm": 28.919410705566406, "learning_rate": 4.648e-05, "loss": 1.2813, "step": 2049 }, { "epoch": 16.4, "grad_norm": 22.89698028564453, "learning_rate": 4.647555555555556e-05, "loss": 1.7515, "step": 2050 }, { "epoch": 16.408, "grad_norm": 61.72162628173828, "learning_rate": 4.647111111111111e-05, "loss": 3.1682, "step": 2051 }, { "epoch": 16.416, "grad_norm": 49.907649993896484, "learning_rate": 4.646666666666667e-05, "loss": 1.0789, "step": 2052 }, { "epoch": 16.424, "grad_norm": 29.778711318969727, "learning_rate": 4.646222222222222e-05, "loss": 1.2999, "step": 2053 }, { "epoch": 16.432, "grad_norm": 49.02151870727539, "learning_rate": 4.6457777777777776e-05, "loss": 1.0373, "step": 2054 }, { "epoch": 16.44, "grad_norm": 32.88031005859375, "learning_rate": 4.645333333333334e-05, "loss": 1.1468, "step": 2055 }, { "epoch": 16.448, "grad_norm": 36.61318588256836, "learning_rate": 
4.644888888888889e-05, "loss": 1.7341, "step": 2056 }, { "epoch": 16.456, "grad_norm": 40.154659271240234, "learning_rate": 4.644444444444445e-05, "loss": 1.0959, "step": 2057 }, { "epoch": 16.464, "grad_norm": 60.82568359375, "learning_rate": 4.644e-05, "loss": 1.3558, "step": 2058 }, { "epoch": 16.472, "grad_norm": 26.469911575317383, "learning_rate": 4.643555555555556e-05, "loss": 1.1725, "step": 2059 }, { "epoch": 16.48, "grad_norm": 22.02050018310547, "learning_rate": 4.643111111111111e-05, "loss": 1.6032, "step": 2060 }, { "epoch": 16.488, "grad_norm": 23.73678970336914, "learning_rate": 4.642666666666667e-05, "loss": 1.582, "step": 2061 }, { "epoch": 16.496, "grad_norm": 25.166658401489258, "learning_rate": 4.642222222222222e-05, "loss": 1.3376, "step": 2062 }, { "epoch": 16.504, "grad_norm": 25.43703842163086, "learning_rate": 4.6417777777777784e-05, "loss": 1.1819, "step": 2063 }, { "epoch": 16.512, "grad_norm": 23.870311737060547, "learning_rate": 4.641333333333334e-05, "loss": 1.3805, "step": 2064 }, { "epoch": 16.52, "grad_norm": 37.61258316040039, "learning_rate": 4.640888888888889e-05, "loss": 1.3898, "step": 2065 }, { "epoch": 16.528, "grad_norm": 25.386707305908203, "learning_rate": 4.640444444444445e-05, "loss": 1.5745, "step": 2066 }, { "epoch": 16.536, "grad_norm": 35.779117584228516, "learning_rate": 4.64e-05, "loss": 1.4175, "step": 2067 }, { "epoch": 16.544, "grad_norm": 135.1598663330078, "learning_rate": 4.639555555555556e-05, "loss": 1.3965, "step": 2068 }, { "epoch": 16.552, "grad_norm": 17.8762149810791, "learning_rate": 4.639111111111111e-05, "loss": 1.7441, "step": 2069 }, { "epoch": 16.56, "grad_norm": 34.29193115234375, "learning_rate": 4.638666666666667e-05, "loss": 1.259, "step": 2070 }, { "epoch": 16.568, "grad_norm": 30.305864334106445, "learning_rate": 4.638222222222223e-05, "loss": 1.5184, "step": 2071 }, { "epoch": 16.576, "grad_norm": 28.04138946533203, "learning_rate": 4.6377777777777784e-05, "loss": 1.2271, "step": 2072 }, { 
"epoch": 16.584, "grad_norm": 22.39527130126953, "learning_rate": 4.637333333333333e-05, "loss": 1.3776, "step": 2073 }, { "epoch": 16.592, "grad_norm": 68.34848022460938, "learning_rate": 4.636888888888889e-05, "loss": 1.6313, "step": 2074 }, { "epoch": 16.6, "grad_norm": 34.127498626708984, "learning_rate": 4.636444444444445e-05, "loss": 1.7434, "step": 2075 }, { "epoch": 16.608, "grad_norm": 25.982044219970703, "learning_rate": 4.636e-05, "loss": 1.0931, "step": 2076 }, { "epoch": 16.616, "grad_norm": 29.13801383972168, "learning_rate": 4.635555555555556e-05, "loss": 1.0505, "step": 2077 }, { "epoch": 16.624, "grad_norm": 33.31601333618164, "learning_rate": 4.635111111111111e-05, "loss": 1.6027, "step": 2078 }, { "epoch": 16.632, "grad_norm": 21.90670394897461, "learning_rate": 4.6346666666666675e-05, "loss": 1.0249, "step": 2079 }, { "epoch": 16.64, "grad_norm": 40.53753662109375, "learning_rate": 4.634222222222222e-05, "loss": 2.0827, "step": 2080 }, { "epoch": 16.648, "grad_norm": 24.254562377929688, "learning_rate": 4.633777777777778e-05, "loss": 1.5735, "step": 2081 }, { "epoch": 16.656, "grad_norm": 50.33248519897461, "learning_rate": 4.633333333333333e-05, "loss": 1.9225, "step": 2082 }, { "epoch": 16.664, "grad_norm": 45.04800796508789, "learning_rate": 4.632888888888889e-05, "loss": 1.3171, "step": 2083 }, { "epoch": 16.672, "grad_norm": 21.122709274291992, "learning_rate": 4.632444444444445e-05, "loss": 1.1958, "step": 2084 }, { "epoch": 16.68, "grad_norm": 27.34699821472168, "learning_rate": 4.6320000000000004e-05, "loss": 1.3976, "step": 2085 }, { "epoch": 16.688, "grad_norm": 34.81392288208008, "learning_rate": 4.631555555555556e-05, "loss": 1.5616, "step": 2086 }, { "epoch": 16.696, "grad_norm": 36.61018371582031, "learning_rate": 4.6311111111111113e-05, "loss": 1.2925, "step": 2087 }, { "epoch": 16.704, "grad_norm": 85.38211822509766, "learning_rate": 4.630666666666667e-05, "loss": 1.2491, "step": 2088 }, { "epoch": 16.712, "grad_norm": 
35.415042877197266, "learning_rate": 4.630222222222222e-05, "loss": 0.9977, "step": 2089 }, { "epoch": 16.72, "grad_norm": 19.591344833374023, "learning_rate": 4.629777777777778e-05, "loss": 1.3039, "step": 2090 }, { "epoch": 16.728, "grad_norm": 33.084617614746094, "learning_rate": 4.629333333333333e-05, "loss": 1.5791, "step": 2091 }, { "epoch": 16.736, "grad_norm": 19.805763244628906, "learning_rate": 4.6288888888888894e-05, "loss": 1.6856, "step": 2092 }, { "epoch": 16.744, "grad_norm": 21.320100784301758, "learning_rate": 4.628444444444445e-05, "loss": 1.3376, "step": 2093 }, { "epoch": 16.752, "grad_norm": 38.99585723876953, "learning_rate": 4.6280000000000004e-05, "loss": 1.2685, "step": 2094 }, { "epoch": 16.76, "grad_norm": 47.296871185302734, "learning_rate": 4.627555555555555e-05, "loss": 1.2902, "step": 2095 }, { "epoch": 16.768, "grad_norm": 26.81635093688965, "learning_rate": 4.6271111111111114e-05, "loss": 1.1073, "step": 2096 }, { "epoch": 16.776, "grad_norm": 38.23540115356445, "learning_rate": 4.626666666666667e-05, "loss": 1.467, "step": 2097 }, { "epoch": 16.784, "grad_norm": 27.61884117126465, "learning_rate": 4.6262222222222224e-05, "loss": 1.5284, "step": 2098 }, { "epoch": 16.792, "grad_norm": 26.26021957397461, "learning_rate": 4.625777777777778e-05, "loss": 1.6323, "step": 2099 }, { "epoch": 16.8, "grad_norm": 25.788862228393555, "learning_rate": 4.625333333333334e-05, "loss": 1.4241, "step": 2100 }, { "epoch": 16.808, "grad_norm": 47.53826141357422, "learning_rate": 4.6248888888888895e-05, "loss": 1.1485, "step": 2101 }, { "epoch": 16.816, "grad_norm": 23.21491050720215, "learning_rate": 4.624444444444444e-05, "loss": 1.6581, "step": 2102 }, { "epoch": 16.824, "grad_norm": 38.13594055175781, "learning_rate": 4.624e-05, "loss": 1.219, "step": 2103 }, { "epoch": 16.832, "grad_norm": 39.19220733642578, "learning_rate": 4.623555555555556e-05, "loss": 1.4578, "step": 2104 }, { "epoch": 16.84, "grad_norm": 26.373220443725586, "learning_rate": 
4.6231111111111114e-05, "loss": 2.0829, "step": 2105 }, { "epoch": 16.848, "grad_norm": 38.13606262207031, "learning_rate": 4.622666666666667e-05, "loss": 1.6871, "step": 2106 }, { "epoch": 16.856, "grad_norm": 83.7221450805664, "learning_rate": 4.6222222222222224e-05, "loss": 1.1434, "step": 2107 }, { "epoch": 16.864, "grad_norm": 28.08944320678711, "learning_rate": 4.621777777777778e-05, "loss": 1.3207, "step": 2108 }, { "epoch": 16.872, "grad_norm": 26.58762550354004, "learning_rate": 4.6213333333333334e-05, "loss": 1.1871, "step": 2109 }, { "epoch": 16.88, "grad_norm": 30.00811195373535, "learning_rate": 4.620888888888889e-05, "loss": 1.3542, "step": 2110 }, { "epoch": 16.888, "grad_norm": 25.278568267822266, "learning_rate": 4.620444444444444e-05, "loss": 1.3529, "step": 2111 }, { "epoch": 16.896, "grad_norm": 25.02608299255371, "learning_rate": 4.6200000000000005e-05, "loss": 1.6362, "step": 2112 }, { "epoch": 16.904, "grad_norm": 62.53496170043945, "learning_rate": 4.619555555555556e-05, "loss": 1.4954, "step": 2113 }, { "epoch": 16.912, "grad_norm": 31.70366859436035, "learning_rate": 4.6191111111111115e-05, "loss": 1.1618, "step": 2114 }, { "epoch": 16.92, "grad_norm": 55.94925308227539, "learning_rate": 4.618666666666667e-05, "loss": 1.498, "step": 2115 }, { "epoch": 16.928, "grad_norm": 44.6969108581543, "learning_rate": 4.6182222222222224e-05, "loss": 1.6572, "step": 2116 }, { "epoch": 16.936, "grad_norm": 34.93545913696289, "learning_rate": 4.617777777777778e-05, "loss": 1.4518, "step": 2117 }, { "epoch": 16.944, "grad_norm": 125.09526062011719, "learning_rate": 4.6173333333333334e-05, "loss": 1.3714, "step": 2118 }, { "epoch": 16.951999999999998, "grad_norm": 63.755245208740234, "learning_rate": 4.616888888888889e-05, "loss": 1.5243, "step": 2119 }, { "epoch": 16.96, "grad_norm": 54.677371978759766, "learning_rate": 4.616444444444445e-05, "loss": 1.4005, "step": 2120 }, { "epoch": 16.968, "grad_norm": 24.21851348876953, "learning_rate": 
4.6160000000000005e-05, "loss": 1.1572, "step": 2121 }, { "epoch": 16.976, "grad_norm": 81.26229095458984, "learning_rate": 4.615555555555556e-05, "loss": 2.5286, "step": 2122 }, { "epoch": 16.984, "grad_norm": 36.24240493774414, "learning_rate": 4.6151111111111115e-05, "loss": 1.2251, "step": 2123 }, { "epoch": 16.992, "grad_norm": 26.2452392578125, "learning_rate": 4.614666666666667e-05, "loss": 1.5227, "step": 2124 }, { "epoch": 17.0, "grad_norm": 30.102182388305664, "learning_rate": 4.6142222222222225e-05, "loss": 1.4405, "step": 2125 }, { "epoch": 17.0, "eval_loss": 1.5059325695037842, "eval_map": 0.2297, "eval_map_50": 0.4693, "eval_map_75": 0.1925, "eval_map_Coverall": 0.5279, "eval_map_Face_Shield": 0.1642, "eval_map_Gloves": 0.159, "eval_map_Goggles": 0.0452, "eval_map_Mask": 0.252, "eval_map_large": 0.3082, "eval_map_medium": 0.1534, "eval_map_small": 0.1195, "eval_mar_1": 0.2368, "eval_mar_10": 0.4375, "eval_mar_100": 0.4701, "eval_mar_100_Coverall": 0.7244, "eval_mar_100_Face_Shield": 0.5824, "eval_mar_100_Gloves": 0.323, "eval_mar_100_Goggles": 0.3594, "eval_mar_100_Mask": 0.3615, "eval_mar_large": 0.6181, "eval_mar_medium": 0.33, "eval_mar_small": 0.2123, "eval_runtime": 2.5438, "eval_samples_per_second": 11.4, "eval_steps_per_second": 0.786, "step": 2125 }, { "epoch": 17.008, "grad_norm": 41.30819320678711, "learning_rate": 4.613777777777778e-05, "loss": 2.0437, "step": 2126 }, { "epoch": 17.016, "grad_norm": 33.77207565307617, "learning_rate": 4.6133333333333334e-05, "loss": 1.9841, "step": 2127 }, { "epoch": 17.024, "grad_norm": 22.634946823120117, "learning_rate": 4.6128888888888896e-05, "loss": 1.3548, "step": 2128 }, { "epoch": 17.032, "grad_norm": 29.05863380432129, "learning_rate": 4.612444444444445e-05, "loss": 1.6459, "step": 2129 }, { "epoch": 17.04, "grad_norm": 30.907917022705078, "learning_rate": 4.612e-05, "loss": 2.5639, "step": 2130 }, { "epoch": 17.048, "grad_norm": 23.342695236206055, "learning_rate": 4.6115555555555554e-05, "loss": 
0.94, "step": 2131 }, { "epoch": 17.056, "grad_norm": 68.65483856201172, "learning_rate": 4.6111111111111115e-05, "loss": 1.0692, "step": 2132 }, { "epoch": 17.064, "grad_norm": 57.36931228637695, "learning_rate": 4.610666666666667e-05, "loss": 1.7045, "step": 2133 }, { "epoch": 17.072, "grad_norm": 47.60623550415039, "learning_rate": 4.6102222222222225e-05, "loss": 1.6789, "step": 2134 }, { "epoch": 17.08, "grad_norm": 47.0242919921875, "learning_rate": 4.609777777777778e-05, "loss": 1.5424, "step": 2135 }, { "epoch": 17.088, "grad_norm": 34.1700325012207, "learning_rate": 4.6093333333333335e-05, "loss": 1.1392, "step": 2136 }, { "epoch": 17.096, "grad_norm": 26.748544692993164, "learning_rate": 4.608888888888889e-05, "loss": 1.2626, "step": 2137 }, { "epoch": 17.104, "grad_norm": 74.82675170898438, "learning_rate": 4.6084444444444444e-05, "loss": 1.3999, "step": 2138 }, { "epoch": 17.112, "grad_norm": 37.72703552246094, "learning_rate": 4.608e-05, "loss": 1.1771, "step": 2139 }, { "epoch": 17.12, "grad_norm": 31.57484245300293, "learning_rate": 4.6075555555555554e-05, "loss": 1.9215, "step": 2140 }, { "epoch": 17.128, "grad_norm": 33.983497619628906, "learning_rate": 4.6071111111111116e-05, "loss": 1.3622, "step": 2141 }, { "epoch": 17.136, "grad_norm": 49.675132751464844, "learning_rate": 4.606666666666667e-05, "loss": 1.1339, "step": 2142 }, { "epoch": 17.144, "grad_norm": 46.66606903076172, "learning_rate": 4.6062222222222225e-05, "loss": 1.6309, "step": 2143 }, { "epoch": 17.152, "grad_norm": 111.82109832763672, "learning_rate": 4.605777777777778e-05, "loss": 1.3518, "step": 2144 }, { "epoch": 17.16, "grad_norm": 34.72600555419922, "learning_rate": 4.6053333333333335e-05, "loss": 1.1613, "step": 2145 }, { "epoch": 17.168, "grad_norm": 38.50379180908203, "learning_rate": 4.604888888888889e-05, "loss": 1.8252, "step": 2146 }, { "epoch": 17.176, "grad_norm": 71.12712097167969, "learning_rate": 4.6044444444444445e-05, "loss": 1.5197, "step": 2147 }, { "epoch": 
17.184, "grad_norm": 26.374040603637695, "learning_rate": 4.604e-05, "loss": 1.3871, "step": 2148 }, { "epoch": 17.192, "grad_norm": 63.79467010498047, "learning_rate": 4.603555555555556e-05, "loss": 1.467, "step": 2149 }, { "epoch": 17.2, "grad_norm": 34.22210693359375, "learning_rate": 4.6031111111111116e-05, "loss": 1.0653, "step": 2150 }, { "epoch": 17.208, "grad_norm": 19.78816032409668, "learning_rate": 4.602666666666667e-05, "loss": 1.5253, "step": 2151 }, { "epoch": 17.216, "grad_norm": 126.16915893554688, "learning_rate": 4.602222222222222e-05, "loss": 1.2441, "step": 2152 }, { "epoch": 17.224, "grad_norm": 98.54940795898438, "learning_rate": 4.601777777777778e-05, "loss": 1.0148, "step": 2153 }, { "epoch": 17.232, "grad_norm": 27.139463424682617, "learning_rate": 4.6013333333333336e-05, "loss": 1.2253, "step": 2154 }, { "epoch": 17.24, "grad_norm": 33.03756332397461, "learning_rate": 4.600888888888889e-05, "loss": 1.2828, "step": 2155 }, { "epoch": 17.248, "grad_norm": 49.78603744506836, "learning_rate": 4.6004444444444445e-05, "loss": 1.6814, "step": 2156 }, { "epoch": 17.256, "grad_norm": 27.243270874023438, "learning_rate": 4.600000000000001e-05, "loss": 1.7432, "step": 2157 }, { "epoch": 17.264, "grad_norm": 29.533178329467773, "learning_rate": 4.599555555555556e-05, "loss": 1.4782, "step": 2158 }, { "epoch": 17.272, "grad_norm": 27.87911033630371, "learning_rate": 4.599111111111111e-05, "loss": 0.9601, "step": 2159 }, { "epoch": 17.28, "grad_norm": 15.716251373291016, "learning_rate": 4.5986666666666665e-05, "loss": 1.148, "step": 2160 }, { "epoch": 17.288, "grad_norm": 28.16516876220703, "learning_rate": 4.5982222222222226e-05, "loss": 1.3752, "step": 2161 }, { "epoch": 17.296, "grad_norm": 1172.62548828125, "learning_rate": 4.597777777777778e-05, "loss": 1.1269, "step": 2162 }, { "epoch": 17.304, "grad_norm": 30.50324821472168, "learning_rate": 4.5973333333333336e-05, "loss": 1.0005, "step": 2163 }, { "epoch": 17.312, "grad_norm": 
30.296903610229492, "learning_rate": 4.596888888888889e-05, "loss": 1.3898, "step": 2164 }, { "epoch": 17.32, "grad_norm": 68.56800842285156, "learning_rate": 4.5964444444444446e-05, "loss": 1.3019, "step": 2165 }, { "epoch": 17.328, "grad_norm": 30.571773529052734, "learning_rate": 4.596e-05, "loss": 1.436, "step": 2166 }, { "epoch": 17.336, "grad_norm": 142.37832641601562, "learning_rate": 4.5955555555555555e-05, "loss": 3.5725, "step": 2167 }, { "epoch": 17.344, "grad_norm": 32.133331298828125, "learning_rate": 4.595111111111111e-05, "loss": 1.6049, "step": 2168 }, { "epoch": 17.352, "grad_norm": 56.264888763427734, "learning_rate": 4.594666666666667e-05, "loss": 1.9892, "step": 2169 }, { "epoch": 17.36, "grad_norm": 43.42042922973633, "learning_rate": 4.5942222222222227e-05, "loss": 1.4396, "step": 2170 }, { "epoch": 17.368, "grad_norm": 110.6551742553711, "learning_rate": 4.593777777777778e-05, "loss": 1.5312, "step": 2171 }, { "epoch": 17.376, "grad_norm": 72.33045959472656, "learning_rate": 4.5933333333333336e-05, "loss": 1.4409, "step": 2172 }, { "epoch": 17.384, "grad_norm": 59.424686431884766, "learning_rate": 4.592888888888889e-05, "loss": 1.1431, "step": 2173 }, { "epoch": 17.392, "grad_norm": 90.6549072265625, "learning_rate": 4.5924444444444446e-05, "loss": 1.4107, "step": 2174 }, { "epoch": 17.4, "grad_norm": 17.34090805053711, "learning_rate": 4.592e-05, "loss": 1.3205, "step": 2175 }, { "epoch": 17.408, "grad_norm": 31.4638671875, "learning_rate": 4.5915555555555556e-05, "loss": 1.5027, "step": 2176 }, { "epoch": 17.416, "grad_norm": 30.601686477661133, "learning_rate": 4.591111111111112e-05, "loss": 2.3043, "step": 2177 }, { "epoch": 17.424, "grad_norm": 40.44391632080078, "learning_rate": 4.590666666666667e-05, "loss": 1.1338, "step": 2178 }, { "epoch": 17.432, "grad_norm": 29.111608505249023, "learning_rate": 4.590222222222223e-05, "loss": 1.4213, "step": 2179 }, { "epoch": 17.44, "grad_norm": 60.6359748840332, "learning_rate": 
4.589777777777778e-05, "loss": 2.1253, "step": 2180 }, { "epoch": 17.448, "grad_norm": 24.06447410583496, "learning_rate": 4.589333333333334e-05, "loss": 1.1603, "step": 2181 }, { "epoch": 17.456, "grad_norm": 64.45423126220703, "learning_rate": 4.588888888888889e-05, "loss": 1.1719, "step": 2182 }, { "epoch": 17.464, "grad_norm": 69.94112396240234, "learning_rate": 4.5884444444444446e-05, "loss": 1.036, "step": 2183 }, { "epoch": 17.472, "grad_norm": 40.83097457885742, "learning_rate": 4.588e-05, "loss": 1.416, "step": 2184 }, { "epoch": 17.48, "grad_norm": 56.03423309326172, "learning_rate": 4.587555555555556e-05, "loss": 1.6282, "step": 2185 }, { "epoch": 17.488, "grad_norm": 38.46604919433594, "learning_rate": 4.587111111111112e-05, "loss": 1.331, "step": 2186 }, { "epoch": 17.496, "grad_norm": 43.102848052978516, "learning_rate": 4.5866666666666666e-05, "loss": 1.2353, "step": 2187 }, { "epoch": 17.504, "grad_norm": 26.274770736694336, "learning_rate": 4.586222222222222e-05, "loss": 1.2597, "step": 2188 }, { "epoch": 17.512, "grad_norm": 31.779298782348633, "learning_rate": 4.5857777777777775e-05, "loss": 1.1933, "step": 2189 }, { "epoch": 17.52, "grad_norm": 40.22209548950195, "learning_rate": 4.585333333333334e-05, "loss": 1.4497, "step": 2190 }, { "epoch": 17.528, "grad_norm": 29.060863494873047, "learning_rate": 4.584888888888889e-05, "loss": 1.3002, "step": 2191 }, { "epoch": 17.536, "grad_norm": 22.225221633911133, "learning_rate": 4.584444444444445e-05, "loss": 1.4907, "step": 2192 }, { "epoch": 17.544, "grad_norm": 28.829099655151367, "learning_rate": 4.584e-05, "loss": 1.4648, "step": 2193 }, { "epoch": 17.552, "grad_norm": 44.67953109741211, "learning_rate": 4.5835555555555556e-05, "loss": 1.3643, "step": 2194 }, { "epoch": 17.56, "grad_norm": 73.46977996826172, "learning_rate": 4.583111111111111e-05, "loss": 1.3495, "step": 2195 }, { "epoch": 17.568, "grad_norm": 16.847450256347656, "learning_rate": 4.5826666666666666e-05, "loss": 1.7253, "step": 
2196 }, { "epoch": 17.576, "grad_norm": 22.874914169311523, "learning_rate": 4.582222222222222e-05, "loss": 1.3725, "step": 2197 }, { "epoch": 17.584, "grad_norm": 160.62716674804688, "learning_rate": 4.581777777777778e-05, "loss": 1.2935, "step": 2198 }, { "epoch": 17.592, "grad_norm": 25.337541580200195, "learning_rate": 4.581333333333334e-05, "loss": 1.3485, "step": 2199 }, { "epoch": 17.6, "grad_norm": 25.62245750427246, "learning_rate": 4.580888888888889e-05, "loss": 0.9573, "step": 2200 }, { "epoch": 17.608, "grad_norm": 72.19242858886719, "learning_rate": 4.580444444444445e-05, "loss": 1.8192, "step": 2201 }, { "epoch": 17.616, "grad_norm": 41.73384094238281, "learning_rate": 4.58e-05, "loss": 1.3965, "step": 2202 }, { "epoch": 17.624, "grad_norm": 39.535194396972656, "learning_rate": 4.579555555555556e-05, "loss": 2.5562, "step": 2203 }, { "epoch": 17.632, "grad_norm": 21.1689510345459, "learning_rate": 4.579111111111111e-05, "loss": 1.3442, "step": 2204 }, { "epoch": 17.64, "grad_norm": 28.323028564453125, "learning_rate": 4.5786666666666666e-05, "loss": 1.9, "step": 2205 }, { "epoch": 17.648, "grad_norm": 26.993816375732422, "learning_rate": 4.578222222222223e-05, "loss": 1.7241, "step": 2206 }, { "epoch": 17.656, "grad_norm": 49.110435485839844, "learning_rate": 4.577777777777778e-05, "loss": 1.6561, "step": 2207 }, { "epoch": 17.664, "grad_norm": 45.18815994262695, "learning_rate": 4.577333333333334e-05, "loss": 1.2149, "step": 2208 }, { "epoch": 17.672, "grad_norm": 26.73065948486328, "learning_rate": 4.5768888888888886e-05, "loss": 1.2497, "step": 2209 }, { "epoch": 17.68, "grad_norm": 30.029598236083984, "learning_rate": 4.576444444444445e-05, "loss": 1.3196, "step": 2210 }, { "epoch": 17.688, "grad_norm": 36.940494537353516, "learning_rate": 4.576e-05, "loss": 0.9026, "step": 2211 }, { "epoch": 17.696, "grad_norm": 33.850669860839844, "learning_rate": 4.575555555555556e-05, "loss": 1.3723, "step": 2212 }, { "epoch": 17.704, "grad_norm": 
30.723133087158203, "learning_rate": 4.575111111111111e-05, "loss": 1.5535, "step": 2213 }, { "epoch": 17.712, "grad_norm": 24.282804489135742, "learning_rate": 4.5746666666666674e-05, "loss": 0.9638, "step": 2214 }, { "epoch": 17.72, "grad_norm": 26.474824905395508, "learning_rate": 4.574222222222223e-05, "loss": 1.4806, "step": 2215 }, { "epoch": 17.728, "grad_norm": 16.56348419189453, "learning_rate": 4.5737777777777777e-05, "loss": 1.3537, "step": 2216 }, { "epoch": 17.736, "grad_norm": 27.11639976501465, "learning_rate": 4.573333333333333e-05, "loss": 0.9726, "step": 2217 }, { "epoch": 17.744, "grad_norm": 21.69862174987793, "learning_rate": 4.572888888888889e-05, "loss": 1.3056, "step": 2218 }, { "epoch": 17.752, "grad_norm": 39.788841247558594, "learning_rate": 4.572444444444445e-05, "loss": 1.333, "step": 2219 }, { "epoch": 17.76, "grad_norm": 33.35932922363281, "learning_rate": 4.572e-05, "loss": 1.4782, "step": 2220 }, { "epoch": 17.768, "grad_norm": 70.55355834960938, "learning_rate": 4.571555555555556e-05, "loss": 1.3131, "step": 2221 }, { "epoch": 17.776, "grad_norm": 56.72207260131836, "learning_rate": 4.571111111111111e-05, "loss": 2.2204, "step": 2222 }, { "epoch": 17.784, "grad_norm": 47.53450012207031, "learning_rate": 4.570666666666667e-05, "loss": 1.5067, "step": 2223 }, { "epoch": 17.792, "grad_norm": 28.51338005065918, "learning_rate": 4.570222222222222e-05, "loss": 1.0724, "step": 2224 }, { "epoch": 17.8, "grad_norm": 16.675880432128906, "learning_rate": 4.569777777777778e-05, "loss": 1.5623, "step": 2225 }, { "epoch": 17.808, "grad_norm": 18.448007583618164, "learning_rate": 4.569333333333334e-05, "loss": 1.4119, "step": 2226 }, { "epoch": 17.816, "grad_norm": 17.744089126586914, "learning_rate": 4.5688888888888893e-05, "loss": 1.4275, "step": 2227 }, { "epoch": 17.824, "grad_norm": 30.66411590576172, "learning_rate": 4.568444444444445e-05, "loss": 1.4744, "step": 2228 }, { "epoch": 17.832, "grad_norm": 29.493993759155273, "learning_rate": 
4.568e-05, "loss": 1.1625, "step": 2229 }, { "epoch": 17.84, "grad_norm": 22.90383529663086, "learning_rate": 4.567555555555556e-05, "loss": 0.8861, "step": 2230 }, { "epoch": 17.848, "grad_norm": 23.699230194091797, "learning_rate": 4.567111111111111e-05, "loss": 1.4034, "step": 2231 }, { "epoch": 17.856, "grad_norm": 43.75531005859375, "learning_rate": 4.566666666666667e-05, "loss": 0.9835, "step": 2232 }, { "epoch": 17.864, "grad_norm": 37.669795989990234, "learning_rate": 4.566222222222222e-05, "loss": 0.983, "step": 2233 }, { "epoch": 17.872, "grad_norm": 78.19281005859375, "learning_rate": 4.5657777777777784e-05, "loss": 1.2822, "step": 2234 }, { "epoch": 17.88, "grad_norm": 26.16254234313965, "learning_rate": 4.565333333333334e-05, "loss": 1.4971, "step": 2235 }, { "epoch": 17.888, "grad_norm": 39.748836517333984, "learning_rate": 4.5648888888888894e-05, "loss": 1.1289, "step": 2236 }, { "epoch": 17.896, "grad_norm": 17.43779182434082, "learning_rate": 4.564444444444444e-05, "loss": 1.449, "step": 2237 }, { "epoch": 17.904, "grad_norm": 25.286888122558594, "learning_rate": 4.564e-05, "loss": 1.0713, "step": 2238 }, { "epoch": 17.912, "grad_norm": 33.55952835083008, "learning_rate": 4.563555555555556e-05, "loss": 1.1935, "step": 2239 }, { "epoch": 17.92, "grad_norm": 30.33073616027832, "learning_rate": 4.563111111111111e-05, "loss": 1.5355, "step": 2240 }, { "epoch": 17.928, "grad_norm": 31.02189826965332, "learning_rate": 4.562666666666667e-05, "loss": 1.3382, "step": 2241 }, { "epoch": 17.936, "grad_norm": 42.87187957763672, "learning_rate": 4.562222222222222e-05, "loss": 1.2588, "step": 2242 }, { "epoch": 17.944, "grad_norm": 104.99735260009766, "learning_rate": 4.5617777777777784e-05, "loss": 1.448, "step": 2243 }, { "epoch": 17.951999999999998, "grad_norm": 60.023162841796875, "learning_rate": 4.561333333333333e-05, "loss": 1.5453, "step": 2244 }, { "epoch": 17.96, "grad_norm": 27.73769187927246, "learning_rate": 4.560888888888889e-05, "loss": 1.8037, 
"step": 2245 }, { "epoch": 17.968, "grad_norm": 33.8994140625, "learning_rate": 4.560444444444444e-05, "loss": 1.9859, "step": 2246 }, { "epoch": 17.976, "grad_norm": 25.26261329650879, "learning_rate": 4.5600000000000004e-05, "loss": 1.3363, "step": 2247 }, { "epoch": 17.984, "grad_norm": 45.046573638916016, "learning_rate": 4.559555555555556e-05, "loss": 1.9841, "step": 2248 }, { "epoch": 17.992, "grad_norm": 18.77027130126953, "learning_rate": 4.5591111111111114e-05, "loss": 1.4946, "step": 2249 }, { "epoch": 18.0, "grad_norm": 39.10255813598633, "learning_rate": 4.558666666666667e-05, "loss": 1.3413, "step": 2250 }, { "epoch": 18.0, "eval_loss": 1.34979248046875, "eval_map": 0.2554, "eval_map_50": 0.5306, "eval_map_75": 0.1917, "eval_map_Coverall": 0.4597, "eval_map_Face_Shield": 0.326, "eval_map_Gloves": 0.1152, "eval_map_Goggles": 0.1012, "eval_map_Mask": 0.2747, "eval_map_large": 0.3471, "eval_map_medium": 0.1664, "eval_map_small": 0.1146, "eval_mar_1": 0.2427, "eval_mar_10": 0.4457, "eval_mar_100": 0.4593, "eval_mar_100_Coverall": 0.6511, "eval_mar_100_Face_Shield": 0.5824, "eval_mar_100_Gloves": 0.3262, "eval_mar_100_Goggles": 0.3656, "eval_mar_100_Mask": 0.3712, "eval_mar_large": 0.5629, "eval_mar_medium": 0.3398, "eval_mar_small": 0.1828, "eval_runtime": 2.4965, "eval_samples_per_second": 11.616, "eval_steps_per_second": 0.801, "step": 2250 }, { "epoch": 18.008, "grad_norm": 38.007694244384766, "learning_rate": 4.558222222222222e-05, "loss": 1.2421, "step": 2251 }, { "epoch": 18.016, "grad_norm": 16.50503921508789, "learning_rate": 4.557777777777778e-05, "loss": 2.1113, "step": 2252 }, { "epoch": 18.024, "grad_norm": 90.63685607910156, "learning_rate": 4.557333333333333e-05, "loss": 1.2457, "step": 2253 }, { "epoch": 18.032, "grad_norm": 15.049619674682617, "learning_rate": 4.556888888888889e-05, "loss": 1.4465, "step": 2254 }, { "epoch": 18.04, "grad_norm": 15.936798095703125, "learning_rate": 4.556444444444445e-05, "loss": 1.4831, "step": 2255 }, { 
"epoch": 18.048, "grad_norm": 43.86414337158203, "learning_rate": 4.5560000000000004e-05, "loss": 1.3913, "step": 2256 }, { "epoch": 18.056, "grad_norm": 20.50970458984375, "learning_rate": 4.555555555555556e-05, "loss": 1.6975, "step": 2257 }, { "epoch": 18.064, "grad_norm": 29.88434410095215, "learning_rate": 4.5551111111111114e-05, "loss": 1.8053, "step": 2258 }, { "epoch": 18.072, "grad_norm": 32.616973876953125, "learning_rate": 4.554666666666667e-05, "loss": 1.2744, "step": 2259 }, { "epoch": 18.08, "grad_norm": 77.9848403930664, "learning_rate": 4.5542222222222224e-05, "loss": 1.6149, "step": 2260 }, { "epoch": 18.088, "grad_norm": 24.391416549682617, "learning_rate": 4.553777777777778e-05, "loss": 1.0354, "step": 2261 }, { "epoch": 18.096, "grad_norm": 26.476884841918945, "learning_rate": 4.553333333333333e-05, "loss": 1.3391, "step": 2262 }, { "epoch": 18.104, "grad_norm": 33.938446044921875, "learning_rate": 4.5528888888888895e-05, "loss": 1.2897, "step": 2263 }, { "epoch": 18.112, "grad_norm": 28.070981979370117, "learning_rate": 4.552444444444445e-05, "loss": 1.3353, "step": 2264 }, { "epoch": 18.12, "grad_norm": 132.2140655517578, "learning_rate": 4.5520000000000005e-05, "loss": 1.9223, "step": 2265 }, { "epoch": 18.128, "grad_norm": 27.646024703979492, "learning_rate": 4.551555555555555e-05, "loss": 1.6636, "step": 2266 }, { "epoch": 18.136, "grad_norm": 42.69108581542969, "learning_rate": 4.5511111111111114e-05, "loss": 1.2419, "step": 2267 }, { "epoch": 18.144, "grad_norm": 24.806854248046875, "learning_rate": 4.550666666666667e-05, "loss": 1.2752, "step": 2268 }, { "epoch": 18.152, "grad_norm": 30.680253982543945, "learning_rate": 4.5502222222222224e-05, "loss": 1.079, "step": 2269 }, { "epoch": 18.16, "grad_norm": 20.907373428344727, "learning_rate": 4.549777777777778e-05, "loss": 1.3986, "step": 2270 }, { "epoch": 18.168, "grad_norm": 32.07072067260742, "learning_rate": 4.549333333333334e-05, "loss": 1.5146, "step": 2271 }, { "epoch": 18.176, 
"grad_norm": 37.284080505371094, "learning_rate": 4.5488888888888895e-05, "loss": 1.4296, "step": 2272 }, { "epoch": 18.184, "grad_norm": 22.212223052978516, "learning_rate": 4.5484444444444443e-05, "loss": 1.4727, "step": 2273 }, { "epoch": 18.192, "grad_norm": 44.84404373168945, "learning_rate": 4.548e-05, "loss": 1.2425, "step": 2274 }, { "epoch": 18.2, "grad_norm": 35.75189971923828, "learning_rate": 4.547555555555556e-05, "loss": 1.768, "step": 2275 }, { "epoch": 18.208, "grad_norm": 51.0245361328125, "learning_rate": 4.5471111111111115e-05, "loss": 1.2913, "step": 2276 }, { "epoch": 18.216, "grad_norm": 38.37704849243164, "learning_rate": 4.546666666666667e-05, "loss": 2.4456, "step": 2277 }, { "epoch": 18.224, "grad_norm": 38.792564392089844, "learning_rate": 4.5462222222222224e-05, "loss": 1.2817, "step": 2278 }, { "epoch": 18.232, "grad_norm": 22.834430694580078, "learning_rate": 4.545777777777778e-05, "loss": 2.0307, "step": 2279 }, { "epoch": 18.24, "grad_norm": 27.244016647338867, "learning_rate": 4.5453333333333334e-05, "loss": 1.5206, "step": 2280 }, { "epoch": 18.248, "grad_norm": 37.738243103027344, "learning_rate": 4.544888888888889e-05, "loss": 1.5297, "step": 2281 }, { "epoch": 18.256, "grad_norm": 28.135202407836914, "learning_rate": 4.5444444444444444e-05, "loss": 1.1729, "step": 2282 }, { "epoch": 18.264, "grad_norm": 21.458942413330078, "learning_rate": 4.5440000000000005e-05, "loss": 1.2901, "step": 2283 }, { "epoch": 18.272, "grad_norm": 26.192604064941406, "learning_rate": 4.543555555555556e-05, "loss": 1.4614, "step": 2284 }, { "epoch": 18.28, "grad_norm": 19.814979553222656, "learning_rate": 4.5431111111111115e-05, "loss": 1.1709, "step": 2285 }, { "epoch": 18.288, "grad_norm": 27.14537811279297, "learning_rate": 4.542666666666667e-05, "loss": 1.2173, "step": 2286 }, { "epoch": 18.296, "grad_norm": 22.423734664916992, "learning_rate": 4.5422222222222225e-05, "loss": 1.5605, "step": 2287 }, { "epoch": 18.304, "grad_norm": 
30.68739128112793, "learning_rate": 4.541777777777778e-05, "loss": 1.4025, "step": 2288 }, { "epoch": 18.312, "grad_norm": 32.427223205566406, "learning_rate": 4.5413333333333334e-05, "loss": 1.1823, "step": 2289 }, { "epoch": 18.32, "grad_norm": 25.716693878173828, "learning_rate": 4.540888888888889e-05, "loss": 1.651, "step": 2290 }, { "epoch": 18.328, "grad_norm": 26.453022003173828, "learning_rate": 4.5404444444444444e-05, "loss": 1.4563, "step": 2291 }, { "epoch": 18.336, "grad_norm": 32.95061492919922, "learning_rate": 4.5400000000000006e-05, "loss": 1.4769, "step": 2292 }, { "epoch": 18.344, "grad_norm": 25.219247817993164, "learning_rate": 4.539555555555556e-05, "loss": 1.8922, "step": 2293 }, { "epoch": 18.352, "grad_norm": 22.636472702026367, "learning_rate": 4.539111111111111e-05, "loss": 1.6867, "step": 2294 }, { "epoch": 18.36, "grad_norm": 24.288423538208008, "learning_rate": 4.5386666666666664e-05, "loss": 1.7126, "step": 2295 }, { "epoch": 18.368, "grad_norm": 16.694564819335938, "learning_rate": 4.5382222222222225e-05, "loss": 1.2762, "step": 2296 }, { "epoch": 18.376, "grad_norm": 64.49100494384766, "learning_rate": 4.537777777777778e-05, "loss": 1.3424, "step": 2297 }, { "epoch": 18.384, "grad_norm": 105.02654266357422, "learning_rate": 4.5373333333333335e-05, "loss": 1.1987, "step": 2298 }, { "epoch": 18.392, "grad_norm": 30.160236358642578, "learning_rate": 4.536888888888889e-05, "loss": 1.1919, "step": 2299 }, { "epoch": 18.4, "grad_norm": 139.14418029785156, "learning_rate": 4.536444444444445e-05, "loss": 1.5852, "step": 2300 }, { "epoch": 18.408, "grad_norm": 37.057559967041016, "learning_rate": 4.536e-05, "loss": 1.8274, "step": 2301 }, { "epoch": 18.416, "grad_norm": 115.97101593017578, "learning_rate": 4.5355555555555554e-05, "loss": 1.3834, "step": 2302 }, { "epoch": 18.424, "grad_norm": 21.266265869140625, "learning_rate": 4.535111111111111e-05, "loss": 1.9055, "step": 2303 }, { "epoch": 18.432, "grad_norm": 20.116649627685547, 
"learning_rate": 4.534666666666667e-05, "loss": 1.1812, "step": 2304 }, { "epoch": 18.44, "grad_norm": 43.26536178588867, "learning_rate": 4.5342222222222226e-05, "loss": 1.2313, "step": 2305 }, { "epoch": 18.448, "grad_norm": 42.013206481933594, "learning_rate": 4.533777777777778e-05, "loss": 1.3455, "step": 2306 }, { "epoch": 18.456, "grad_norm": 28.41094207763672, "learning_rate": 4.5333333333333335e-05, "loss": 1.3261, "step": 2307 }, { "epoch": 18.464, "grad_norm": 36.20697021484375, "learning_rate": 4.532888888888889e-05, "loss": 1.6624, "step": 2308 }, { "epoch": 18.472, "grad_norm": 36.869388580322266, "learning_rate": 4.5324444444444445e-05, "loss": 1.3946, "step": 2309 }, { "epoch": 18.48, "grad_norm": 78.20841979980469, "learning_rate": 4.532e-05, "loss": 1.1006, "step": 2310 }, { "epoch": 18.488, "grad_norm": 19.810930252075195, "learning_rate": 4.5315555555555555e-05, "loss": 1.1796, "step": 2311 }, { "epoch": 18.496, "grad_norm": 18.608755111694336, "learning_rate": 4.5311111111111116e-05, "loss": 1.2535, "step": 2312 }, { "epoch": 18.504, "grad_norm": 24.930988311767578, "learning_rate": 4.530666666666667e-05, "loss": 1.7286, "step": 2313 }, { "epoch": 18.512, "grad_norm": 36.06727981567383, "learning_rate": 4.5302222222222226e-05, "loss": 0.9869, "step": 2314 }, { "epoch": 18.52, "grad_norm": 55.007225036621094, "learning_rate": 4.529777777777778e-05, "loss": 1.6552, "step": 2315 }, { "epoch": 18.528, "grad_norm": 47.327674865722656, "learning_rate": 4.5293333333333336e-05, "loss": 1.2126, "step": 2316 }, { "epoch": 18.536, "grad_norm": 19.62275505065918, "learning_rate": 4.528888888888889e-05, "loss": 1.1914, "step": 2317 }, { "epoch": 18.544, "grad_norm": 27.912790298461914, "learning_rate": 4.5284444444444445e-05, "loss": 1.3342, "step": 2318 }, { "epoch": 18.552, "grad_norm": 46.302642822265625, "learning_rate": 4.528e-05, "loss": 1.1168, "step": 2319 }, { "epoch": 18.56, "grad_norm": 29.027587890625, "learning_rate": 4.527555555555556e-05, 
"loss": 1.7002, "step": 2320 }, { "epoch": 18.568, "grad_norm": 58.077415466308594, "learning_rate": 4.527111111111112e-05, "loss": 1.3825, "step": 2321 }, { "epoch": 18.576, "grad_norm": 39.1215934753418, "learning_rate": 4.526666666666667e-05, "loss": 1.2858, "step": 2322 }, { "epoch": 18.584, "grad_norm": 47.136322021484375, "learning_rate": 4.526222222222222e-05, "loss": 2.6354, "step": 2323 }, { "epoch": 18.592, "grad_norm": 18.0994873046875, "learning_rate": 4.525777777777778e-05, "loss": 1.5253, "step": 2324 }, { "epoch": 18.6, "grad_norm": 23.311641693115234, "learning_rate": 4.5253333333333336e-05, "loss": 1.1479, "step": 2325 }, { "epoch": 18.608, "grad_norm": 39.10108184814453, "learning_rate": 4.524888888888889e-05, "loss": 1.2407, "step": 2326 }, { "epoch": 18.616, "grad_norm": 45.82484436035156, "learning_rate": 4.5244444444444446e-05, "loss": 1.3539, "step": 2327 }, { "epoch": 18.624, "grad_norm": 48.76310348510742, "learning_rate": 4.524000000000001e-05, "loss": 2.6168, "step": 2328 }, { "epoch": 18.632, "grad_norm": 61.6785888671875, "learning_rate": 4.523555555555556e-05, "loss": 1.1676, "step": 2329 }, { "epoch": 18.64, "grad_norm": 27.221599578857422, "learning_rate": 4.523111111111111e-05, "loss": 1.7817, "step": 2330 }, { "epoch": 18.648, "grad_norm": 37.93451690673828, "learning_rate": 4.5226666666666665e-05, "loss": 1.2685, "step": 2331 }, { "epoch": 18.656, "grad_norm": 43.165950775146484, "learning_rate": 4.522222222222223e-05, "loss": 1.1924, "step": 2332 }, { "epoch": 18.664, "grad_norm": 20.11996078491211, "learning_rate": 4.521777777777778e-05, "loss": 1.1719, "step": 2333 }, { "epoch": 18.672, "grad_norm": 52.88570022583008, "learning_rate": 4.5213333333333336e-05, "loss": 1.2458, "step": 2334 }, { "epoch": 18.68, "grad_norm": 87.15320587158203, "learning_rate": 4.520888888888889e-05, "loss": 1.6688, "step": 2335 }, { "epoch": 18.688, "grad_norm": 28.217676162719727, "learning_rate": 4.5204444444444446e-05, "loss": 1.1791, "step": 
2336 }, { "epoch": 18.696, "grad_norm": 42.90725326538086, "learning_rate": 4.52e-05, "loss": 1.6504, "step": 2337 }, { "epoch": 18.704, "grad_norm": 15.644707679748535, "learning_rate": 4.5195555555555556e-05, "loss": 0.9874, "step": 2338 }, { "epoch": 18.712, "grad_norm": 29.158897399902344, "learning_rate": 4.519111111111111e-05, "loss": 1.4143, "step": 2339 }, { "epoch": 18.72, "grad_norm": 32.6201286315918, "learning_rate": 4.518666666666667e-05, "loss": 1.1419, "step": 2340 }, { "epoch": 18.728, "grad_norm": 38.1205940246582, "learning_rate": 4.518222222222223e-05, "loss": 1.035, "step": 2341 }, { "epoch": 18.736, "grad_norm": 27.5091609954834, "learning_rate": 4.517777777777778e-05, "loss": 1.269, "step": 2342 }, { "epoch": 18.744, "grad_norm": 18.77747917175293, "learning_rate": 4.517333333333334e-05, "loss": 1.4761, "step": 2343 }, { "epoch": 18.752, "grad_norm": 24.37163734436035, "learning_rate": 4.516888888888889e-05, "loss": 1.676, "step": 2344 }, { "epoch": 18.76, "grad_norm": 24.286046981811523, "learning_rate": 4.5164444444444446e-05, "loss": 1.0132, "step": 2345 }, { "epoch": 18.768, "grad_norm": 30.22079849243164, "learning_rate": 4.516e-05, "loss": 1.5128, "step": 2346 }, { "epoch": 18.776, "grad_norm": 18.772008895874023, "learning_rate": 4.5155555555555556e-05, "loss": 1.6705, "step": 2347 }, { "epoch": 18.784, "grad_norm": 29.424999237060547, "learning_rate": 4.515111111111111e-05, "loss": 1.1306, "step": 2348 }, { "epoch": 18.792, "grad_norm": 33.470664978027344, "learning_rate": 4.514666666666667e-05, "loss": 1.5156, "step": 2349 }, { "epoch": 18.8, "grad_norm": 34.51283264160156, "learning_rate": 4.514222222222223e-05, "loss": 1.2531, "step": 2350 }, { "epoch": 18.808, "grad_norm": 37.59189224243164, "learning_rate": 4.5137777777777776e-05, "loss": 2.0188, "step": 2351 }, { "epoch": 18.816, "grad_norm": 24.735214233398438, "learning_rate": 4.513333333333333e-05, "loss": 1.0897, "step": 2352 }, { "epoch": 18.824, "grad_norm": 
28.754961013793945, "learning_rate": 4.512888888888889e-05, "loss": 1.1511, "step": 2353 }, { "epoch": 18.832, "grad_norm": 37.90153884887695, "learning_rate": 4.512444444444445e-05, "loss": 1.0989, "step": 2354 }, { "epoch": 18.84, "grad_norm": 25.61355209350586, "learning_rate": 4.512e-05, "loss": 1.4227, "step": 2355 }, { "epoch": 18.848, "grad_norm": 17.938758850097656, "learning_rate": 4.5115555555555557e-05, "loss": 1.4501, "step": 2356 }, { "epoch": 18.856, "grad_norm": 34.39244079589844, "learning_rate": 4.511111111111112e-05, "loss": 1.4345, "step": 2357 }, { "epoch": 18.864, "grad_norm": 33.95014190673828, "learning_rate": 4.5106666666666666e-05, "loss": 1.2868, "step": 2358 }, { "epoch": 18.872, "grad_norm": 16.842130661010742, "learning_rate": 4.510222222222222e-05, "loss": 1.2251, "step": 2359 }, { "epoch": 18.88, "grad_norm": 23.6384334564209, "learning_rate": 4.5097777777777776e-05, "loss": 1.3372, "step": 2360 }, { "epoch": 18.888, "grad_norm": 24.642139434814453, "learning_rate": 4.509333333333334e-05, "loss": 0.9906, "step": 2361 }, { "epoch": 18.896, "grad_norm": 16.97746467590332, "learning_rate": 4.508888888888889e-05, "loss": 0.9183, "step": 2362 }, { "epoch": 18.904, "grad_norm": 92.26619720458984, "learning_rate": 4.508444444444445e-05, "loss": 0.9263, "step": 2363 }, { "epoch": 18.912, "grad_norm": 35.35995101928711, "learning_rate": 4.508e-05, "loss": 1.471, "step": 2364 }, { "epoch": 18.92, "grad_norm": 40.48569107055664, "learning_rate": 4.507555555555556e-05, "loss": 1.3251, "step": 2365 }, { "epoch": 18.928, "grad_norm": 33.214603424072266, "learning_rate": 4.507111111111111e-05, "loss": 1.0508, "step": 2366 }, { "epoch": 18.936, "grad_norm": 25.693601608276367, "learning_rate": 4.5066666666666667e-05, "loss": 1.0718, "step": 2367 }, { "epoch": 18.944, "grad_norm": 31.67488670349121, "learning_rate": 4.506222222222222e-05, "loss": 1.2668, "step": 2368 }, { "epoch": 18.951999999999998, "grad_norm": 30.43320083618164, "learning_rate": 
4.505777777777778e-05, "loss": 1.1194, "step": 2369 }, { "epoch": 18.96, "grad_norm": 31.5770206451416, "learning_rate": 4.505333333333334e-05, "loss": 3.4912, "step": 2370 }, { "epoch": 18.968, "grad_norm": 28.25836753845215, "learning_rate": 4.504888888888889e-05, "loss": 1.2934, "step": 2371 }, { "epoch": 18.976, "grad_norm": 19.072999954223633, "learning_rate": 4.504444444444445e-05, "loss": 0.9863, "step": 2372 }, { "epoch": 18.984, "grad_norm": 25.215688705444336, "learning_rate": 4.504e-05, "loss": 1.3682, "step": 2373 }, { "epoch": 18.992, "grad_norm": 46.582576751708984, "learning_rate": 4.503555555555556e-05, "loss": 1.4031, "step": 2374 }, { "epoch": 19.0, "grad_norm": 47.114990234375, "learning_rate": 4.503111111111111e-05, "loss": 1.8109, "step": 2375 }, { "epoch": 19.0, "eval_loss": 1.359973669052124, "eval_map": 0.2614, "eval_map_50": 0.5285, "eval_map_75": 0.2375, "eval_map_Coverall": 0.5333, "eval_map_Face_Shield": 0.2348, "eval_map_Gloves": 0.139, "eval_map_Goggles": 0.1302, "eval_map_Mask": 0.2699, "eval_map_large": 0.4058, "eval_map_medium": 0.2261, "eval_map_small": 0.1527, "eval_mar_1": 0.2657, "eval_mar_10": 0.4507, "eval_mar_100": 0.4623, "eval_mar_100_Coverall": 0.7067, "eval_mar_100_Face_Shield": 0.5353, "eval_mar_100_Gloves": 0.3344, "eval_mar_100_Goggles": 0.3719, "eval_mar_100_Mask": 0.3635, "eval_mar_large": 0.6266, "eval_mar_medium": 0.3843, "eval_mar_small": 0.1994, "eval_runtime": 2.4979, "eval_samples_per_second": 11.61, "eval_steps_per_second": 0.801, "step": 2375 }, { "epoch": 19.008, "grad_norm": 24.325912475585938, "learning_rate": 4.502666666666667e-05, "loss": 1.0379, "step": 2376 }, { "epoch": 19.016, "grad_norm": 20.828157424926758, "learning_rate": 4.502222222222223e-05, "loss": 1.1961, "step": 2377 }, { "epoch": 19.024, "grad_norm": 62.92646408081055, "learning_rate": 4.5017777777777783e-05, "loss": 1.3947, "step": 2378 }, { "epoch": 19.032, "grad_norm": 18.00332260131836, "learning_rate": 4.501333333333334e-05, "loss": 
1.7741, "step": 2379 }, { "epoch": 19.04, "grad_norm": 33.60249710083008, "learning_rate": 4.5008888888888886e-05, "loss": 0.8289, "step": 2380 }, { "epoch": 19.048, "grad_norm": 25.443532943725586, "learning_rate": 4.500444444444445e-05, "loss": 1.4097, "step": 2381 }, { "epoch": 19.056, "grad_norm": 24.857139587402344, "learning_rate": 4.5e-05, "loss": 1.2013, "step": 2382 }, { "epoch": 19.064, "grad_norm": 16.89017677307129, "learning_rate": 4.499555555555556e-05, "loss": 1.6676, "step": 2383 }, { "epoch": 19.072, "grad_norm": 23.32118034362793, "learning_rate": 4.499111111111111e-05, "loss": 1.501, "step": 2384 }, { "epoch": 19.08, "grad_norm": 83.39396667480469, "learning_rate": 4.4986666666666674e-05, "loss": 1.1837, "step": 2385 }, { "epoch": 19.088, "grad_norm": 22.565059661865234, "learning_rate": 4.498222222222222e-05, "loss": 1.2049, "step": 2386 }, { "epoch": 19.096, "grad_norm": 21.663684844970703, "learning_rate": 4.497777777777778e-05, "loss": 1.5043, "step": 2387 }, { "epoch": 19.104, "grad_norm": 19.791540145874023, "learning_rate": 4.497333333333333e-05, "loss": 1.5255, "step": 2388 }, { "epoch": 19.112, "grad_norm": 23.750892639160156, "learning_rate": 4.4968888888888894e-05, "loss": 1.2851, "step": 2389 }, { "epoch": 19.12, "grad_norm": 30.358430862426758, "learning_rate": 4.496444444444445e-05, "loss": 1.2164, "step": 2390 }, { "epoch": 19.128, "grad_norm": 34.968833923339844, "learning_rate": 4.496e-05, "loss": 1.744, "step": 2391 }, { "epoch": 19.136, "grad_norm": 64.41290283203125, "learning_rate": 4.495555555555556e-05, "loss": 1.453, "step": 2392 }, { "epoch": 19.144, "grad_norm": 32.57810592651367, "learning_rate": 4.495111111111111e-05, "loss": 1.3422, "step": 2393 }, { "epoch": 19.152, "grad_norm": 19.374618530273438, "learning_rate": 4.494666666666667e-05, "loss": 1.0033, "step": 2394 }, { "epoch": 19.16, "grad_norm": 27.17669677734375, "learning_rate": 4.494222222222222e-05, "loss": 0.8665, "step": 2395 }, { "epoch": 19.168, 
"grad_norm": 29.18901824951172, "learning_rate": 4.493777777777778e-05, "loss": 1.1728, "step": 2396 }, { "epoch": 19.176, "grad_norm": 91.29004669189453, "learning_rate": 4.493333333333333e-05, "loss": 1.2432, "step": 2397 }, { "epoch": 19.184, "grad_norm": 20.759225845336914, "learning_rate": 4.4928888888888894e-05, "loss": 1.2931, "step": 2398 }, { "epoch": 19.192, "grad_norm": 32.376739501953125, "learning_rate": 4.492444444444445e-05, "loss": 1.1321, "step": 2399 }, { "epoch": 19.2, "grad_norm": 24.02031898498535, "learning_rate": 4.4920000000000004e-05, "loss": 1.4746, "step": 2400 }, { "epoch": 19.208, "grad_norm": 17.710325241088867, "learning_rate": 4.491555555555556e-05, "loss": 1.0346, "step": 2401 }, { "epoch": 19.216, "grad_norm": 37.826072692871094, "learning_rate": 4.491111111111111e-05, "loss": 1.2278, "step": 2402 }, { "epoch": 19.224, "grad_norm": 16.8991756439209, "learning_rate": 4.490666666666667e-05, "loss": 1.7773, "step": 2403 }, { "epoch": 19.232, "grad_norm": 103.60989379882812, "learning_rate": 4.490222222222222e-05, "loss": 1.6637, "step": 2404 }, { "epoch": 19.24, "grad_norm": 24.903732299804688, "learning_rate": 4.489777777777778e-05, "loss": 1.5094, "step": 2405 }, { "epoch": 19.248, "grad_norm": 94.6587905883789, "learning_rate": 4.489333333333334e-05, "loss": 0.8806, "step": 2406 }, { "epoch": 19.256, "grad_norm": 29.687198638916016, "learning_rate": 4.4888888888888894e-05, "loss": 1.0871, "step": 2407 }, { "epoch": 19.264, "grad_norm": 38.563995361328125, "learning_rate": 4.488444444444444e-05, "loss": 1.8612, "step": 2408 }, { "epoch": 19.272, "grad_norm": 23.091745376586914, "learning_rate": 4.488e-05, "loss": 1.0624, "step": 2409 }, { "epoch": 19.28, "grad_norm": 48.21519470214844, "learning_rate": 4.487555555555556e-05, "loss": 1.8311, "step": 2410 }, { "epoch": 19.288, "grad_norm": 23.735342025756836, "learning_rate": 4.4871111111111114e-05, "loss": 1.3423, "step": 2411 }, { "epoch": 19.296, "grad_norm": 41.23270797729492, 
"learning_rate": 4.486666666666667e-05, "loss": 1.6932, "step": 2412 }, { "epoch": 19.304, "grad_norm": 32.113502502441406, "learning_rate": 4.486222222222222e-05, "loss": 1.2415, "step": 2413 }, { "epoch": 19.312, "grad_norm": 31.297622680664062, "learning_rate": 4.4857777777777785e-05, "loss": 2.1343, "step": 2414 }, { "epoch": 19.32, "grad_norm": 54.699745178222656, "learning_rate": 4.485333333333333e-05, "loss": 1.5005, "step": 2415 }, { "epoch": 19.328, "grad_norm": 46.60300827026367, "learning_rate": 4.484888888888889e-05, "loss": 0.9472, "step": 2416 }, { "epoch": 19.336, "grad_norm": 27.383480072021484, "learning_rate": 4.484444444444444e-05, "loss": 1.715, "step": 2417 }, { "epoch": 19.344, "grad_norm": 24.965896606445312, "learning_rate": 4.4840000000000004e-05, "loss": 1.5555, "step": 2418 }, { "epoch": 19.352, "grad_norm": 40.04841613769531, "learning_rate": 4.483555555555556e-05, "loss": 1.0129, "step": 2419 }, { "epoch": 19.36, "grad_norm": 49.0279655456543, "learning_rate": 4.4831111111111114e-05, "loss": 1.1187, "step": 2420 }, { "epoch": 19.368, "grad_norm": 45.11487579345703, "learning_rate": 4.482666666666667e-05, "loss": 1.062, "step": 2421 }, { "epoch": 19.376, "grad_norm": 28.286911010742188, "learning_rate": 4.4822222222222224e-05, "loss": 1.2675, "step": 2422 }, { "epoch": 19.384, "grad_norm": 25.546493530273438, "learning_rate": 4.481777777777778e-05, "loss": 1.3386, "step": 2423 }, { "epoch": 19.392, "grad_norm": 21.3663272857666, "learning_rate": 4.4813333333333333e-05, "loss": 1.3828, "step": 2424 }, { "epoch": 19.4, "grad_norm": 50.66783905029297, "learning_rate": 4.480888888888889e-05, "loss": 1.08, "step": 2425 }, { "epoch": 19.408, "grad_norm": 47.78269577026367, "learning_rate": 4.480444444444445e-05, "loss": 1.2097, "step": 2426 }, { "epoch": 19.416, "grad_norm": 234.03550720214844, "learning_rate": 4.4800000000000005e-05, "loss": 1.189, "step": 2427 }, { "epoch": 19.424, "grad_norm": 44.555545806884766, "learning_rate": 
4.479555555555556e-05, "loss": 3.0772, "step": 2428 }, { "epoch": 19.432, "grad_norm": 33.635986328125, "learning_rate": 4.4791111111111114e-05, "loss": 1.0722, "step": 2429 }, { "epoch": 19.44, "grad_norm": 41.31098937988281, "learning_rate": 4.478666666666667e-05, "loss": 0.8902, "step": 2430 }, { "epoch": 19.448, "grad_norm": 29.568960189819336, "learning_rate": 4.4782222222222224e-05, "loss": 1.1737, "step": 2431 }, { "epoch": 19.456, "grad_norm": 23.86166763305664, "learning_rate": 4.477777777777778e-05, "loss": 1.4435, "step": 2432 }, { "epoch": 19.464, "grad_norm": 23.683042526245117, "learning_rate": 4.4773333333333334e-05, "loss": 1.1293, "step": 2433 }, { "epoch": 19.472, "grad_norm": 34.27937698364258, "learning_rate": 4.4768888888888895e-05, "loss": 1.4131, "step": 2434 }, { "epoch": 19.48, "grad_norm": 19.600330352783203, "learning_rate": 4.476444444444445e-05, "loss": 1.5648, "step": 2435 }, { "epoch": 19.488, "grad_norm": 54.67304992675781, "learning_rate": 4.4760000000000005e-05, "loss": 1.1924, "step": 2436 }, { "epoch": 19.496, "grad_norm": 45.38713836669922, "learning_rate": 4.475555555555555e-05, "loss": 1.5689, "step": 2437 }, { "epoch": 19.504, "grad_norm": 87.61907196044922, "learning_rate": 4.4751111111111115e-05, "loss": 1.4772, "step": 2438 }, { "epoch": 19.512, "grad_norm": 31.245527267456055, "learning_rate": 4.474666666666667e-05, "loss": 1.7629, "step": 2439 }, { "epoch": 19.52, "grad_norm": 42.04084014892578, "learning_rate": 4.4742222222222225e-05, "loss": 0.9547, "step": 2440 }, { "epoch": 19.528, "grad_norm": 21.063114166259766, "learning_rate": 4.473777777777778e-05, "loss": 1.7396, "step": 2441 }, { "epoch": 19.536, "grad_norm": 118.55558013916016, "learning_rate": 4.473333333333334e-05, "loss": 1.1795, "step": 2442 }, { "epoch": 19.544, "grad_norm": 38.06156539916992, "learning_rate": 4.472888888888889e-05, "loss": 1.4604, "step": 2443 }, { "epoch": 19.552, "grad_norm": 100.15120697021484, "learning_rate": 
4.4724444444444444e-05, "loss": 1.0939, "step": 2444 }, { "epoch": 19.56, "grad_norm": 20.3720703125, "learning_rate": 4.472e-05, "loss": 0.9288, "step": 2445 }, { "epoch": 19.568, "grad_norm": 42.497039794921875, "learning_rate": 4.4715555555555554e-05, "loss": 1.3911, "step": 2446 }, { "epoch": 19.576, "grad_norm": 32.04441833496094, "learning_rate": 4.4711111111111115e-05, "loss": 1.4901, "step": 2447 }, { "epoch": 19.584, "grad_norm": 20.213733673095703, "learning_rate": 4.470666666666667e-05, "loss": 2.5876, "step": 2448 }, { "epoch": 19.592, "grad_norm": 28.95050048828125, "learning_rate": 4.4702222222222225e-05, "loss": 1.0652, "step": 2449 }, { "epoch": 19.6, "grad_norm": 54.76129913330078, "learning_rate": 4.469777777777778e-05, "loss": 0.9223, "step": 2450 }, { "epoch": 19.608, "grad_norm": 24.803327560424805, "learning_rate": 4.4693333333333335e-05, "loss": 1.2212, "step": 2451 }, { "epoch": 19.616, "grad_norm": 66.72420501708984, "learning_rate": 4.468888888888889e-05, "loss": 1.1122, "step": 2452 }, { "epoch": 19.624, "grad_norm": 159.91854858398438, "learning_rate": 4.4684444444444444e-05, "loss": 1.3158, "step": 2453 }, { "epoch": 19.632, "grad_norm": 461.2635803222656, "learning_rate": 4.468e-05, "loss": 1.8494, "step": 2454 }, { "epoch": 19.64, "grad_norm": 40.70729446411133, "learning_rate": 4.467555555555556e-05, "loss": 1.2923, "step": 2455 }, { "epoch": 19.648, "grad_norm": 50.82982635498047, "learning_rate": 4.4671111111111116e-05, "loss": 1.0893, "step": 2456 }, { "epoch": 19.656, "grad_norm": 31.639978408813477, "learning_rate": 4.466666666666667e-05, "loss": 2.1619, "step": 2457 }, { "epoch": 19.664, "grad_norm": 39.229984283447266, "learning_rate": 4.4662222222222225e-05, "loss": 1.3238, "step": 2458 }, { "epoch": 19.672, "grad_norm": 47.56379699707031, "learning_rate": 4.465777777777778e-05, "loss": 1.1802, "step": 2459 }, { "epoch": 19.68, "grad_norm": 20.97281837463379, "learning_rate": 4.4653333333333335e-05, "loss": 1.0977, "step": 
2460 }, { "epoch": 19.688, "grad_norm": 33.781211853027344, "learning_rate": 4.464888888888889e-05, "loss": 1.6048, "step": 2461 }, { "epoch": 19.696, "grad_norm": 28.713546752929688, "learning_rate": 4.4644444444444445e-05, "loss": 1.4301, "step": 2462 }, { "epoch": 19.704, "grad_norm": 20.812788009643555, "learning_rate": 4.4640000000000006e-05, "loss": 1.1676, "step": 2463 }, { "epoch": 19.712, "grad_norm": 25.112253189086914, "learning_rate": 4.463555555555556e-05, "loss": 1.0751, "step": 2464 }, { "epoch": 19.72, "grad_norm": 47.86423873901367, "learning_rate": 4.463111111111111e-05, "loss": 1.2502, "step": 2465 }, { "epoch": 19.728, "grad_norm": 25.676536560058594, "learning_rate": 4.4626666666666664e-05, "loss": 1.5742, "step": 2466 }, { "epoch": 19.736, "grad_norm": 30.286375045776367, "learning_rate": 4.4622222222222226e-05, "loss": 1.0778, "step": 2467 }, { "epoch": 19.744, "grad_norm": 28.77909278869629, "learning_rate": 4.461777777777778e-05, "loss": 2.0919, "step": 2468 }, { "epoch": 19.752, "grad_norm": 37.68330001831055, "learning_rate": 4.4613333333333335e-05, "loss": 1.7493, "step": 2469 }, { "epoch": 19.76, "grad_norm": 35.30447769165039, "learning_rate": 4.460888888888889e-05, "loss": 1.1026, "step": 2470 }, { "epoch": 19.768, "grad_norm": 41.074642181396484, "learning_rate": 4.460444444444445e-05, "loss": 1.9558, "step": 2471 }, { "epoch": 19.776, "grad_norm": 28.029890060424805, "learning_rate": 4.46e-05, "loss": 1.3156, "step": 2472 }, { "epoch": 19.784, "grad_norm": 25.8765926361084, "learning_rate": 4.4595555555555555e-05, "loss": 1.5773, "step": 2473 }, { "epoch": 19.792, "grad_norm": 27.576492309570312, "learning_rate": 4.459111111111111e-05, "loss": 1.2634, "step": 2474 }, { "epoch": 19.8, "grad_norm": 25.308555603027344, "learning_rate": 4.458666666666667e-05, "loss": 1.5478, "step": 2475 }, { "epoch": 19.808, "grad_norm": 24.21664810180664, "learning_rate": 4.4582222222222226e-05, "loss": 1.3713, "step": 2476 }, { "epoch": 19.816, 
"grad_norm": 18.97246742248535, "learning_rate": 4.457777777777778e-05, "loss": 1.1465, "step": 2477 }, { "epoch": 19.824, "grad_norm": 46.93341064453125, "learning_rate": 4.4573333333333336e-05, "loss": 1.2794, "step": 2478 }, { "epoch": 19.832, "grad_norm": 27.891847610473633, "learning_rate": 4.456888888888889e-05, "loss": 0.9678, "step": 2479 }, { "epoch": 19.84, "grad_norm": 18.956483840942383, "learning_rate": 4.4564444444444445e-05, "loss": 1.3185, "step": 2480 }, { "epoch": 19.848, "grad_norm": 30.93056869506836, "learning_rate": 4.456e-05, "loss": 0.994, "step": 2481 }, { "epoch": 19.856, "grad_norm": 31.658235549926758, "learning_rate": 4.4555555555555555e-05, "loss": 1.1295, "step": 2482 }, { "epoch": 19.864, "grad_norm": 33.534793853759766, "learning_rate": 4.455111111111112e-05, "loss": 1.3794, "step": 2483 }, { "epoch": 19.872, "grad_norm": 33.968048095703125, "learning_rate": 4.454666666666667e-05, "loss": 1.1173, "step": 2484 }, { "epoch": 19.88, "grad_norm": 25.188228607177734, "learning_rate": 4.4542222222222226e-05, "loss": 1.3893, "step": 2485 }, { "epoch": 19.888, "grad_norm": 23.657506942749023, "learning_rate": 4.453777777777778e-05, "loss": 1.6281, "step": 2486 }, { "epoch": 19.896, "grad_norm": 17.34295654296875, "learning_rate": 4.4533333333333336e-05, "loss": 1.1527, "step": 2487 }, { "epoch": 19.904, "grad_norm": 23.12708854675293, "learning_rate": 4.452888888888889e-05, "loss": 1.1166, "step": 2488 }, { "epoch": 19.912, "grad_norm": 23.428817749023438, "learning_rate": 4.4524444444444446e-05, "loss": 1.1379, "step": 2489 }, { "epoch": 19.92, "grad_norm": 19.210229873657227, "learning_rate": 4.452e-05, "loss": 1.2175, "step": 2490 }, { "epoch": 19.928, "grad_norm": 32.39546203613281, "learning_rate": 4.451555555555556e-05, "loss": 0.9954, "step": 2491 }, { "epoch": 19.936, "grad_norm": 16.531930923461914, "learning_rate": 4.451111111111112e-05, "loss": 1.1055, "step": 2492 }, { "epoch": 19.944, "grad_norm": 36.42578887939453, 
"learning_rate": 4.450666666666667e-05, "loss": 1.1243, "step": 2493 }, { "epoch": 19.951999999999998, "grad_norm": 26.36699676513672, "learning_rate": 4.450222222222222e-05, "loss": 1.0418, "step": 2494 }, { "epoch": 19.96, "grad_norm": 27.713180541992188, "learning_rate": 4.449777777777778e-05, "loss": 1.1708, "step": 2495 }, { "epoch": 19.968, "grad_norm": 28.780593872070312, "learning_rate": 4.4493333333333337e-05, "loss": 0.9575, "step": 2496 }, { "epoch": 19.976, "grad_norm": 103.88208770751953, "learning_rate": 4.448888888888889e-05, "loss": 1.1219, "step": 2497 }, { "epoch": 19.984, "grad_norm": 34.972171783447266, "learning_rate": 4.4484444444444446e-05, "loss": 1.2402, "step": 2498 }, { "epoch": 19.992, "grad_norm": 23.532346725463867, "learning_rate": 4.448e-05, "loss": 1.2898, "step": 2499 }, { "epoch": 20.0, "grad_norm": 20.333518981933594, "learning_rate": 4.4475555555555556e-05, "loss": 1.5289, "step": 2500 }, { "epoch": 20.0, "eval_loss": 1.4883310794830322, "eval_map": 0.2293, "eval_map_50": 0.4738, "eval_map_75": 0.191, "eval_map_Coverall": 0.4913, "eval_map_Face_Shield": 0.1186, "eval_map_Gloves": 0.1969, "eval_map_Goggles": 0.0355, "eval_map_Mask": 0.3042, "eval_map_large": 0.3815, "eval_map_medium": 0.1449, "eval_map_small": 0.1685, "eval_mar_1": 0.2371, "eval_mar_10": 0.4201, "eval_mar_100": 0.4374, "eval_mar_100_Coverall": 0.6467, "eval_mar_100_Face_Shield": 0.5412, "eval_mar_100_Gloves": 0.3705, "eval_mar_100_Goggles": 0.2344, "eval_mar_100_Mask": 0.3942, "eval_mar_large": 0.6265, "eval_mar_medium": 0.3066, "eval_mar_small": 0.2104, "eval_runtime": 2.4902, "eval_samples_per_second": 11.646, "eval_steps_per_second": 0.803, "step": 2500 }, { "epoch": 20.008, "grad_norm": 31.299596786499023, "learning_rate": 4.447111111111111e-05, "loss": 1.4987, "step": 2501 }, { "epoch": 20.016, "grad_norm": 28.945894241333008, "learning_rate": 4.4466666666666666e-05, "loss": 1.1005, "step": 2502 }, { "epoch": 20.024, "grad_norm": 27.85692596435547, 
"learning_rate": 4.446222222222222e-05, "loss": 0.9877, "step": 2503 }, { "epoch": 20.032, "grad_norm": 45.288818359375, "learning_rate": 4.445777777777778e-05, "loss": 1.3847, "step": 2504 }, { "epoch": 20.04, "grad_norm": 46.27237319946289, "learning_rate": 4.445333333333334e-05, "loss": 1.6003, "step": 2505 }, { "epoch": 20.048, "grad_norm": 45.970184326171875, "learning_rate": 4.444888888888889e-05, "loss": 1.2739, "step": 2506 }, { "epoch": 20.056, "grad_norm": 54.99766540527344, "learning_rate": 4.4444444444444447e-05, "loss": 1.1455, "step": 2507 }, { "epoch": 20.064, "grad_norm": 30.403974533081055, "learning_rate": 4.444e-05, "loss": 2.0669, "step": 2508 }, { "epoch": 20.072, "grad_norm": 33.99847412109375, "learning_rate": 4.4435555555555556e-05, "loss": 1.4881, "step": 2509 }, { "epoch": 20.08, "grad_norm": 27.44559669494629, "learning_rate": 4.443111111111111e-05, "loss": 1.8582, "step": 2510 }, { "epoch": 20.088, "grad_norm": 38.45943069458008, "learning_rate": 4.4426666666666666e-05, "loss": 1.0919, "step": 2511 }, { "epoch": 20.096, "grad_norm": 21.26645278930664, "learning_rate": 4.442222222222223e-05, "loss": 1.1482, "step": 2512 }, { "epoch": 20.104, "grad_norm": 41.54813003540039, "learning_rate": 4.441777777777778e-05, "loss": 1.4019, "step": 2513 }, { "epoch": 20.112, "grad_norm": 70.4938735961914, "learning_rate": 4.441333333333334e-05, "loss": 1.3069, "step": 2514 }, { "epoch": 20.12, "grad_norm": 26.005611419677734, "learning_rate": 4.440888888888889e-05, "loss": 1.4155, "step": 2515 }, { "epoch": 20.128, "grad_norm": 24.843008041381836, "learning_rate": 4.440444444444445e-05, "loss": 1.1523, "step": 2516 }, { "epoch": 20.136, "grad_norm": 27.588298797607422, "learning_rate": 4.44e-05, "loss": 1.1221, "step": 2517 }, { "epoch": 20.144, "grad_norm": 30.541120529174805, "learning_rate": 4.439555555555556e-05, "loss": 1.4112, "step": 2518 }, { "epoch": 20.152, "grad_norm": 47.32604217529297, "learning_rate": 4.439111111111111e-05, "loss": 
1.0733, "step": 2519 }, { "epoch": 20.16, "grad_norm": 23.22517204284668, "learning_rate": 4.438666666666667e-05, "loss": 1.1952, "step": 2520 }, { "epoch": 20.168, "grad_norm": 49.22733688354492, "learning_rate": 4.438222222222223e-05, "loss": 1.1073, "step": 2521 }, { "epoch": 20.176, "grad_norm": 23.035036087036133, "learning_rate": 4.4377777777777776e-05, "loss": 1.3971, "step": 2522 }, { "epoch": 20.184, "grad_norm": 301.3517150878906, "learning_rate": 4.437333333333333e-05, "loss": 2.3597, "step": 2523 }, { "epoch": 20.192, "grad_norm": 33.64874267578125, "learning_rate": 4.436888888888889e-05, "loss": 1.3774, "step": 2524 }, { "epoch": 20.2, "grad_norm": 51.73877716064453, "learning_rate": 4.436444444444445e-05, "loss": 1.584, "step": 2525 }, { "epoch": 20.208, "grad_norm": 52.803714752197266, "learning_rate": 4.436e-05, "loss": 1.5205, "step": 2526 }, { "epoch": 20.216, "grad_norm": 40.96205139160156, "learning_rate": 4.435555555555556e-05, "loss": 1.3636, "step": 2527 }, { "epoch": 20.224, "grad_norm": 46.29652786254883, "learning_rate": 4.435111111111112e-05, "loss": 0.9427, "step": 2528 }, { "epoch": 20.232, "grad_norm": 24.932788848876953, "learning_rate": 4.434666666666667e-05, "loss": 1.399, "step": 2529 }, { "epoch": 20.24, "grad_norm": 56.11345672607422, "learning_rate": 4.434222222222222e-05, "loss": 1.158, "step": 2530 }, { "epoch": 20.248, "grad_norm": 28.10399055480957, "learning_rate": 4.4337777777777776e-05, "loss": 1.3529, "step": 2531 }, { "epoch": 20.256, "grad_norm": 26.379945755004883, "learning_rate": 4.433333333333334e-05, "loss": 1.2602, "step": 2532 }, { "epoch": 20.264, "grad_norm": 42.69734191894531, "learning_rate": 4.432888888888889e-05, "loss": 0.9586, "step": 2533 }, { "epoch": 20.272, "grad_norm": 24.20310401916504, "learning_rate": 4.432444444444445e-05, "loss": 1.5711, "step": 2534 }, { "epoch": 20.28, "grad_norm": 30.350406646728516, "learning_rate": 4.432e-05, "loss": 0.9956, "step": 2535 }, { "epoch": 20.288, "grad_norm": 
17.270776748657227, "learning_rate": 4.431555555555556e-05, "loss": 0.8699, "step": 2536 }, { "epoch": 20.296, "grad_norm": 15.907876014709473, "learning_rate": 4.431111111111111e-05, "loss": 0.8765, "step": 2537 }, { "epoch": 20.304, "grad_norm": 341.6497802734375, "learning_rate": 4.430666666666667e-05, "loss": 1.2173, "step": 2538 }, { "epoch": 20.312, "grad_norm": 25.639122009277344, "learning_rate": 4.430222222222222e-05, "loss": 1.4656, "step": 2539 }, { "epoch": 20.32, "grad_norm": 40.072715759277344, "learning_rate": 4.4297777777777784e-05, "loss": 1.1423, "step": 2540 }, { "epoch": 20.328, "grad_norm": 22.791067123413086, "learning_rate": 4.429333333333334e-05, "loss": 1.5499, "step": 2541 }, { "epoch": 20.336, "grad_norm": 30.5935001373291, "learning_rate": 4.428888888888889e-05, "loss": 2.2609, "step": 2542 }, { "epoch": 20.344, "grad_norm": 43.94755172729492, "learning_rate": 4.428444444444445e-05, "loss": 1.0641, "step": 2543 }, { "epoch": 20.352, "grad_norm": 17.924692153930664, "learning_rate": 4.428e-05, "loss": 1.1687, "step": 2544 }, { "epoch": 20.36, "grad_norm": 55.548370361328125, "learning_rate": 4.427555555555556e-05, "loss": 1.3505, "step": 2545 }, { "epoch": 20.368, "grad_norm": 30.10599136352539, "learning_rate": 4.427111111111111e-05, "loss": 1.4755, "step": 2546 }, { "epoch": 20.376, "grad_norm": 29.664554595947266, "learning_rate": 4.426666666666667e-05, "loss": 1.1804, "step": 2547 }, { "epoch": 20.384, "grad_norm": 65.8941650390625, "learning_rate": 4.426222222222222e-05, "loss": 1.7611, "step": 2548 }, { "epoch": 20.392, "grad_norm": 39.4832763671875, "learning_rate": 4.4257777777777784e-05, "loss": 1.1379, "step": 2549 }, { "epoch": 20.4, "grad_norm": 30.587299346923828, "learning_rate": 4.425333333333334e-05, "loss": 1.6238, "step": 2550 }, { "epoch": 20.408, "grad_norm": 23.646358489990234, "learning_rate": 4.424888888888889e-05, "loss": 1.1485, "step": 2551 }, { "epoch": 20.416, "grad_norm": 23.00309181213379, "learning_rate": 
4.424444444444444e-05, "loss": 1.6343, "step": 2552 }, { "epoch": 20.424, "grad_norm": 37.30080032348633, "learning_rate": 4.424e-05, "loss": 1.3483, "step": 2553 }, { "epoch": 20.432, "grad_norm": 48.46896743774414, "learning_rate": 4.423555555555556e-05, "loss": 2.1729, "step": 2554 }, { "epoch": 20.44, "grad_norm": 23.22496795654297, "learning_rate": 4.423111111111111e-05, "loss": 0.9807, "step": 2555 }, { "epoch": 20.448, "grad_norm": 54.84933090209961, "learning_rate": 4.422666666666667e-05, "loss": 1.1999, "step": 2556 }, { "epoch": 20.456, "grad_norm": 28.4980411529541, "learning_rate": 4.422222222222222e-05, "loss": 1.5144, "step": 2557 }, { "epoch": 20.464, "grad_norm": 19.55126953125, "learning_rate": 4.421777777777778e-05, "loss": 1.1328, "step": 2558 }, { "epoch": 20.472, "grad_norm": 32.840476989746094, "learning_rate": 4.421333333333333e-05, "loss": 1.0492, "step": 2559 }, { "epoch": 20.48, "grad_norm": 27.676557540893555, "learning_rate": 4.420888888888889e-05, "loss": 1.0658, "step": 2560 }, { "epoch": 20.488, "grad_norm": 87.1694107055664, "learning_rate": 4.420444444444445e-05, "loss": 1.2098, "step": 2561 }, { "epoch": 20.496, "grad_norm": 47.3755989074707, "learning_rate": 4.4200000000000004e-05, "loss": 1.6828, "step": 2562 }, { "epoch": 20.504, "grad_norm": 28.898725509643555, "learning_rate": 4.419555555555556e-05, "loss": 1.3636, "step": 2563 }, { "epoch": 20.512, "grad_norm": 32.52335739135742, "learning_rate": 4.4191111111111113e-05, "loss": 3.1618, "step": 2564 }, { "epoch": 20.52, "grad_norm": 26.24427604675293, "learning_rate": 4.418666666666667e-05, "loss": 1.214, "step": 2565 }, { "epoch": 20.528, "grad_norm": 59.29016876220703, "learning_rate": 4.418222222222222e-05, "loss": 1.2144, "step": 2566 }, { "epoch": 20.536, "grad_norm": 34.9389762878418, "learning_rate": 4.417777777777778e-05, "loss": 0.7798, "step": 2567 }, { "epoch": 20.544, "grad_norm": 43.851749420166016, "learning_rate": 4.417333333333333e-05, "loss": 1.3656, "step": 
2568 }, { "epoch": 20.552, "grad_norm": 18.68810272216797, "learning_rate": 4.4168888888888894e-05, "loss": 1.0349, "step": 2569 }, { "epoch": 20.56, "grad_norm": 31.49493980407715, "learning_rate": 4.416444444444445e-05, "loss": 1.495, "step": 2570 }, { "epoch": 20.568, "grad_norm": 41.63833236694336, "learning_rate": 4.4160000000000004e-05, "loss": 1.5978, "step": 2571 }, { "epoch": 20.576, "grad_norm": 23.019901275634766, "learning_rate": 4.415555555555556e-05, "loss": 1.2685, "step": 2572 }, { "epoch": 20.584, "grad_norm": 19.844999313354492, "learning_rate": 4.4151111111111114e-05, "loss": 1.416, "step": 2573 }, { "epoch": 20.592, "grad_norm": 35.554195404052734, "learning_rate": 4.414666666666667e-05, "loss": 1.496, "step": 2574 }, { "epoch": 20.6, "grad_norm": 18.39525604248047, "learning_rate": 4.4142222222222223e-05, "loss": 1.4273, "step": 2575 }, { "epoch": 20.608, "grad_norm": 35.97130584716797, "learning_rate": 4.413777777777778e-05, "loss": 1.298, "step": 2576 }, { "epoch": 20.616, "grad_norm": 17.360414505004883, "learning_rate": 4.413333333333334e-05, "loss": 0.9824, "step": 2577 }, { "epoch": 20.624, "grad_norm": 45.528934478759766, "learning_rate": 4.4128888888888895e-05, "loss": 1.4171, "step": 2578 }, { "epoch": 20.632, "grad_norm": 21.665451049804688, "learning_rate": 4.412444444444444e-05, "loss": 0.9712, "step": 2579 }, { "epoch": 20.64, "grad_norm": 25.60260581970215, "learning_rate": 4.412e-05, "loss": 1.2521, "step": 2580 }, { "epoch": 20.648, "grad_norm": 20.671045303344727, "learning_rate": 4.411555555555556e-05, "loss": 0.9255, "step": 2581 }, { "epoch": 20.656, "grad_norm": 17.28809356689453, "learning_rate": 4.4111111111111114e-05, "loss": 1.4052, "step": 2582 }, { "epoch": 20.664, "grad_norm": 15.016551971435547, "learning_rate": 4.410666666666667e-05, "loss": 1.6235, "step": 2583 }, { "epoch": 20.672, "grad_norm": 68.26554107666016, "learning_rate": 4.4102222222222224e-05, "loss": 1.2722, "step": 2584 }, { "epoch": 20.68, 
"grad_norm": 38.72378158569336, "learning_rate": 4.4097777777777785e-05, "loss": 1.6137, "step": 2585 }, { "epoch": 20.688, "grad_norm": 25.685176849365234, "learning_rate": 4.4093333333333334e-05, "loss": 1.1504, "step": 2586 }, { "epoch": 20.696, "grad_norm": 42.987327575683594, "learning_rate": 4.408888888888889e-05, "loss": 1.1501, "step": 2587 }, { "epoch": 20.704, "grad_norm": 21.4054012298584, "learning_rate": 4.408444444444444e-05, "loss": 1.026, "step": 2588 }, { "epoch": 20.712, "grad_norm": 20.760255813598633, "learning_rate": 4.4080000000000005e-05, "loss": 1.0555, "step": 2589 }, { "epoch": 20.72, "grad_norm": 49.15171432495117, "learning_rate": 4.407555555555556e-05, "loss": 1.2868, "step": 2590 }, { "epoch": 20.728, "grad_norm": 40.331092834472656, "learning_rate": 4.4071111111111115e-05, "loss": 1.6245, "step": 2591 }, { "epoch": 20.736, "grad_norm": 28.455917358398438, "learning_rate": 4.406666666666667e-05, "loss": 1.675, "step": 2592 }, { "epoch": 20.744, "grad_norm": 44.27690124511719, "learning_rate": 4.4062222222222224e-05, "loss": 1.0296, "step": 2593 }, { "epoch": 20.752, "grad_norm": 19.68973731994629, "learning_rate": 4.405777777777778e-05, "loss": 1.23, "step": 2594 }, { "epoch": 20.76, "grad_norm": 28.949094772338867, "learning_rate": 4.4053333333333334e-05, "loss": 1.4773, "step": 2595 }, { "epoch": 20.768, "grad_norm": 27.19438362121582, "learning_rate": 4.404888888888889e-05, "loss": 1.4126, "step": 2596 }, { "epoch": 20.776, "grad_norm": 692.4525146484375, "learning_rate": 4.404444444444445e-05, "loss": 1.0322, "step": 2597 }, { "epoch": 20.784, "grad_norm": 30.630956649780273, "learning_rate": 4.4040000000000005e-05, "loss": 1.1982, "step": 2598 }, { "epoch": 20.792, "grad_norm": 34.061214447021484, "learning_rate": 4.403555555555556e-05, "loss": 1.6156, "step": 2599 }, { "epoch": 20.8, "grad_norm": 36.48095703125, "learning_rate": 4.4031111111111115e-05, "loss": 1.1586, "step": 2600 }, { "epoch": 20.808, "grad_norm": 
24.224349975585938, "learning_rate": 4.402666666666666e-05, "loss": 1.4867, "step": 2601 }, { "epoch": 20.816, "grad_norm": 50.71461868286133, "learning_rate": 4.4022222222222225e-05, "loss": 1.2737, "step": 2602 }, { "epoch": 20.824, "grad_norm": 35.236026763916016, "learning_rate": 4.401777777777778e-05, "loss": 1.0597, "step": 2603 }, { "epoch": 20.832, "grad_norm": 51.907222747802734, "learning_rate": 4.4013333333333334e-05, "loss": 1.3368, "step": 2604 }, { "epoch": 20.84, "grad_norm": 37.66876220703125, "learning_rate": 4.400888888888889e-05, "loss": 1.2082, "step": 2605 }, { "epoch": 20.848, "grad_norm": 88.17715454101562, "learning_rate": 4.400444444444445e-05, "loss": 1.2408, "step": 2606 }, { "epoch": 20.856, "grad_norm": 39.82918167114258, "learning_rate": 4.4000000000000006e-05, "loss": 2.0856, "step": 2607 }, { "epoch": 20.864, "grad_norm": 38.741363525390625, "learning_rate": 4.3995555555555554e-05, "loss": 1.6325, "step": 2608 }, { "epoch": 20.872, "grad_norm": 60.493896484375, "learning_rate": 4.399111111111111e-05, "loss": 1.6484, "step": 2609 }, { "epoch": 20.88, "grad_norm": 29.10042953491211, "learning_rate": 4.398666666666667e-05, "loss": 1.2973, "step": 2610 }, { "epoch": 20.888, "grad_norm": 33.76005935668945, "learning_rate": 4.3982222222222225e-05, "loss": 1.1528, "step": 2611 }, { "epoch": 20.896, "grad_norm": 23.7939453125, "learning_rate": 4.397777777777778e-05, "loss": 1.4331, "step": 2612 }, { "epoch": 20.904, "grad_norm": 25.5266170501709, "learning_rate": 4.3973333333333335e-05, "loss": 1.5613, "step": 2613 }, { "epoch": 20.912, "grad_norm": 51.59452438354492, "learning_rate": 4.396888888888889e-05, "loss": 2.0039, "step": 2614 }, { "epoch": 20.92, "grad_norm": 20.21539878845215, "learning_rate": 4.3964444444444444e-05, "loss": 1.4211, "step": 2615 }, { "epoch": 20.928, "grad_norm": 31.13737678527832, "learning_rate": 4.396e-05, "loss": 1.5772, "step": 2616 }, { "epoch": 20.936, "grad_norm": 15.810937881469727, "learning_rate": 
4.3955555555555554e-05, "loss": 1.1804, "step": 2617 }, { "epoch": 20.944, "grad_norm": 62.595703125, "learning_rate": 4.3951111111111116e-05, "loss": 1.2121, "step": 2618 }, { "epoch": 20.951999999999998, "grad_norm": 28.936019897460938, "learning_rate": 4.394666666666667e-05, "loss": 1.1366, "step": 2619 }, { "epoch": 20.96, "grad_norm": 71.97296142578125, "learning_rate": 4.3942222222222225e-05, "loss": 1.5653, "step": 2620 }, { "epoch": 20.968, "grad_norm": 111.84375762939453, "learning_rate": 4.393777777777778e-05, "loss": 1.2816, "step": 2621 }, { "epoch": 20.976, "grad_norm": 63.952911376953125, "learning_rate": 4.3933333333333335e-05, "loss": 1.3329, "step": 2622 }, { "epoch": 20.984, "grad_norm": 29.638242721557617, "learning_rate": 4.392888888888889e-05, "loss": 1.429, "step": 2623 }, { "epoch": 20.992, "grad_norm": 57.575801849365234, "learning_rate": 4.3924444444444445e-05, "loss": 1.4034, "step": 2624 }, { "epoch": 21.0, "grad_norm": 36.15318298339844, "learning_rate": 4.392e-05, "loss": 1.3654, "step": 2625 }, { "epoch": 21.0, "eval_loss": 1.3475022315979004, "eval_map": 0.2746, "eval_map_50": 0.5638, "eval_map_75": 0.2333, "eval_map_Coverall": 0.522, "eval_map_Face_Shield": 0.2338, "eval_map_Gloves": 0.2226, "eval_map_Goggles": 0.0988, "eval_map_Mask": 0.296, "eval_map_large": 0.4482, "eval_map_medium": 0.2275, "eval_map_small": 0.1345, "eval_mar_1": 0.2715, "eval_mar_10": 0.4663, "eval_mar_100": 0.49, "eval_mar_100_Coverall": 0.6822, "eval_mar_100_Face_Shield": 0.6294, "eval_mar_100_Gloves": 0.3656, "eval_mar_100_Goggles": 0.3938, "eval_mar_100_Mask": 0.3788, "eval_mar_large": 0.6686, "eval_mar_medium": 0.4158, "eval_mar_small": 0.1839, "eval_runtime": 2.4611, "eval_samples_per_second": 11.784, "eval_steps_per_second": 0.813, "step": 2625 } ], "logging_steps": 1, "max_steps": 12500, "num_input_tokens_seen": 0, "num_train_epochs": 100, "save_steps": 500, "total_flos": 5.64419914128e+18, "train_batch_size": 8, "trial_name": null, "trial_params": null }