|
{ |
|
"best_metric": 0.03139420226216316, |
|
"best_model_checkpoint": "/robodata/smodak/Projects/nspl/scripts/terrainseg/training/models/INTERNAL_BEST-safety-utcustom-train-SF-RGB-b5/checkpoint-1440", |
|
"epoch": 144.0, |
|
"global_step": 1440, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.0000000000000001e-07, |
|
"loss": 1.2416, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 2.0000000000000002e-07, |
|
"loss": 1.2501, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.0000000000000004e-07, |
|
"loss": 1.2448, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.0000000000000003e-07, |
|
"loss": 1.2497, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 5.000000000000001e-07, |
|
"loss": 1.2596, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 6.000000000000001e-07, |
|
"loss": 1.2563, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 7.000000000000001e-07, |
|
"loss": 1.2528, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.000000000000001e-07, |
|
"loss": 1.2488, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 9e-07, |
|
"loss": 1.246, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.0000000000000002e-06, |
|
"loss": 1.2569, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.1e-06, |
|
"loss": 1.2506, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.2000000000000002e-06, |
|
"loss": 1.2516, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.3e-06, |
|
"loss": 1.2334, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.4000000000000001e-06, |
|
"loss": 1.2405, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.5e-06, |
|
"loss": 1.2353, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.6000000000000001e-06, |
|
"loss": 1.2381, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.7000000000000002e-06, |
|
"loss": 1.2353, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 1.8e-06, |
|
"loss": 1.2275, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.9e-06, |
|
"loss": 1.2315, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 1.2199, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_accuracy_safe": 0.6769557557603865, |
|
      "eval_accuracy_unlabeled": null,
|
"eval_accuracy_unsafe": 0.20277138320874238, |
|
"eval_iou_safe": 0.027914850351298256, |
|
"eval_iou_unlabeled": 0.0, |
|
"eval_iou_unsafe": 0.20144342824057224, |
|
"eval_loss": 1.147387981414795, |
|
"eval_mean_accuracy": 0.4398635694845644, |
|
"eval_mean_iou": 0.0764527595306235, |
|
"eval_overall_accuracy": 0.2167780292567922, |
|
"eval_runtime": 9.4386, |
|
"eval_samples_per_second": 7.098, |
|
"eval_steps_per_second": 0.53, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 2.1000000000000002e-06, |
|
"loss": 1.2169, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 2.2e-06, |
|
"loss": 1.2233, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.3e-06, |
|
"loss": 1.225, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.4000000000000003e-06, |
|
"loss": 1.2193, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 2.5e-06, |
|
"loss": 1.2153, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 2.6e-06, |
|
"loss": 1.2085, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 2.7e-06, |
|
"loss": 1.2061, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 2.8000000000000003e-06, |
|
"loss": 1.2082, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 2.9e-06, |
|
"loss": 1.208, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 3e-06, |
|
"loss": 1.205, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 3.1e-06, |
|
"loss": 1.1978, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 3.2000000000000003e-06, |
|
"loss": 1.1951, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 3.3e-06, |
|
"loss": 1.1905, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 3.4000000000000005e-06, |
|
"loss": 1.1892, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 3.5000000000000004e-06, |
|
"loss": 1.1726, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 3.6e-06, |
|
"loss": 1.1638, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 3.7e-06, |
|
"loss": 1.1697, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 3.8e-06, |
|
"loss": 1.1557, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 3.9e-06, |
|
"loss": 1.1568, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 1.1542, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"eval_accuracy_safe": 0.7907563964672457, |
|
      "eval_accuracy_unlabeled": null,
|
"eval_accuracy_unsafe": 0.42571478791888173, |
|
"eval_iou_safe": 0.04364337256292872, |
|
"eval_iou_unlabeled": 0.0, |
|
"eval_iou_unsafe": 0.4237482501114813, |
|
"eval_loss": 1.0615942478179932, |
|
"eval_mean_accuracy": 0.6082355921930638, |
|
"eval_mean_iou": 0.15579720755813667, |
|
"eval_overall_accuracy": 0.4364975317200618, |
|
"eval_runtime": 9.575, |
|
"eval_samples_per_second": 6.997, |
|
"eval_steps_per_second": 0.522, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 4.1000000000000006e-06, |
|
"loss": 1.1516, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 4.2000000000000004e-06, |
|
"loss": 1.1479, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 4.2999999999999995e-06, |
|
"loss": 1.1341, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 4.4e-06, |
|
"loss": 1.1384, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 4.5e-06, |
|
"loss": 1.1202, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 4.6e-06, |
|
"loss": 1.1091, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 4.7e-06, |
|
"loss": 1.112, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 4.800000000000001e-06, |
|
"loss": 1.1014, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 4.9000000000000005e-06, |
|
"loss": 1.1065, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 5e-06, |
|
"loss": 1.1028, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 5.1, |
|
"learning_rate": 5.1e-06, |
|
"loss": 1.0823, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 5.2, |
|
"learning_rate": 5.2e-06, |
|
"loss": 1.0721, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 5.3, |
|
"learning_rate": 5.3e-06, |
|
"loss": 1.0809, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 5.4, |
|
"learning_rate": 5.4e-06, |
|
"loss": 1.0711, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 5.5, |
|
"learning_rate": 5.500000000000001e-06, |
|
"loss": 1.0566, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"learning_rate": 5.600000000000001e-06, |
|
"loss": 1.0401, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 5.7, |
|
"learning_rate": 5.7000000000000005e-06, |
|
"loss": 1.073, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 5.8, |
|
"learning_rate": 5.8e-06, |
|
"loss": 1.0281, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 5.9, |
|
"learning_rate": 5.9e-06, |
|
"loss": 1.0327, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"learning_rate": 6e-06, |
|
"loss": 1.0324, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"eval_accuracy_safe": 0.6667148546073454, |
|
      "eval_accuracy_unlabeled": null,
|
"eval_accuracy_unsafe": 0.7104100559195431, |
|
"eval_iou_safe": 0.06715586533607736, |
|
"eval_iou_unlabeled": 0.0, |
|
"eval_iou_unsafe": 0.7035684769007364, |
|
"eval_loss": 0.9139688611030579, |
|
"eval_mean_accuracy": 0.6885624552634442, |
|
"eval_mean_iou": 0.25690811407893793, |
|
"eval_overall_accuracy": 0.709119369734579, |
|
"eval_runtime": 9.3129, |
|
"eval_samples_per_second": 7.194, |
|
"eval_steps_per_second": 0.537, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 6.1, |
|
"learning_rate": 6.1e-06, |
|
"loss": 0.996, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 6.2, |
|
"learning_rate": 6.2e-06, |
|
"loss": 1.0074, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 6.3, |
|
"learning_rate": 6.300000000000001e-06, |
|
"loss": 1.0368, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 6.4, |
|
"learning_rate": 6.4000000000000006e-06, |
|
"loss": 0.981, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 6.5, |
|
"learning_rate": 6.5000000000000004e-06, |
|
"loss": 0.9959, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 6.6, |
|
"learning_rate": 6.6e-06, |
|
"loss": 0.9577, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 6.7, |
|
"learning_rate": 6.700000000000001e-06, |
|
"loss": 0.9664, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 6.8, |
|
"learning_rate": 6.800000000000001e-06, |
|
"loss": 0.9371, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 6.9, |
|
"learning_rate": 6.900000000000001e-06, |
|
"loss": 0.9334, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"learning_rate": 7.000000000000001e-06, |
|
"loss": 0.9258, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 7.1, |
|
"learning_rate": 7.1e-06, |
|
"loss": 0.9073, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 7.2, |
|
"learning_rate": 7.2e-06, |
|
"loss": 1.004, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 7.3, |
|
"learning_rate": 7.2999999999999996e-06, |
|
"loss": 0.9029, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 7.4, |
|
"learning_rate": 7.4e-06, |
|
"loss": 0.8715, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 7.5, |
|
"learning_rate": 7.5e-06, |
|
"loss": 0.8641, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 7.6, |
|
"learning_rate": 7.6e-06, |
|
"loss": 0.8934, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 7.7, |
|
"learning_rate": 7.7e-06, |
|
"loss": 0.8487, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 7.8, |
|
"learning_rate": 7.8e-06, |
|
"loss": 0.8626, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 7.9, |
|
"learning_rate": 7.9e-06, |
|
"loss": 0.8309, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 0.8058, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"eval_accuracy_safe": 0.5359944641693748, |
|
      "eval_accuracy_unlabeled": null,
|
"eval_accuracy_unsafe": 0.8199187015242027, |
|
"eval_iou_safe": 0.08228570650745816, |
|
"eval_iou_unlabeled": 0.0, |
|
"eval_iou_unsafe": 0.8086088911065791, |
|
"eval_loss": 0.7629496455192566, |
|
"eval_mean_accuracy": 0.6779565828467887, |
|
"eval_mean_iou": 0.29696486587134574, |
|
"eval_overall_accuracy": 0.8115320348027927, |
|
"eval_runtime": 10.0198, |
|
"eval_samples_per_second": 6.687, |
|
"eval_steps_per_second": 0.499, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 8.1, |
|
"learning_rate": 8.1e-06, |
|
"loss": 0.8324, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 8.2, |
|
"learning_rate": 8.200000000000001e-06, |
|
"loss": 0.7996, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 8.3, |
|
"learning_rate": 8.3e-06, |
|
"loss": 0.8127, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 8.4, |
|
"learning_rate": 8.400000000000001e-06, |
|
"loss": 0.7676, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 8.5, |
|
"learning_rate": 8.500000000000002e-06, |
|
"loss": 0.7872, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 8.6, |
|
"learning_rate": 8.599999999999999e-06, |
|
"loss": 0.771, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 8.7, |
|
"learning_rate": 8.7e-06, |
|
"loss": 0.7401, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 8.8, |
|
"learning_rate": 8.8e-06, |
|
"loss": 0.7632, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 8.9, |
|
"learning_rate": 8.9e-06, |
|
"loss": 0.7355, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"learning_rate": 9e-06, |
|
"loss": 0.7243, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 9.1, |
|
"learning_rate": 9.100000000000001e-06, |
|
"loss": 0.7161, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 9.2, |
|
"learning_rate": 9.2e-06, |
|
"loss": 0.6884, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 9.3, |
|
"learning_rate": 9.3e-06, |
|
"loss": 0.6782, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 9.4, |
|
"learning_rate": 9.4e-06, |
|
"loss": 0.6289, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 9.5, |
|
"learning_rate": 9.5e-06, |
|
"loss": 0.6552, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 9.6, |
|
"learning_rate": 9.600000000000001e-06, |
|
"loss": 0.6889, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 9.7, |
|
"learning_rate": 9.7e-06, |
|
"loss": 0.6848, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 9.8, |
|
"learning_rate": 9.800000000000001e-06, |
|
"loss": 0.6281, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 9.9, |
|
"learning_rate": 9.900000000000002e-06, |
|
"loss": 0.6154, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"learning_rate": 1e-05, |
|
"loss": 0.681, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"eval_accuracy_safe": 0.4657171714835332, |
|
      "eval_accuracy_unlabeled": null,
|
"eval_accuracy_unsafe": 0.9271301717833063, |
|
"eval_iou_safe": 0.1407124757510266, |
|
"eval_iou_unlabeled": 0.0, |
|
"eval_iou_unsafe": 0.9122946126509682, |
|
"eval_loss": 0.5544750094413757, |
|
"eval_mean_accuracy": 0.6964236716334198, |
|
"eval_mean_iou": 0.35100236280066494, |
|
"eval_overall_accuracy": 0.9135007715936917, |
|
"eval_runtime": 10.2459, |
|
"eval_samples_per_second": 6.539, |
|
"eval_steps_per_second": 0.488, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 10.1, |
|
"learning_rate": 1.0100000000000002e-05, |
|
"loss": 0.616, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 10.2, |
|
"learning_rate": 1.02e-05, |
|
"loss": 0.6151, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 10.3, |
|
"learning_rate": 1.03e-05, |
|
"loss": 0.5594, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 10.4, |
|
"learning_rate": 1.04e-05, |
|
"loss": 0.5978, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 10.5, |
|
"learning_rate": 1.05e-05, |
|
"loss": 0.5543, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 10.6, |
|
"learning_rate": 1.06e-05, |
|
"loss": 0.6125, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 10.7, |
|
"learning_rate": 1.0700000000000001e-05, |
|
"loss": 0.5329, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 10.8, |
|
"learning_rate": 1.08e-05, |
|
"loss": 0.509, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 10.9, |
|
"learning_rate": 1.09e-05, |
|
"loss": 0.4982, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 11.0, |
|
"learning_rate": 1.1000000000000001e-05, |
|
"loss": 0.6686, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 11.1, |
|
"learning_rate": 1.11e-05, |
|
"loss": 0.4986, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 11.2, |
|
"learning_rate": 1.1200000000000001e-05, |
|
"loss": 0.5283, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 11.3, |
|
"learning_rate": 1.13e-05, |
|
"loss": 0.5007, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 11.4, |
|
"learning_rate": 1.1400000000000001e-05, |
|
"loss": 0.4822, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 11.5, |
|
"learning_rate": 1.1500000000000002e-05, |
|
"loss": 0.4493, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 11.6, |
|
"learning_rate": 1.16e-05, |
|
"loss": 0.5733, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 11.7, |
|
"learning_rate": 1.1700000000000001e-05, |
|
"loss": 0.4298, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 11.8, |
|
"learning_rate": 1.18e-05, |
|
"loss": 0.4971, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 11.9, |
|
"learning_rate": 1.19e-05, |
|
"loss": 0.4348, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 12.0, |
|
"learning_rate": 1.2e-05, |
|
"loss": 0.5248, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 12.0, |
|
"eval_accuracy_safe": 0.38612804114093624, |
|
      "eval_accuracy_unlabeled": null,
|
"eval_accuracy_unsafe": 0.963271126063562, |
|
"eval_iou_safe": 0.17572296116049224, |
|
"eval_iou_unlabeled": 0.0, |
|
"eval_iou_unsafe": 0.9456049903923704, |
|
"eval_loss": 0.41525349020957947, |
|
"eval_mean_accuracy": 0.6746995836022491, |
|
"eval_mean_iou": 0.37377598385095423, |
|
"eval_overall_accuracy": 0.946223244738223, |
|
"eval_runtime": 10.1924, |
|
"eval_samples_per_second": 6.574, |
|
"eval_steps_per_second": 0.491, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 12.1, |
|
"learning_rate": 1.2100000000000001e-05, |
|
"loss": 0.4342, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 12.2, |
|
"learning_rate": 1.22e-05, |
|
"loss": 0.4505, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 12.3, |
|
"learning_rate": 1.23e-05, |
|
"loss": 0.4162, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 12.4, |
|
"learning_rate": 1.24e-05, |
|
"loss": 0.4098, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 12.5, |
|
"learning_rate": 1.25e-05, |
|
"loss": 0.4432, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 12.6, |
|
"learning_rate": 1.2600000000000001e-05, |
|
"loss": 0.3806, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 12.7, |
|
"learning_rate": 1.27e-05, |
|
"loss": 0.4371, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 12.8, |
|
"learning_rate": 1.2800000000000001e-05, |
|
"loss": 0.3939, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 12.9, |
|
"learning_rate": 1.29e-05, |
|
"loss": 0.3648, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 13.0, |
|
"learning_rate": 1.3000000000000001e-05, |
|
"loss": 0.4909, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 13.1, |
|
"learning_rate": 1.3100000000000002e-05, |
|
"loss": 0.3521, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 13.2, |
|
"learning_rate": 1.32e-05, |
|
"loss": 0.4571, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 13.3, |
|
"learning_rate": 1.3300000000000001e-05, |
|
"loss": 0.3663, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 13.4, |
|
"learning_rate": 1.3400000000000002e-05, |
|
"loss": 0.3555, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 13.5, |
|
"learning_rate": 1.3500000000000001e-05, |
|
"loss": 0.3289, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 13.6, |
|
"learning_rate": 1.3600000000000002e-05, |
|
"loss": 0.4375, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 13.7, |
|
"learning_rate": 1.3700000000000001e-05, |
|
"loss": 0.3208, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 13.8, |
|
"learning_rate": 1.3800000000000002e-05, |
|
"loss": 0.3526, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 13.9, |
|
"learning_rate": 1.3900000000000002e-05, |
|
"loss": 0.4187, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 14.0, |
|
"learning_rate": 1.4000000000000001e-05, |
|
"loss": 0.3372, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 14.0, |
|
"eval_accuracy_safe": 0.3275700556281587, |
|
      "eval_accuracy_unlabeled": null,
|
"eval_accuracy_unsafe": 0.9820977555326695, |
|
"eval_iou_safe": 0.20636769111391487, |
|
"eval_iou_unlabeled": 0.0, |
|
"eval_iou_unsafe": 0.962400245514124, |
|
"eval_loss": 0.30499833822250366, |
|
"eval_mean_accuracy": 0.6548339055804141, |
|
"eval_mean_iou": 0.3895893122093463, |
|
"eval_overall_accuracy": 0.9627640567608734, |
|
"eval_runtime": 9.9614, |
|
"eval_samples_per_second": 6.726, |
|
"eval_steps_per_second": 0.502, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 14.1, |
|
"learning_rate": 1.4099999999999999e-05, |
|
"loss": 0.2916, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 14.2, |
|
"learning_rate": 1.42e-05, |
|
"loss": 0.2813, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 14.3, |
|
"learning_rate": 1.43e-05, |
|
"loss": 0.3099, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 14.4, |
|
"learning_rate": 1.44e-05, |
|
"loss": 0.3049, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 14.5, |
|
"learning_rate": 1.45e-05, |
|
"loss": 0.3078, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 14.6, |
|
"learning_rate": 1.4599999999999999e-05, |
|
"loss": 0.4255, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 14.7, |
|
"learning_rate": 1.47e-05, |
|
"loss": 0.3547, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 14.8, |
|
"learning_rate": 1.48e-05, |
|
"loss": 0.2994, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 14.9, |
|
"learning_rate": 1.49e-05, |
|
"loss": 0.2957, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 15.0, |
|
"learning_rate": 1.5e-05, |
|
"loss": 0.3591, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 15.1, |
|
"learning_rate": 1.51e-05, |
|
"loss": 0.2865, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 15.2, |
|
"learning_rate": 1.52e-05, |
|
"loss": 0.2496, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 15.3, |
|
"learning_rate": 1.53e-05, |
|
"loss": 0.2879, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 15.4, |
|
"learning_rate": 1.54e-05, |
|
"loss": 0.2785, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 15.5, |
|
"learning_rate": 1.55e-05, |
|
"loss": 0.2806, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 15.6, |
|
"learning_rate": 1.56e-05, |
|
"loss": 0.2708, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 15.7, |
|
"learning_rate": 1.5700000000000002e-05, |
|
"loss": 0.3191, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 15.8, |
|
"learning_rate": 1.58e-05, |
|
"loss": 0.3125, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 15.9, |
|
"learning_rate": 1.59e-05, |
|
"loss": 0.3343, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 16.0, |
|
"learning_rate": 1.6000000000000003e-05, |
|
"loss": 0.2818, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 16.0, |
|
"eval_accuracy_safe": 0.4809831110905509, |
|
      "eval_accuracy_unlabeled": null,
|
"eval_accuracy_unsafe": 0.98521576551645, |
|
"eval_iou_safe": 0.32389013422357094, |
|
"eval_iou_unlabeled": 0.0, |
|
"eval_iou_unsafe": 0.9698938085941798, |
|
"eval_loss": 0.23463933169841766, |
|
"eval_mean_accuracy": 0.7330994383035004, |
|
"eval_mean_iou": 0.43126131427258363, |
|
"eval_overall_accuracy": 0.9703215414018773, |
|
"eval_runtime": 10.542, |
|
"eval_samples_per_second": 6.356, |
|
"eval_steps_per_second": 0.474, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 16.1, |
|
"learning_rate": 1.6100000000000002e-05, |
|
"loss": 0.2259, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 16.2, |
|
"learning_rate": 1.62e-05, |
|
"loss": 0.268, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 16.3, |
|
"learning_rate": 1.63e-05, |
|
"loss": 0.2373, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 16.4, |
|
"learning_rate": 1.6400000000000002e-05, |
|
"loss": 0.2507, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 16.5, |
|
"learning_rate": 1.65e-05, |
|
"loss": 0.2117, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 16.6, |
|
"learning_rate": 1.66e-05, |
|
"loss": 0.2779, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 16.7, |
|
"learning_rate": 1.6700000000000003e-05, |
|
"loss": 0.2908, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 16.8, |
|
"learning_rate": 1.6800000000000002e-05, |
|
"loss": 0.2671, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 16.9, |
|
"learning_rate": 1.69e-05, |
|
"loss": 0.2014, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 17.0, |
|
"learning_rate": 1.7000000000000003e-05, |
|
"loss": 0.2139, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 17.1, |
|
"learning_rate": 1.7100000000000002e-05, |
|
"loss": 0.2172, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 17.2, |
|
"learning_rate": 1.7199999999999998e-05, |
|
"loss": 0.1981, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 17.3, |
|
"learning_rate": 1.73e-05, |
|
"loss": 0.1886, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 17.4, |
|
"learning_rate": 1.74e-05, |
|
"loss": 0.2209, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 17.5, |
|
"learning_rate": 1.75e-05, |
|
"loss": 0.2069, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 17.6, |
|
"learning_rate": 1.76e-05, |
|
"loss": 0.2043, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 17.7, |
|
"learning_rate": 1.77e-05, |
|
"loss": 0.204, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 17.8, |
|
"learning_rate": 1.78e-05, |
|
"loss": 0.1817, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 17.9, |
|
"learning_rate": 1.79e-05, |
|
"loss": 0.2892, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 18.0, |
|
"learning_rate": 1.8e-05, |
|
"loss": 0.2081, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 18.0, |
|
"eval_accuracy_safe": 0.6050188703975697, |
|
      "eval_accuracy_unlabeled": null,
|
"eval_accuracy_unsafe": 0.9895959165603491, |
|
"eval_iou_safe": 0.45089868013537665, |
|
"eval_iou_unlabeled": 0.0, |
|
"eval_iou_unsafe": 0.9778401013393633, |
|
"eval_loss": 0.1802198737859726, |
|
"eval_mean_accuracy": 0.7973073934789594, |
|
"eval_mean_iou": 0.47624626049158, |
|
"eval_overall_accuracy": 0.9782361272555679, |
|
"eval_runtime": 9.9413, |
|
"eval_samples_per_second": 6.74, |
|
"eval_steps_per_second": 0.503, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 18.1, |
|
"learning_rate": 1.81e-05, |
|
"loss": 0.2182, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 18.2, |
|
"learning_rate": 1.8200000000000002e-05, |
|
"loss": 0.1684, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 18.3, |
|
"learning_rate": 1.83e-05, |
|
"loss": 0.185, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 18.4, |
|
"learning_rate": 1.84e-05, |
|
"loss": 0.1545, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 18.5, |
|
"learning_rate": 1.85e-05, |
|
"loss": 0.182, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 18.6, |
|
"learning_rate": 1.86e-05, |
|
"loss": 0.1736, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 18.7, |
|
"learning_rate": 1.87e-05, |
|
"loss": 0.1688, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 18.8, |
|
"learning_rate": 1.88e-05, |
|
"loss": 0.1437, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 18.9, |
|
"learning_rate": 1.8900000000000002e-05, |
|
"loss": 0.1709, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 19.0, |
|
"learning_rate": 1.9e-05, |
|
"loss": 0.2237, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 19.1, |
|
"learning_rate": 1.91e-05, |
|
"loss": 0.1785, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 19.2, |
|
"learning_rate": 1.9200000000000003e-05, |
|
"loss": 0.1698, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 19.3, |
|
"learning_rate": 1.93e-05, |
|
"loss": 0.148, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 19.4, |
|
"learning_rate": 1.94e-05, |
|
"loss": 0.1564, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 19.5, |
|
"learning_rate": 1.9500000000000003e-05, |
|
"loss": 0.1362, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 19.6, |
|
"learning_rate": 1.9600000000000002e-05, |
|
"loss": 0.2276, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 19.7, |
|
"learning_rate": 1.97e-05, |
|
"loss": 0.1363, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 19.8, |
|
"learning_rate": 1.9800000000000004e-05, |
|
"loss": 0.1327, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 19.9, |
|
"learning_rate": 1.9900000000000003e-05, |
|
"loss": 0.1364, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"learning_rate": 2e-05, |
|
"loss": 0.141, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"eval_accuracy_safe": 0.5942671770733343, |
|
      "eval_accuracy_unlabeled": null,
|
"eval_accuracy_unsafe": 0.9948155589085405, |
|
"eval_iou_safe": 0.5077946270190679, |
|
"eval_iou_unlabeled": 0.0, |
|
"eval_iou_unsafe": 0.9826799713781373, |
|
"eval_loss": 0.13621608912944794, |
|
"eval_mean_accuracy": 0.7945413679909374, |
|
"eval_mean_iou": 0.49682486613240173, |
|
"eval_overall_accuracy": 0.9829840019567689, |
|
"eval_runtime": 10.1896, |
|
"eval_samples_per_second": 6.575, |
|
"eval_steps_per_second": 0.491, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 20.1, |
|
"learning_rate": 2.01e-05, |
|
"loss": 0.1604, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 20.2, |
|
"learning_rate": 2.0200000000000003e-05, |
|
"loss": 0.1408, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 20.3, |
|
"learning_rate": 2.0300000000000002e-05, |
|
"loss": 0.1187, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 20.4, |
|
"learning_rate": 2.04e-05, |
|
"loss": 0.1333, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 20.5, |
|
"learning_rate": 2.05e-05, |
|
"loss": 0.1454, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 20.6, |
|
"learning_rate": 2.06e-05, |
|
"loss": 0.1241, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 20.7, |
|
"learning_rate": 2.07e-05, |
|
"loss": 0.1159, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 20.8, |
|
"learning_rate": 2.08e-05, |
|
"loss": 0.1315, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 20.9, |
|
"learning_rate": 2.09e-05, |
|
"loss": 0.1234, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 21.0, |
|
"learning_rate": 2.1e-05, |
|
"loss": 0.1264, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 21.1, |
|
"learning_rate": 2.11e-05, |
|
"loss": 0.1121, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 21.2, |
|
"learning_rate": 2.12e-05, |
|
"loss": 0.1177, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 21.3, |
|
"learning_rate": 2.13e-05, |
|
"loss": 0.1201, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 21.4, |
|
"learning_rate": 2.1400000000000002e-05, |
|
"loss": 0.1105, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 21.5, |
|
"learning_rate": 2.15e-05, |
|
"loss": 0.1061, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 21.6, |
|
"learning_rate": 2.16e-05, |
|
"loss": 0.1219, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 21.7, |
|
"learning_rate": 2.1700000000000002e-05, |
|
"loss": 0.0945, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 21.8, |
|
"learning_rate": 2.18e-05, |
|
"loss": 0.1224, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 21.9, |
|
"learning_rate": 2.19e-05, |
|
"loss": 0.1251, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 22.0, |
|
"learning_rate": 2.2000000000000003e-05, |
|
"loss": 0.0963, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 22.0, |
|
"eval_accuracy_safe": 0.5325634827930501, |
|
      "eval_accuracy_unlabeled": null,
|
"eval_accuracy_unsafe": 0.9978760148375644, |
|
"eval_iou_safe": 0.4978243439248295, |
|
      "eval_iou_unlabeled": null,
|
"eval_iou_unsafe": 0.9838778094020525, |
|
"eval_loss": 0.10320360213518143, |
|
"eval_mean_accuracy": 0.7652197488153072, |
|
"eval_mean_iou": 0.740851076663441, |
|
"eval_overall_accuracy": 0.9841314287328008, |
|
"eval_runtime": 10.0963, |
|
"eval_samples_per_second": 6.636, |
|
"eval_steps_per_second": 0.495, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 22.1, |
|
"learning_rate": 2.2100000000000002e-05, |
|
"loss": 0.1083, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 22.2, |
|
"learning_rate": 2.22e-05, |
|
"loss": 0.0885, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 22.3, |
|
"learning_rate": 2.23e-05, |
|
"loss": 0.0925, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 22.4, |
|
"learning_rate": 2.2400000000000002e-05, |
|
"loss": 0.111, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 22.5, |
|
"learning_rate": 2.25e-05, |
|
"loss": 0.1168, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 22.6, |
|
"learning_rate": 2.26e-05, |
|
"loss": 0.0963, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 22.7, |
|
"learning_rate": 2.2700000000000003e-05, |
|
"loss": 0.0989, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 22.8, |
|
"learning_rate": 2.2800000000000002e-05, |
|
"loss": 0.0887, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 22.9, |
|
"learning_rate": 2.29e-05, |
|
"loss": 0.1072, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 23.0, |
|
"learning_rate": 2.3000000000000003e-05, |
|
"loss": 0.1032, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 23.1, |
|
"learning_rate": 2.3100000000000002e-05, |
|
"loss": 0.1034, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 23.2, |
|
"learning_rate": 2.32e-05, |
|
"loss": 0.0931, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 23.3, |
|
"learning_rate": 2.3300000000000004e-05, |
|
"loss": 0.0855, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 23.4, |
|
"learning_rate": 2.3400000000000003e-05, |
|
"loss": 0.0973, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 23.5, |
|
"learning_rate": 2.35e-05, |
|
"loss": 0.0835, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 23.6, |
|
"learning_rate": 2.36e-05, |
|
"loss": 0.0767, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 23.7, |
|
"learning_rate": 2.37e-05, |
|
"loss": 0.0985, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 23.8, |
|
"learning_rate": 2.38e-05, |
|
"loss": 0.1129, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 23.9, |
|
"learning_rate": 2.39e-05, |
|
"loss": 0.0763, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 24.0, |
|
"learning_rate": 2.4e-05, |
|
"loss": 0.0842, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 24.0, |
|
"eval_accuracy_safe": 0.6432241972852841, |
|
      "eval_accuracy_unlabeled": null,
|
"eval_accuracy_unsafe": 0.9967877679857008, |
|
"eval_iou_safe": 0.5818214152459925, |
|
      "eval_iou_unlabeled": null,
|
"eval_iou_unsafe": 0.9860795816956319, |
|
"eval_loss": 0.07701940834522247, |
|
"eval_mean_accuracy": 0.8200059826354924, |
|
"eval_mean_iou": 0.7839504984708122, |
|
"eval_overall_accuracy": 0.9863440670184235, |
|
"eval_runtime": 10.2164, |
|
"eval_samples_per_second": 6.558, |
|
"eval_steps_per_second": 0.489, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 24.1, |
|
"learning_rate": 2.41e-05, |
|
"loss": 0.0811, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 24.2, |
|
"learning_rate": 2.4200000000000002e-05, |
|
"loss": 0.0857, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 24.3, |
|
"learning_rate": 2.43e-05, |
|
"loss": 0.0772, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 24.4, |
|
"learning_rate": 2.44e-05, |
|
"loss": 0.0846, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 24.5, |
|
"learning_rate": 2.45e-05, |
|
"loss": 0.1003, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 24.6, |
|
"learning_rate": 2.46e-05, |
|
"loss": 0.0709, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 24.7, |
|
"learning_rate": 2.47e-05, |
|
"loss": 0.1084, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 24.8, |
|
"learning_rate": 2.48e-05, |
|
"loss": 0.0854, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 24.9, |
|
"learning_rate": 2.4900000000000002e-05, |
|
"loss": 0.0868, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 25.0, |
|
"learning_rate": 2.5e-05, |
|
"loss": 0.0744, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 25.1, |
|
"learning_rate": 2.51e-05, |
|
"loss": 0.083, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 25.2, |
|
"learning_rate": 2.5200000000000003e-05, |
|
"loss": 0.0675, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 25.3, |
|
"learning_rate": 2.5300000000000002e-05, |
|
"loss": 0.0835, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 25.4, |
|
"learning_rate": 2.54e-05, |
|
"loss": 0.0785, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 25.5, |
|
"learning_rate": 2.5500000000000003e-05, |
|
"loss": 0.0753, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 25.6, |
|
"learning_rate": 2.5600000000000002e-05, |
|
"loss": 0.0809, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 25.7, |
|
"learning_rate": 2.57e-05, |
|
"loss": 0.0776, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 25.8, |
|
"learning_rate": 2.58e-05, |
|
"loss": 0.0657, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 25.9, |
|
"learning_rate": 2.5900000000000003e-05, |
|
"loss": 0.1026, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 26.0, |
|
"learning_rate": 2.6000000000000002e-05, |
|
"loss": 0.0702, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 26.0, |
|
"eval_accuracy_safe": 0.6417380811947525, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9968286014435097, |
|
"eval_iou_safe": 0.5811824221709394, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9860758518814631, |
|
"eval_loss": 0.06694883108139038, |
|
"eval_mean_accuracy": 0.8192833413191312, |
|
"eval_mean_iou": 0.7836291370262012, |
|
"eval_overall_accuracy": 0.9863397968349172, |
|
"eval_runtime": 9.7688, |
|
"eval_samples_per_second": 6.859, |
|
"eval_steps_per_second": 0.512, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 26.1, |
|
"learning_rate": 2.61e-05, |
|
"loss": 0.0826, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 26.2, |
|
"learning_rate": 2.6200000000000003e-05, |
|
"loss": 0.0727, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 26.3, |
|
"learning_rate": 2.6300000000000002e-05, |
|
"loss": 0.0707, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 26.4, |
|
"learning_rate": 2.64e-05, |
|
"loss": 0.0679, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 26.5, |
|
"learning_rate": 2.6500000000000004e-05, |
|
"loss": 0.0673, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 26.6, |
|
"learning_rate": 2.6600000000000003e-05, |
|
"loss": 0.0657, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 26.7, |
|
"learning_rate": 2.6700000000000002e-05, |
|
"loss": 0.0808, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 26.8, |
|
"learning_rate": 2.6800000000000004e-05, |
|
"loss": 0.0577, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 26.9, |
|
"learning_rate": 2.6900000000000003e-05, |
|
"loss": 0.0661, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 27.0, |
|
"learning_rate": 2.7000000000000002e-05, |
|
"loss": 0.0754, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 27.1, |
|
"learning_rate": 2.7100000000000005e-05, |
|
"loss": 0.0625, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 27.2, |
|
"learning_rate": 2.7200000000000004e-05, |
|
"loss": 0.0653, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 27.3, |
|
"learning_rate": 2.7300000000000003e-05, |
|
"loss": 0.0533, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 27.4, |
|
"learning_rate": 2.7400000000000002e-05, |
|
"loss": 0.0712, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 27.5, |
|
"learning_rate": 2.7500000000000004e-05, |
|
"loss": 0.0536, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 27.6, |
|
"learning_rate": 2.7600000000000003e-05, |
|
"loss": 0.048, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 27.7, |
|
"learning_rate": 2.7700000000000002e-05, |
|
"loss": 0.069, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 27.8, |
|
"learning_rate": 2.7800000000000005e-05, |
|
"loss": 0.0562, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 27.9, |
|
"learning_rate": 2.7900000000000004e-05, |
|
"loss": 0.0755, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 28.0, |
|
"learning_rate": 2.8000000000000003e-05, |
|
"loss": 0.0706, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 28.0, |
|
"eval_accuracy_safe": 0.7233703802221272, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9952718845333071, |
|
"eval_iou_safe": 0.6261111259409535, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9869617593578724, |
|
"eval_loss": 0.06709133088588715, |
|
"eval_mean_accuracy": 0.8593211323777171, |
|
"eval_mean_iou": 0.806536442649413, |
|
"eval_overall_accuracy": 0.9872403500685051, |
|
"eval_runtime": 9.8141, |
|
"eval_samples_per_second": 6.827, |
|
"eval_steps_per_second": 0.509, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 28.1, |
|
"learning_rate": 2.8100000000000005e-05, |
|
"loss": 0.0595, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 28.2, |
|
"learning_rate": 2.8199999999999998e-05, |
|
"loss": 0.0593, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 28.3, |
|
"learning_rate": 2.83e-05, |
|
"loss": 0.0729, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 28.4, |
|
"learning_rate": 2.84e-05, |
|
"loss": 0.0491, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 28.5, |
|
"learning_rate": 2.8499999999999998e-05, |
|
"loss": 0.0543, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 28.6, |
|
"learning_rate": 2.86e-05, |
|
"loss": 0.0504, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 28.7, |
|
"learning_rate": 2.87e-05, |
|
"loss": 0.0604, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 28.8, |
|
"learning_rate": 2.88e-05, |
|
"loss": 0.056, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 28.9, |
|
"learning_rate": 2.8899999999999998e-05, |
|
"loss": 0.0638, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 29.0, |
|
"learning_rate": 2.9e-05, |
|
"loss": 0.0588, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 29.1, |
|
"learning_rate": 2.91e-05, |
|
"loss": 0.0439, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 29.2, |
|
"learning_rate": 2.9199999999999998e-05, |
|
"loss": 0.0526, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 29.3, |
|
"learning_rate": 2.93e-05, |
|
"loss": 0.0479, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 29.4, |
|
"learning_rate": 2.94e-05, |
|
"loss": 0.0585, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 29.5, |
|
"learning_rate": 2.95e-05, |
|
"loss": 0.0607, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 29.6, |
|
"learning_rate": 2.96e-05, |
|
"loss": 0.0497, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 29.7, |
|
"learning_rate": 2.97e-05, |
|
"loss": 0.0492, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 29.8, |
|
"learning_rate": 2.98e-05, |
|
"loss": 0.0581, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 29.9, |
|
"learning_rate": 2.9900000000000002e-05, |
|
"loss": 0.0573, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 30.0, |
|
"learning_rate": 3e-05, |
|
"loss": 0.0747, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 30.0, |
|
"eval_accuracy_safe": 0.597141105855413, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9988174137800951, |
|
"eval_iou_safe": 0.5748081022860737, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9867182686762664, |
|
"eval_loss": 0.055133841931819916, |
|
"eval_mean_accuracy": 0.7979792598177541, |
|
"eval_mean_iou": 0.7807631854811701, |
|
"eval_overall_accuracy": 0.9869525397001807, |
|
"eval_runtime": 9.668, |
|
"eval_samples_per_second": 6.93, |
|
"eval_steps_per_second": 0.517, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 30.1, |
|
"learning_rate": 3.01e-05, |
|
"loss": 0.0466, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 30.2, |
|
"learning_rate": 3.02e-05, |
|
"loss": 0.0455, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 30.3, |
|
"learning_rate": 3.03e-05, |
|
"loss": 0.0549, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 30.4, |
|
"learning_rate": 3.04e-05, |
|
"loss": 0.0447, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 30.5, |
|
"learning_rate": 3.05e-05, |
|
"loss": 0.0533, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 30.6, |
|
"learning_rate": 3.06e-05, |
|
"loss": 0.055, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 30.7, |
|
"learning_rate": 3.07e-05, |
|
"loss": 0.0599, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 30.8, |
|
"learning_rate": 3.08e-05, |
|
"loss": 0.0434, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 30.9, |
|
"learning_rate": 3.09e-05, |
|
"loss": 0.0719, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 31.0, |
|
"learning_rate": 3.1e-05, |
|
"loss": 0.0444, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 31.1, |
|
"learning_rate": 3.1100000000000004e-05, |
|
"loss": 0.0485, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 31.2, |
|
"learning_rate": 3.12e-05, |
|
"loss": 0.0952, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 31.3, |
|
"learning_rate": 3.13e-05, |
|
"loss": 0.0434, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 31.4, |
|
"learning_rate": 3.1400000000000004e-05, |
|
"loss": 0.0463, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 31.5, |
|
"learning_rate": 3.15e-05, |
|
"loss": 0.0524, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 31.6, |
|
"learning_rate": 3.16e-05, |
|
"loss": 0.0448, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 31.7, |
|
"learning_rate": 3.1700000000000005e-05, |
|
"loss": 0.0583, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 31.8, |
|
"learning_rate": 3.18e-05, |
|
"loss": 0.0434, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 31.9, |
|
"learning_rate": 3.19e-05, |
|
"loss": 0.0497, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 32.0, |
|
"learning_rate": 3.2000000000000005e-05, |
|
"loss": 0.057, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 32.0, |
|
"eval_accuracy_safe": 0.7511073588767969, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9960510056823042, |
|
"eval_iou_safe": 0.6648490390915047, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9885619936960419, |
|
"eval_loss": 0.04917014017701149, |
|
"eval_mean_accuracy": 0.8735791822795506, |
|
"eval_mean_iou": 0.8267055163937733, |
|
"eval_overall_accuracy": 0.9888157631034282, |
|
"eval_runtime": 10.6517, |
|
"eval_samples_per_second": 6.29, |
|
"eval_steps_per_second": 0.469, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 32.1, |
|
"learning_rate": 3.21e-05, |
|
"loss": 0.0455, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 32.2, |
|
"learning_rate": 3.2200000000000003e-05, |
|
"loss": 0.0469, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 32.3, |
|
"learning_rate": 3.2300000000000006e-05, |
|
"loss": 0.0494, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 32.4, |
|
"learning_rate": 3.24e-05, |
|
"loss": 0.0464, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 32.5, |
|
"learning_rate": 3.2500000000000004e-05, |
|
"loss": 0.0447, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 32.6, |
|
"learning_rate": 3.26e-05, |
|
"loss": 0.0431, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 32.7, |
|
"learning_rate": 3.27e-05, |
|
"loss": 0.043, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 32.8, |
|
"learning_rate": 3.2800000000000004e-05, |
|
"loss": 0.0419, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 32.9, |
|
"learning_rate": 3.29e-05, |
|
"loss": 0.0424, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 33.0, |
|
"learning_rate": 3.3e-05, |
|
"loss": 0.072, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 33.1, |
|
"learning_rate": 3.3100000000000005e-05, |
|
"loss": 0.0518, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 33.2, |
|
"learning_rate": 3.32e-05, |
|
"loss": 0.0341, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 33.3, |
|
"learning_rate": 3.33e-05, |
|
"loss": 0.0483, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 33.4, |
|
"learning_rate": 3.3400000000000005e-05, |
|
"loss": 0.0409, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 33.5, |
|
"learning_rate": 3.35e-05, |
|
"loss": 0.0433, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 33.6, |
|
"learning_rate": 3.3600000000000004e-05, |
|
"loss": 0.0433, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 33.7, |
|
"learning_rate": 3.3700000000000006e-05, |
|
"loss": 0.0403, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 33.8, |
|
"learning_rate": 3.38e-05, |
|
"loss": 0.0426, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 33.9, |
|
"learning_rate": 3.3900000000000004e-05, |
|
"loss": 0.0365, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 34.0, |
|
"learning_rate": 3.4000000000000007e-05, |
|
"loss": 0.0435, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 34.0, |
|
"eval_accuracy_safe": 0.627977532854538, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9987678973456258, |
|
"eval_iou_safe": 0.603546108487726, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9875850541007082, |
|
"eval_loss": 0.05069776251912117, |
|
"eval_mean_accuracy": 0.8133727151000818, |
|
"eval_mean_iou": 0.7955655812942171, |
|
"eval_overall_accuracy": 0.987815344511573, |
|
"eval_runtime": 9.8092, |
|
"eval_samples_per_second": 6.83, |
|
"eval_steps_per_second": 0.51, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 34.1, |
|
"learning_rate": 3.41e-05, |
|
"loss": 0.0399, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 34.2, |
|
"learning_rate": 3.4200000000000005e-05, |
|
"loss": 0.0351, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 34.3, |
|
"learning_rate": 3.430000000000001e-05, |
|
"loss": 0.044, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 34.4, |
|
"learning_rate": 3.4399999999999996e-05, |
|
"loss": 0.0536, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 34.5, |
|
"learning_rate": 3.45e-05, |
|
"loss": 0.0387, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 34.6, |
|
"learning_rate": 3.46e-05, |
|
"loss": 0.0326, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 34.7, |
|
"learning_rate": 3.4699999999999996e-05, |
|
"loss": 0.0362, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 34.8, |
|
"learning_rate": 3.48e-05, |
|
"loss": 0.1107, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 34.9, |
|
"learning_rate": 3.49e-05, |
|
"loss": 0.056, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 35.0, |
|
"learning_rate": 3.5e-05, |
|
"loss": 0.0371, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 35.1, |
|
"learning_rate": 3.51e-05, |
|
"loss": 0.0336, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 35.2, |
|
"learning_rate": 3.52e-05, |
|
"loss": 0.032, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 35.3, |
|
"learning_rate": 3.53e-05, |
|
"loss": 0.0406, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 35.4, |
|
"learning_rate": 3.54e-05, |
|
"loss": 0.0371, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 35.5, |
|
"learning_rate": 3.55e-05, |
|
"loss": 0.0414, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 35.6, |
|
"learning_rate": 3.56e-05, |
|
"loss": 0.0388, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 35.7, |
|
"learning_rate": 3.57e-05, |
|
"loss": 0.0533, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 35.8, |
|
"learning_rate": 3.58e-05, |
|
"loss": 0.04, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 35.9, |
|
"learning_rate": 3.59e-05, |
|
"loss": 0.0487, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 36.0, |
|
"learning_rate": 3.6e-05, |
|
"loss": 0.0326, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 36.0, |
|
"eval_accuracy_safe": 0.7829615151830563, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9961353126921768, |
|
"eval_iou_safe": 0.694748364475991, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9895979273372039, |
|
"eval_loss": 0.0417884923517704, |
|
"eval_mean_accuracy": 0.8895484139376166, |
|
"eval_mean_iou": 0.8421731459065974, |
|
"eval_overall_accuracy": 0.9898385005210762, |
|
"eval_runtime": 9.8003, |
|
"eval_samples_per_second": 6.837, |
|
"eval_steps_per_second": 0.51, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 36.1, |
|
"learning_rate": 3.61e-05, |
|
"loss": 0.0318, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 36.2, |
|
"learning_rate": 3.62e-05, |
|
"loss": 0.0395, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 36.3, |
|
"learning_rate": 3.63e-05, |
|
"loss": 0.033, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 36.4, |
|
"learning_rate": 3.6400000000000004e-05, |
|
"loss": 0.0336, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 36.5, |
|
"learning_rate": 3.65e-05, |
|
"loss": 0.056, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 36.6, |
|
"learning_rate": 3.66e-05, |
|
"loss": 0.0303, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 36.7, |
|
"learning_rate": 3.6700000000000004e-05, |
|
"loss": 0.0441, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 36.8, |
|
"learning_rate": 3.68e-05, |
|
"loss": 0.0373, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 36.9, |
|
"learning_rate": 3.69e-05, |
|
"loss": 0.0358, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 37.0, |
|
"learning_rate": 3.7e-05, |
|
"loss": 0.044, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 37.1, |
|
"learning_rate": 3.71e-05, |
|
"loss": 0.0412, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 37.2, |
|
"learning_rate": 3.72e-05, |
|
"loss": 0.0377, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 37.3, |
|
"learning_rate": 3.73e-05, |
|
"loss": 0.0349, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 37.4, |
|
"learning_rate": 3.74e-05, |
|
"loss": 0.0532, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 37.5, |
|
"learning_rate": 3.7500000000000003e-05, |
|
"loss": 0.0313, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 37.6, |
|
"learning_rate": 3.76e-05, |
|
"loss": 0.0334, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 37.7, |
|
"learning_rate": 3.77e-05, |
|
"loss": 0.0348, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 37.8, |
|
"learning_rate": 3.7800000000000004e-05, |
|
"loss": 0.0539, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 37.9, |
|
"learning_rate": 3.79e-05, |
|
"loss": 0.0323, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 38.0, |
|
"learning_rate": 3.8e-05, |
|
"loss": 0.0262, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 38.0, |
|
"eval_accuracy_safe": 0.7119826060809326, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9979335102235597, |
|
"eval_iou_safe": 0.6667172059022607, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9892611317607551, |
|
"eval_loss": 0.04198002442717552, |
|
"eval_mean_accuracy": 0.8549580581522461, |
|
"eval_mean_iou": 0.827989168831508, |
|
"eval_overall_accuracy": 0.989486979014838, |
|
"eval_runtime": 9.7227, |
|
"eval_samples_per_second": 6.891, |
|
"eval_steps_per_second": 0.514, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 38.1, |
|
"learning_rate": 3.8100000000000005e-05, |
|
"loss": 0.0391, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 38.2, |
|
"learning_rate": 3.82e-05, |
|
"loss": 0.0383, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 38.3, |
|
"learning_rate": 3.83e-05, |
|
"loss": 0.0337, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 38.4, |
|
"learning_rate": 3.8400000000000005e-05, |
|
"loss": 0.0331, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 38.5, |
|
"learning_rate": 3.85e-05, |
|
"loss": 0.0421, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 38.6, |
|
"learning_rate": 3.86e-05, |
|
"loss": 0.027, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 38.7, |
|
"learning_rate": 3.8700000000000006e-05, |
|
"loss": 0.0294, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 38.8, |
|
"learning_rate": 3.88e-05, |
|
"loss": 0.0278, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 38.9, |
|
"learning_rate": 3.8900000000000004e-05, |
|
"loss": 0.0347, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 39.0, |
|
"learning_rate": 3.9000000000000006e-05, |
|
"loss": 0.0355, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 39.1, |
|
"learning_rate": 3.91e-05, |
|
"loss": 0.0337, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 39.2, |
|
"learning_rate": 3.9200000000000004e-05, |
|
"loss": 0.0359, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 39.3, |
|
"learning_rate": 3.9300000000000007e-05, |
|
"loss": 0.0326, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 39.4, |
|
"learning_rate": 3.94e-05, |
|
"loss": 0.0288, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 39.5, |
|
"learning_rate": 3.9500000000000005e-05, |
|
"loss": 0.0303, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 39.6, |
|
"learning_rate": 3.960000000000001e-05, |
|
"loss": 0.034, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 39.7, |
|
"learning_rate": 3.97e-05, |
|
"loss": 0.0324, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 39.8, |
|
"learning_rate": 3.9800000000000005e-05, |
|
"loss": 0.0373, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 39.9, |
|
"learning_rate": 3.99e-05, |
|
"loss": 0.0372, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 40.0, |
|
"learning_rate": 4e-05, |
|
"loss": 0.0268, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 40.0, |
|
"eval_accuracy_safe": 0.7676396775648513, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9966623928429743, |
|
"eval_iou_safe": 0.6917825708583251, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9896630400873385, |
|
"eval_loss": 0.03918380290269852, |
|
"eval_mean_accuracy": 0.8821510352039128, |
|
"eval_mean_iou": 0.8407228054728317, |
|
"eval_overall_accuracy": 0.9898974290534631, |
|
"eval_runtime": 9.7657, |
|
"eval_samples_per_second": 6.861, |
|
"eval_steps_per_second": 0.512, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 40.1, |
|
"learning_rate": 4.0100000000000006e-05, |
|
"loss": 0.0305, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 40.2, |
|
"learning_rate": 4.02e-05, |
|
"loss": 0.0386, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 40.3, |
|
"learning_rate": 4.0300000000000004e-05, |
|
"loss": 0.0286, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 40.4, |
|
"learning_rate": 4.0400000000000006e-05, |
|
"loss": 0.0491, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 40.5, |
|
"learning_rate": 4.05e-05, |
|
"loss": 0.0355, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 40.6, |
|
"learning_rate": 4.0600000000000004e-05, |
|
"loss": 0.0259, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 40.7, |
|
"learning_rate": 4.07e-05, |
|
"loss": 0.0306, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 40.8, |
|
"learning_rate": 4.08e-05, |
|
"loss": 0.0412, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 40.9, |
|
"learning_rate": 4.09e-05, |
|
"loss": 0.0365, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 41.0, |
|
"learning_rate": 4.1e-05, |
|
"loss": 0.0339, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 41.1, |
|
"learning_rate": 4.11e-05, |
|
"loss": 0.0267, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 41.2, |
|
"learning_rate": 4.12e-05, |
|
"loss": 0.0273, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 41.3, |
|
"learning_rate": 4.13e-05, |
|
"loss": 0.0556, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 41.4, |
|
"learning_rate": 4.14e-05, |
|
"loss": 0.0347, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 41.5, |
|
"learning_rate": 4.15e-05, |
|
"loss": 0.0299, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 41.6, |
|
"learning_rate": 4.16e-05, |
|
"loss": 0.0896, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 41.7, |
|
"learning_rate": 4.17e-05, |
|
"loss": 0.0357, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 41.8, |
|
"learning_rate": 4.18e-05, |
|
"loss": 0.0523, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 41.9, |
|
"learning_rate": 4.19e-05, |
|
"loss": 0.0323, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 42.0, |
|
"learning_rate": 4.2e-05, |
|
"loss": 0.0395, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 42.0, |
|
"eval_accuracy_safe": 0.6754889148461263, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9984243330799234, |
|
"eval_iou_safe": 0.6422488284679091, |
|
"eval_iou_unlabeled": 0.0, |
|
"eval_iou_unsafe": 0.9886590543317243, |
|
"eval_loss": 0.04656589403748512, |
|
"eval_mean_accuracy": 0.8369566239630248, |
|
"eval_mean_iou": 0.5436359609332112, |
|
"eval_overall_accuracy": 0.9888853386266908, |
|
"eval_runtime": 9.8482, |
|
"eval_samples_per_second": 6.803, |
|
"eval_steps_per_second": 0.508, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 42.1, |
|
"learning_rate": 4.21e-05, |
|
"loss": 0.0378, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 42.2, |
|
"learning_rate": 4.22e-05, |
|
"loss": 0.0406, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 42.3, |
|
"learning_rate": 4.23e-05, |
|
"loss": 0.0399, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 42.4, |
|
"learning_rate": 4.24e-05, |
|
"loss": 0.0468, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 42.5, |
|
"learning_rate": 4.25e-05, |
|
"loss": 0.0271, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 42.6, |
|
"learning_rate": 4.26e-05, |
|
"loss": 0.0453, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 42.7, |
|
"learning_rate": 4.27e-05, |
|
"loss": 0.078, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 42.8, |
|
"learning_rate": 4.2800000000000004e-05, |
|
"loss": 0.0315, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 42.9, |
|
"learning_rate": 4.29e-05, |
|
"loss": 0.0314, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 43.0, |
|
"learning_rate": 4.3e-05, |
|
"loss": 0.0295, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 43.1, |
|
"learning_rate": 4.3100000000000004e-05, |
|
"loss": 0.0335, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 43.2, |
|
"learning_rate": 4.32e-05, |
|
"loss": 0.0318, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 43.3, |
|
"learning_rate": 4.33e-05, |
|
"loss": 0.0254, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 43.4, |
|
"learning_rate": 4.3400000000000005e-05, |
|
"loss": 0.026, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 43.5, |
|
"learning_rate": 4.35e-05, |
|
"loss": 0.0344, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 43.6, |
|
"learning_rate": 4.36e-05, |
|
"loss": 0.0279, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 43.7, |
|
"learning_rate": 4.3700000000000005e-05, |
|
"loss": 0.0251, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 43.8, |
|
"learning_rate": 4.38e-05, |
|
"loss": 0.0276, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 43.9, |
|
"learning_rate": 4.39e-05, |
|
"loss": 0.0291, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 44.0, |
|
"learning_rate": 4.4000000000000006e-05, |
|
"loss": 0.0279, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 44.0, |
|
"eval_accuracy_safe": 0.7945381860517114, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9946534571212905, |
|
"eval_iou_safe": 0.6758250496365994, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9884718134454017, |
|
"eval_loss": 0.043944161385297775, |
|
"eval_mean_accuracy": 0.894595821586501, |
|
"eval_mean_iou": 0.8321484315410006, |
|
"eval_overall_accuracy": 0.9887423728828999, |
|
"eval_runtime": 10.0763, |
|
"eval_samples_per_second": 6.649, |
|
"eval_steps_per_second": 0.496, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 44.1, |
|
"learning_rate": 4.41e-05, |
|
"loss": 0.0546, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 44.2, |
|
"learning_rate": 4.4200000000000004e-05, |
|
"loss": 0.0283, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 44.3, |
|
"learning_rate": 4.43e-05, |
|
"loss": 0.0291, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 44.4, |
|
"learning_rate": 4.44e-05, |
|
"loss": 0.0277, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 44.5, |
|
"learning_rate": 4.4500000000000004e-05, |
|
"loss": 0.0346, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 44.6, |
|
"learning_rate": 4.46e-05, |
|
"loss": 0.0248, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 44.7, |
|
"learning_rate": 4.47e-05, |
|
"loss": 0.0204, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 44.8, |
|
"learning_rate": 4.4800000000000005e-05, |
|
"loss": 0.0267, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 44.9, |
|
"learning_rate": 4.49e-05, |
|
"loss": 0.0305, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 45.0, |
|
"learning_rate": 4.5e-05, |
|
"loss": 0.0277, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 45.1, |
|
"learning_rate": 4.5100000000000005e-05, |
|
"loss": 0.0279, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 45.2, |
|
"learning_rate": 4.52e-05, |
|
"loss": 0.0314, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 45.3, |
|
"learning_rate": 4.53e-05, |
|
"loss": 0.0279, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 45.4, |
|
"learning_rate": 4.5400000000000006e-05, |
|
"loss": 0.0333, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 45.5, |
|
"learning_rate": 4.55e-05, |
|
"loss": 0.0291, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 45.6, |
|
"learning_rate": 4.5600000000000004e-05, |
|
"loss": 0.0228, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 45.7, |
|
"learning_rate": 4.5700000000000006e-05, |
|
"loss": 0.0255, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 45.8, |
|
"learning_rate": 4.58e-05, |
|
"loss": 0.0385, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 45.9, |
|
"learning_rate": 4.5900000000000004e-05, |
|
"loss": 0.0257, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 46.0, |
|
"learning_rate": 4.600000000000001e-05, |
|
"loss": 0.0468, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 46.0, |
|
"eval_accuracy_safe": 0.7821905081321968, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9967090931769053, |
|
"eval_iou_safe": 0.7058715069708381, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9901448585096287, |
|
"eval_loss": 0.03598049655556679, |
|
"eval_mean_accuracy": 0.889449800654551, |
|
"eval_mean_iou": 0.8480081827402335, |
|
"eval_overall_accuracy": 0.9903725581382637, |
|
"eval_runtime": 9.7124, |
|
"eval_samples_per_second": 6.898, |
|
"eval_steps_per_second": 0.515, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 46.1, |
|
"learning_rate": 4.61e-05, |
|
"loss": 0.0246, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 46.2, |
|
"learning_rate": 4.6200000000000005e-05, |
|
"loss": 0.0264, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 46.3, |
|
"learning_rate": 4.630000000000001e-05, |
|
"loss": 0.0254, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 46.4, |
|
"learning_rate": 4.64e-05, |
|
"loss": 0.0266, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 46.5, |
|
"learning_rate": 4.6500000000000005e-05, |
|
"loss": 0.031, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 46.6, |
|
"learning_rate": 4.660000000000001e-05, |
|
"loss": 0.0208, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 46.7, |
|
"learning_rate": 4.6700000000000003e-05, |
|
"loss": 0.0287, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 46.8, |
|
"learning_rate": 4.6800000000000006e-05, |
|
"loss": 0.0243, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 46.9, |
|
"learning_rate": 4.69e-05, |
|
"loss": 0.0249, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 47.0, |
|
"learning_rate": 4.7e-05, |
|
"loss": 0.0228, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 47.1, |
|
"learning_rate": 4.71e-05, |
|
"loss": 0.0221, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 47.2, |
|
"learning_rate": 4.72e-05, |
|
"loss": 0.0322, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 47.3, |
|
"learning_rate": 4.73e-05, |
|
"loss": 0.0254, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 47.4, |
|
"learning_rate": 4.74e-05, |
|
"loss": 0.0279, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 47.5, |
|
"learning_rate": 4.75e-05, |
|
"loss": 0.0205, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 47.6, |
|
"learning_rate": 4.76e-05, |
|
"loss": 0.0246, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 47.7, |
|
"learning_rate": 4.77e-05, |
|
"loss": 0.0231, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 47.8, |
|
"learning_rate": 4.78e-05, |
|
"loss": 0.0225, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 47.9, |
|
"learning_rate": 4.79e-05, |
|
"loss": 0.0208, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 48.0, |
|
"learning_rate": 4.8e-05, |
|
"loss": 0.0233, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 48.0, |
|
"eval_accuracy_safe": 0.7960146645541073, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9963735665314899, |
|
"eval_iou_safe": 0.7112711715528733, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9902254570286859, |
|
"eval_loss": 0.037598393857479095, |
|
"eval_mean_accuracy": 0.8961941155427986, |
|
"eval_mean_iou": 0.8507483142907797, |
|
"eval_overall_accuracy": 0.9904552858267257, |
|
"eval_runtime": 9.9744, |
|
"eval_samples_per_second": 6.717, |
|
"eval_steps_per_second": 0.501, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 48.1, |
|
"learning_rate": 4.8100000000000004e-05, |
|
"loss": 0.0208, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 48.2, |
|
"learning_rate": 4.82e-05, |
|
"loss": 0.0229, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 48.3, |
|
"learning_rate": 4.83e-05, |
|
"loss": 0.0232, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 48.4, |
|
"learning_rate": 4.8400000000000004e-05, |
|
"loss": 0.0253, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 48.5, |
|
"learning_rate": 4.85e-05, |
|
"loss": 0.0245, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 48.6, |
|
"learning_rate": 4.86e-05, |
|
"loss": 0.0199, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 48.7, |
|
"learning_rate": 4.87e-05, |
|
"loss": 0.0224, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 48.8, |
|
"learning_rate": 4.88e-05, |
|
"loss": 0.02, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 48.9, |
|
"learning_rate": 4.89e-05, |
|
"loss": 0.0299, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 49.0, |
|
"learning_rate": 4.9e-05, |
|
"loss": 0.0259, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 49.1, |
|
"learning_rate": 4.91e-05, |
|
"loss": 0.0209, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 49.2, |
|
"learning_rate": 4.92e-05, |
|
"loss": 0.0274, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 49.3, |
|
"learning_rate": 4.93e-05, |
|
"loss": 0.0199, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 49.4, |
|
"learning_rate": 4.94e-05, |
|
"loss": 0.0301, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 49.5, |
|
"learning_rate": 4.9500000000000004e-05, |
|
"loss": 0.022, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 49.6, |
|
"learning_rate": 4.96e-05, |
|
"loss": 0.0211, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 49.7, |
|
"learning_rate": 4.97e-05, |
|
"loss": 0.0185, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 49.8, |
|
"learning_rate": 4.9800000000000004e-05, |
|
"loss": 0.024, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 49.9, |
|
"learning_rate": 4.99e-05, |
|
"loss": 0.0223, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 50.0, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0288, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 50.0, |
|
"eval_accuracy_safe": 0.7725336448201818, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9964236696535715, |
|
"eval_iou_safe": 0.6913067476197047, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.989572364879132, |
|
"eval_loss": 0.038585416972637177, |
|
"eval_mean_accuracy": 0.8844786572368767, |
|
"eval_mean_iou": 0.8404395562494184, |
|
"eval_overall_accuracy": 0.9898103173099347, |
|
"eval_runtime": 9.9942, |
|
"eval_samples_per_second": 6.704, |
|
"eval_steps_per_second": 0.5, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 50.1, |
|
"learning_rate": 5.0100000000000005e-05, |
|
"loss": 0.0228, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 50.2, |
|
"learning_rate": 5.02e-05, |
|
"loss": 0.0247, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 50.3, |
|
"learning_rate": 5.03e-05, |
|
"loss": 0.0207, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 50.4, |
|
"learning_rate": 5.0400000000000005e-05, |
|
"loss": 0.0272, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 50.5, |
|
"learning_rate": 5.05e-05, |
|
"loss": 0.0188, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 50.6, |
|
"learning_rate": 5.0600000000000003e-05, |
|
"loss": 0.0261, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 50.7, |
|
"learning_rate": 5.0700000000000006e-05, |
|
"loss": 0.0231, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 50.8, |
|
"learning_rate": 5.08e-05, |
|
"loss": 0.0242, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 50.9, |
|
"learning_rate": 5.0900000000000004e-05, |
|
"loss": 0.0226, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 51.0, |
|
"learning_rate": 5.1000000000000006e-05, |
|
"loss": 0.024, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 51.1, |
|
"learning_rate": 5.11e-05, |
|
"loss": 0.0216, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 51.2, |
|
"learning_rate": 5.1200000000000004e-05, |
|
"loss": 0.0211, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 51.3, |
|
"learning_rate": 5.130000000000001e-05, |
|
"loss": 0.0192, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 51.4, |
|
"learning_rate": 5.14e-05, |
|
"loss": 0.0275, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 51.5, |
|
"learning_rate": 5.1500000000000005e-05, |
|
"loss": 0.0226, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 51.6, |
|
"learning_rate": 5.16e-05, |
|
"loss": 0.0212, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 51.7, |
|
"learning_rate": 5.17e-05, |
|
"loss": 0.0232, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 51.8, |
|
"learning_rate": 5.1800000000000005e-05, |
|
"loss": 0.022, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 51.9, |
|
"learning_rate": 5.19e-05, |
|
"loss": 0.02, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 52.0, |
|
"learning_rate": 5.2000000000000004e-05, |
|
"loss": 0.0266, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 52.0, |
|
"eval_accuracy_safe": 0.7560263838612804, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9976025597415196, |
|
"eval_iou_safe": 0.7008251349912269, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.990249030074391, |
|
"eval_loss": 0.036057572811841965, |
|
"eval_mean_accuracy": 0.8768144718013999, |
|
"eval_mean_iou": 0.845537082532809, |
|
"eval_overall_accuracy": 0.9904667868543027, |
|
"eval_runtime": 9.6421, |
|
"eval_samples_per_second": 6.949, |
|
"eval_steps_per_second": 0.519, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 52.1, |
|
"learning_rate": 5.2100000000000006e-05, |
|
"loss": 0.0196, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 52.2, |
|
"learning_rate": 5.22e-05, |
|
"loss": 0.0221, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 52.3, |
|
"learning_rate": 5.2300000000000004e-05, |
|
"loss": 0.0208, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 52.4, |
|
"learning_rate": 5.2400000000000007e-05, |
|
"loss": 0.0244, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 52.5, |
|
"learning_rate": 5.25e-05, |
|
"loss": 0.0192, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 52.6, |
|
"learning_rate": 5.2600000000000005e-05, |
|
"loss": 0.0231, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 52.7, |
|
"learning_rate": 5.270000000000001e-05, |
|
"loss": 0.021, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 52.8, |
|
"learning_rate": 5.28e-05, |
|
"loss": 0.0211, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 52.9, |
|
"learning_rate": 5.2900000000000005e-05, |
|
"loss": 0.0177, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 53.0, |
|
"learning_rate": 5.300000000000001e-05, |
|
"loss": 0.017, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 53.1, |
|
"learning_rate": 5.31e-05, |
|
"loss": 0.0439, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 53.2, |
|
"learning_rate": 5.3200000000000006e-05, |
|
"loss": 0.0237, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 53.3, |
|
"learning_rate": 5.330000000000001e-05, |
|
"loss": 0.0206, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 53.4, |
|
"learning_rate": 5.3400000000000004e-05, |
|
"loss": 0.0208, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 53.5, |
|
"learning_rate": 5.3500000000000006e-05, |
|
"loss": 0.0194, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 53.6, |
|
"learning_rate": 5.360000000000001e-05, |
|
"loss": 0.0174, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 53.7, |
|
"learning_rate": 5.3700000000000004e-05, |
|
"loss": 0.032, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 53.8, |
|
"learning_rate": 5.380000000000001e-05, |
|
"loss": 0.0246, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 53.9, |
|
"learning_rate": 5.390000000000001e-05, |
|
"loss": 0.0193, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 54.0, |
|
"learning_rate": 5.4000000000000005e-05, |
|
"loss": 0.0241, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 54.0, |
|
"eval_accuracy_safe": 0.8121807548930035, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9956612104327607, |
|
"eval_iou_safe": 0.7108506704276296, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9900016258040716, |
|
"eval_loss": 0.03671713173389435, |
|
"eval_mean_accuracy": 0.903920982662882, |
|
"eval_mean_iou": 0.8504261481158506, |
|
"eval_overall_accuracy": 0.9902414919725105, |
|
"eval_runtime": 9.733, |
|
"eval_samples_per_second": 6.884, |
|
"eval_steps_per_second": 0.514, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 54.1, |
|
"learning_rate": 5.410000000000001e-05, |
|
"loss": 0.025, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 54.2, |
|
"learning_rate": 5.420000000000001e-05, |
|
"loss": 0.0208, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 54.3, |
|
"learning_rate": 5.4300000000000005e-05, |
|
"loss": 0.0162, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 54.4, |
|
"learning_rate": 5.440000000000001e-05, |
|
"loss": 0.0185, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 54.5, |
|
"learning_rate": 5.45e-05, |
|
"loss": 0.0182, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 54.6, |
|
"learning_rate": 5.4600000000000006e-05, |
|
"loss": 0.019, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 54.7, |
|
"learning_rate": 5.470000000000001e-05, |
|
"loss": 0.0257, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 54.8, |
|
"learning_rate": 5.4800000000000004e-05, |
|
"loss": 0.0286, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 54.9, |
|
"learning_rate": 5.4900000000000006e-05, |
|
"loss": 0.0246, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 55.0, |
|
"learning_rate": 5.500000000000001e-05, |
|
"loss": 0.0214, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 55.1, |
|
"learning_rate": 5.5100000000000004e-05, |
|
"loss": 0.0245, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 55.2, |
|
"learning_rate": 5.520000000000001e-05, |
|
"loss": 0.0219, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 55.3, |
|
"learning_rate": 5.530000000000001e-05, |
|
"loss": 0.0203, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 55.4, |
|
"learning_rate": 5.5400000000000005e-05, |
|
"loss": 0.0216, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 55.5, |
|
"learning_rate": 5.550000000000001e-05, |
|
"loss": 0.0167, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 55.6, |
|
"learning_rate": 5.560000000000001e-05, |
|
"loss": 0.0229, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 55.7, |
|
"learning_rate": 5.5700000000000005e-05, |
|
"loss": 0.0173, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 55.8, |
|
"learning_rate": 5.580000000000001e-05, |
|
"loss": 0.0197, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 55.9, |
|
"learning_rate": 5.590000000000001e-05, |
|
"loss": 0.0181, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 56.0, |
|
"learning_rate": 5.6000000000000006e-05, |
|
"loss": 0.0239, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 56.0, |
|
"eval_accuracy_safe": 0.7650066884861662, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9967202989102981, |
|
"eval_iou_safe": 0.6905937665302486, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9896417850752393, |
|
"eval_loss": 0.041364945471286774, |
|
"eval_mean_accuracy": 0.8808634936982321, |
|
"eval_mean_iou": 0.8401177758027439, |
|
"eval_overall_accuracy": 0.9898758503928113, |
|
"eval_runtime": 9.7968, |
|
"eval_samples_per_second": 6.839, |
|
"eval_steps_per_second": 0.51, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 56.1, |
|
"learning_rate": 5.610000000000001e-05, |
|
"loss": 0.0186, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 56.2, |
|
"learning_rate": 5.620000000000001e-05, |
|
"loss": 0.0207, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 56.3, |
|
"learning_rate": 5.63e-05, |
|
"loss": 0.0186, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 56.4, |
|
"learning_rate": 5.6399999999999995e-05, |
|
"loss": 0.0218, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 56.5, |
|
"learning_rate": 5.65e-05, |
|
"loss": 0.0165, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 56.6, |
|
"learning_rate": 5.66e-05, |
|
"loss": 0.021, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 56.7, |
|
"learning_rate": 5.6699999999999996e-05, |
|
"loss": 0.0188, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 56.8, |
|
"learning_rate": 5.68e-05, |
|
"loss": 0.0171, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 56.9, |
|
"learning_rate": 5.69e-05, |
|
"loss": 0.0193, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 57.0, |
|
"learning_rate": 5.6999999999999996e-05, |
|
"loss": 0.0197, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 57.1, |
|
"learning_rate": 5.71e-05, |
|
"loss": 0.0196, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 57.2, |
|
"learning_rate": 5.72e-05, |
|
"loss": 0.0176, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 57.3, |
|
"learning_rate": 5.73e-05, |
|
"loss": 0.0183, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 57.4, |
|
"learning_rate": 5.74e-05, |
|
"loss": 0.0172, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 57.5, |
|
"learning_rate": 5.7499999999999995e-05, |
|
"loss": 0.0222, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 57.6, |
|
"learning_rate": 5.76e-05, |
|
"loss": 0.0191, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 57.7, |
|
"learning_rate": 5.77e-05, |
|
"loss": 0.0159, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 57.8, |
|
"learning_rate": 5.7799999999999995e-05, |
|
"loss": 0.0146, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 57.9, |
|
"learning_rate": 5.79e-05, |
|
"loss": 0.0188, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 58.0, |
|
"learning_rate": 5.8e-05, |
|
"loss": 0.0221, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 58.0, |
|
"eval_accuracy_safe": 0.7976993149602353, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9965618932550051, |
|
"eval_iou_safe": 0.7167390596532064, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9904630957795426, |
|
"eval_loss": 0.03748689964413643, |
|
"eval_mean_accuracy": 0.8971306041076201, |
|
"eval_mean_iou": 0.8536010777163745, |
|
"eval_overall_accuracy": 0.9906878115525887, |
|
"eval_runtime": 9.8334, |
|
"eval_samples_per_second": 6.814, |
|
"eval_steps_per_second": 0.508, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 58.1, |
|
"learning_rate": 5.8099999999999996e-05, |
|
"loss": 0.0206, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 58.2, |
|
"learning_rate": 5.82e-05, |
|
"loss": 0.0216, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 58.3, |
|
"learning_rate": 5.83e-05, |
|
"loss": 0.0168, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 58.4, |
|
"learning_rate": 5.8399999999999997e-05, |
|
"loss": 0.0307, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 58.5, |
|
"learning_rate": 5.85e-05, |
|
"loss": 0.0148, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 58.6, |
|
"learning_rate": 5.86e-05, |
|
"loss": 0.0172, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 58.7, |
|
"learning_rate": 5.87e-05, |
|
"loss": 0.0162, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 58.8, |
|
"learning_rate": 5.88e-05, |
|
"loss": 0.0183, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 58.9, |
|
"learning_rate": 5.89e-05, |
|
"loss": 0.0194, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 59.0, |
|
"learning_rate": 5.9e-05, |
|
"loss": 0.0196, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 59.1, |
|
"learning_rate": 5.91e-05, |
|
"loss": 0.0141, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 59.2, |
|
"learning_rate": 5.92e-05, |
|
"loss": 0.0171, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 59.3, |
|
"learning_rate": 5.93e-05, |
|
"loss": 0.0177, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 59.4, |
|
"learning_rate": 5.94e-05, |
|
"loss": 0.0222, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 59.5, |
|
"learning_rate": 5.95e-05, |
|
"loss": 0.0166, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 59.6, |
|
"learning_rate": 5.96e-05, |
|
"loss": 0.0174, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 59.7, |
|
"learning_rate": 5.97e-05, |
|
"loss": 0.0154, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 59.8, |
|
"learning_rate": 5.9800000000000003e-05, |
|
"loss": 0.0232, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 59.9, |
|
"learning_rate": 5.99e-05, |
|
"loss": 0.0157, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 60.0, |
|
"learning_rate": 6e-05, |
|
"loss": 0.0324, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 60.0, |
|
"eval_accuracy_safe": 0.8068685163125817, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9964494839085082, |
|
"eval_iou_safe": 0.7225799212870262, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9906261535768677, |
|
"eval_loss": 0.03895491361618042, |
|
"eval_mean_accuracy": 0.9016590001105449, |
|
"eval_mean_iou": 0.8566030374319469, |
|
"eval_overall_accuracy": 0.9908495661038071, |
|
"eval_runtime": 10.0022, |
|
"eval_samples_per_second": 6.699, |
|
"eval_steps_per_second": 0.5, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 60.1, |
|
"learning_rate": 6.0100000000000004e-05, |
|
"loss": 0.0155, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 60.2, |
|
"learning_rate": 6.02e-05, |
|
"loss": 0.0155, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 60.3, |
|
"learning_rate": 6.03e-05, |
|
"loss": 0.0176, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 60.4, |
|
"learning_rate": 6.04e-05, |
|
"loss": 0.0234, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 60.5, |
|
"learning_rate": 6.05e-05, |
|
"loss": 0.0178, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 60.6, |
|
"learning_rate": 6.06e-05, |
|
"loss": 0.0157, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 60.7, |
|
"learning_rate": 6.07e-05, |
|
"loss": 0.0164, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 60.8, |
|
"learning_rate": 6.08e-05, |
|
"loss": 0.0192, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 60.9, |
|
"learning_rate": 6.09e-05, |
|
"loss": 0.0182, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 61.0, |
|
"learning_rate": 6.1e-05, |
|
"loss": 0.0202, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 61.1, |
|
"learning_rate": 6.110000000000001e-05, |
|
"loss": 0.0204, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 61.2, |
|
"learning_rate": 6.12e-05, |
|
"loss": 0.0137, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 61.3, |
|
"learning_rate": 6.13e-05, |
|
"loss": 0.017, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 61.4, |
|
"learning_rate": 6.14e-05, |
|
"loss": 0.0159, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 61.5, |
|
"learning_rate": 6.15e-05, |
|
"loss": 0.0151, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 61.6, |
|
"learning_rate": 6.16e-05, |
|
"loss": 0.015, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 61.7, |
|
"learning_rate": 6.170000000000001e-05, |
|
"loss": 0.0172, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 61.8, |
|
"learning_rate": 6.18e-05, |
|
"loss": 0.0191, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 61.9, |
|
"learning_rate": 6.19e-05, |
|
"loss": 0.0179, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 62.0, |
|
"learning_rate": 6.2e-05, |
|
"loss": 0.0264, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 62.0, |
|
"eval_accuracy_safe": 0.8224043083874002, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9952494143977599, |
|
"eval_iou_safe": 0.7113750989954566, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9898984552881916, |
|
"eval_loss": 0.04052896052598953, |
|
"eval_mean_accuracy": 0.9088268613925801, |
|
"eval_mean_iou": 0.8506367771418242, |
|
"eval_overall_accuracy": 0.9901438471096665, |
|
"eval_runtime": 9.9165, |
|
"eval_samples_per_second": 6.756, |
|
"eval_steps_per_second": 0.504, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 62.1, |
|
"learning_rate": 6.21e-05, |
|
"loss": 0.0192, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 62.2, |
|
"learning_rate": 6.220000000000001e-05, |
|
"loss": 0.0147, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 62.3, |
|
"learning_rate": 6.23e-05, |
|
"loss": 0.0165, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 62.4, |
|
"learning_rate": 6.24e-05, |
|
"loss": 0.0152, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 62.5, |
|
"learning_rate": 6.25e-05, |
|
"loss": 0.0182, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 62.6, |
|
"learning_rate": 6.26e-05, |
|
"loss": 0.0183, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 62.7, |
|
"learning_rate": 6.27e-05, |
|
"loss": 0.0166, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 62.8, |
|
"learning_rate": 6.280000000000001e-05, |
|
"loss": 0.0164, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 62.9, |
|
"learning_rate": 6.29e-05, |
|
"loss": 0.0197, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 63.0, |
|
"learning_rate": 6.3e-05, |
|
"loss": 0.0156, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 63.1, |
|
"learning_rate": 6.31e-05, |
|
"loss": 0.0154, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 63.2, |
|
"learning_rate": 6.32e-05, |
|
"loss": 0.0157, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 63.3, |
|
"learning_rate": 6.330000000000001e-05, |
|
"loss": 0.0182, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 63.4, |
|
"learning_rate": 6.340000000000001e-05, |
|
"loss": 0.0158, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 63.5, |
|
"learning_rate": 6.35e-05, |
|
"loss": 0.0164, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 63.6, |
|
"learning_rate": 6.36e-05, |
|
"loss": 0.0165, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 63.7, |
|
"learning_rate": 6.37e-05, |
|
"loss": 0.0174, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 63.8, |
|
"learning_rate": 6.38e-05, |
|
"loss": 0.0187, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 63.9, |
|
"learning_rate": 6.390000000000001e-05, |
|
"loss": 0.0159, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 64.0, |
|
"learning_rate": 6.400000000000001e-05, |
|
"loss": 0.0146, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 64.0, |
|
"eval_accuracy_safe": 0.8358294686604909, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.995804948897749, |
|
"eval_iou_safe": 0.7345851134835153, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9908537165576825, |
|
"eval_loss": 0.03560740128159523, |
|
"eval_mean_accuracy": 0.9158172087791199, |
|
"eval_mean_iou": 0.8627194150205989, |
|
"eval_overall_accuracy": 0.9910795297195663, |
|
"eval_runtime": 9.931, |
|
"eval_samples_per_second": 6.747, |
|
"eval_steps_per_second": 0.503, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 64.1, |
|
"learning_rate": 6.41e-05, |
|
"loss": 0.0193, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 64.2, |
|
"learning_rate": 6.42e-05, |
|
"loss": 0.0134, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 64.3, |
|
"learning_rate": 6.43e-05, |
|
"loss": 0.0151, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 64.4, |
|
"learning_rate": 6.440000000000001e-05, |
|
"loss": 0.0163, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 64.5, |
|
"learning_rate": 6.450000000000001e-05, |
|
"loss": 0.0152, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 64.6, |
|
"learning_rate": 6.460000000000001e-05, |
|
"loss": 0.0151, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 64.7, |
|
"learning_rate": 6.47e-05, |
|
"loss": 0.0175, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 64.8, |
|
"learning_rate": 6.48e-05, |
|
"loss": 0.0145, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 64.9, |
|
"learning_rate": 6.49e-05, |
|
"loss": 0.0169, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 65.0, |
|
"learning_rate": 6.500000000000001e-05, |
|
"loss": 0.0147, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 65.1, |
|
"learning_rate": 6.510000000000001e-05, |
|
"loss": 0.0147, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 65.2, |
|
"learning_rate": 6.52e-05, |
|
"loss": 0.0155, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 65.3, |
|
"learning_rate": 6.53e-05, |
|
"loss": 0.0148, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 65.4, |
|
"learning_rate": 6.54e-05, |
|
"loss": 0.0135, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 65.5, |
|
"learning_rate": 6.55e-05, |
|
"loss": 0.0133, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 65.6, |
|
"learning_rate": 6.560000000000001e-05, |
|
"loss": 0.0136, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 65.7, |
|
"learning_rate": 6.570000000000001e-05, |
|
"loss": 0.0173, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 65.8, |
|
"learning_rate": 6.58e-05, |
|
"loss": 0.0142, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 65.9, |
|
"learning_rate": 6.59e-05, |
|
"loss": 0.0165, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 66.0, |
|
"learning_rate": 6.6e-05, |
|
"loss": 0.0252, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 66.0, |
|
"eval_accuracy_safe": 0.8046113931711906, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9974217426194405, |
|
"eval_iou_safe": 0.7417778472780001, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.991525009544381, |
|
"eval_loss": 0.030962325632572174, |
|
"eval_mean_accuracy": 0.9010165678953155, |
|
"eval_mean_iou": 0.8666514284111906, |
|
"eval_overall_accuracy": 0.9917264340528801, |
|
"eval_runtime": 9.7104, |
|
"eval_samples_per_second": 6.9, |
|
"eval_steps_per_second": 0.515, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 66.1, |
|
"learning_rate": 6.610000000000001e-05, |
|
"loss": 0.0154, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 66.2, |
|
"learning_rate": 6.620000000000001e-05, |
|
"loss": 0.0141, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 66.3, |
|
"learning_rate": 6.630000000000001e-05, |
|
"loss": 0.018, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 66.4, |
|
"learning_rate": 6.64e-05, |
|
"loss": 0.0162, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 66.5, |
|
"learning_rate": 6.65e-05, |
|
"loss": 0.0191, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 66.6, |
|
"learning_rate": 6.66e-05, |
|
"loss": 0.0175, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 66.7, |
|
"learning_rate": 6.670000000000001e-05, |
|
"loss": 0.0178, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 66.8, |
|
"learning_rate": 6.680000000000001e-05, |
|
"loss": 0.0136, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 66.9, |
|
"learning_rate": 6.690000000000001e-05, |
|
"loss": 0.0149, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 67.0, |
|
"learning_rate": 6.7e-05, |
|
"loss": 0.0209, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 67.1, |
|
"learning_rate": 6.71e-05, |
|
"loss": 0.0178, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 67.2, |
|
"learning_rate": 6.720000000000001e-05, |
|
"loss": 0.017, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 67.3, |
|
"learning_rate": 6.730000000000001e-05, |
|
"loss": 0.0201, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 67.4, |
|
"learning_rate": 6.740000000000001e-05, |
|
"loss": 0.0164, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 67.5, |
|
"learning_rate": 6.750000000000001e-05, |
|
"loss": 0.0175, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 67.6, |
|
"learning_rate": 6.76e-05, |
|
"loss": 0.016, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 67.7, |
|
"learning_rate": 6.77e-05, |
|
"loss": 0.0171, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 67.8, |
|
"learning_rate": 6.780000000000001e-05, |
|
"loss": 0.0166, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 67.9, |
|
"learning_rate": 6.790000000000001e-05, |
|
"loss": 0.02, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 68.0, |
|
"learning_rate": 6.800000000000001e-05, |
|
"loss": 0.0155, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 68.0, |
|
"eval_accuracy_safe": 0.815156842109321, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9961157173259295, |
|
"eval_iou_safe": 0.7229034083236326, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.990542767020076, |
|
"eval_loss": 0.03587646782398224, |
|
"eval_mean_accuracy": 0.9056362797176253, |
|
"eval_mean_iou": 0.8567230876718543, |
|
"eval_overall_accuracy": 0.9907704823052705, |
|
"eval_runtime": 9.8125, |
|
"eval_samples_per_second": 6.828, |
|
"eval_steps_per_second": 0.51, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 68.1, |
|
"learning_rate": 6.81e-05, |
|
"loss": 0.0142, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 68.2, |
|
"learning_rate": 6.82e-05, |
|
"loss": 0.0171, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 68.3, |
|
"learning_rate": 6.83e-05, |
|
"loss": 0.0248, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 68.4, |
|
"learning_rate": 6.840000000000001e-05, |
|
"loss": 0.014, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 68.5, |
|
"learning_rate": 6.850000000000001e-05, |
|
"loss": 0.0152, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 68.6, |
|
"learning_rate": 6.860000000000001e-05, |
|
"loss": 0.0184, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 68.7, |
|
"learning_rate": 6.87e-05, |
|
"loss": 0.0148, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 68.8, |
|
"learning_rate": 6.879999999999999e-05, |
|
"loss": 0.0143, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 68.9, |
|
"learning_rate": 6.89e-05, |
|
"loss": 0.0191, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 69.0, |
|
"learning_rate": 6.9e-05, |
|
"loss": 0.014, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 69.1, |
|
"learning_rate": 6.91e-05, |
|
"loss": 0.0146, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 69.2, |
|
"learning_rate": 6.92e-05, |
|
"loss": 0.0141, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 69.3, |
|
"learning_rate": 6.93e-05, |
|
"loss": 0.0153, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 69.4, |
|
"learning_rate": 6.939999999999999e-05, |
|
"loss": 0.0192, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 69.5, |
|
"learning_rate": 6.95e-05, |
|
"loss": 0.0222, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 69.6, |
|
"learning_rate": 6.96e-05, |
|
"loss": 0.0168, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 69.7, |
|
"learning_rate": 6.97e-05, |
|
"loss": 0.0153, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 69.8, |
|
"learning_rate": 6.98e-05, |
|
"loss": 0.0147, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 69.9, |
|
"learning_rate": 6.99e-05, |
|
"loss": 0.0152, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 70.0, |
|
"learning_rate": 7e-05, |
|
"loss": 0.0169, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 70.0, |
|
"eval_accuracy_safe": 0.842248102358896, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9941141738681594, |
|
"eval_iou_safe": 0.7057702402584293, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9893636761526805, |
|
"eval_loss": 0.049034394323825836, |
|
"eval_mean_accuracy": 0.9181811381135276, |
|
"eval_mean_iou": 0.8475669582055549, |
|
"eval_overall_accuracy": 0.9896282936210063, |
|
"eval_runtime": 9.7248, |
|
"eval_samples_per_second": 6.89, |
|
"eval_steps_per_second": 0.514, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 70.1, |
|
"learning_rate": 7.01e-05, |
|
"loss": 0.023, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 70.2, |
|
"learning_rate": 7.02e-05, |
|
"loss": 0.014, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 70.3, |
|
"learning_rate": 7.03e-05, |
|
"loss": 0.0134, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 70.4, |
|
"learning_rate": 7.04e-05, |
|
"loss": 0.0188, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 70.5, |
|
"learning_rate": 7.05e-05, |
|
"loss": 0.0215, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 70.6, |
|
"learning_rate": 7.06e-05, |
|
"loss": 0.0178, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 70.7, |
|
"learning_rate": 7.07e-05, |
|
"loss": 0.014, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 70.8, |
|
"learning_rate": 7.08e-05, |
|
"loss": 0.015, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 70.9, |
|
"learning_rate": 7.09e-05, |
|
"loss": 0.016, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 71.0, |
|
"learning_rate": 7.1e-05, |
|
"loss": 0.0149, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 71.1, |
|
"learning_rate": 7.11e-05, |
|
"loss": 0.0155, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 71.2, |
|
"learning_rate": 7.12e-05, |
|
"loss": 0.0194, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 71.3, |
|
"learning_rate": 7.13e-05, |
|
"loss": 0.016, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 71.4, |
|
"learning_rate": 7.14e-05, |
|
"loss": 0.0154, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 71.5, |
|
"learning_rate": 7.15e-05, |
|
"loss": 0.016, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 71.6, |
|
"learning_rate": 7.16e-05, |
|
"loss": 0.0125, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 71.7, |
|
"learning_rate": 7.17e-05, |
|
"loss": 0.0154, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 71.8, |
|
"learning_rate": 7.18e-05, |
|
"loss": 0.0205, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 71.9, |
|
"learning_rate": 7.19e-05, |
|
"loss": 0.0255, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 72.0, |
|
"learning_rate": 7.2e-05, |
|
"loss": 0.0142, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 72.0, |
|
"eval_accuracy_safe": 0.7568108835355299, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9974447407738386, |
|
"eval_iou_safe": 0.6981965160858957, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9901158423210092, |
|
"eval_loss": 0.03573448210954666, |
|
"eval_mean_accuracy": 0.8771278121546843, |
|
"eval_mean_iou": 0.8441561792034524, |
|
"eval_overall_accuracy": 0.9903368024683711, |
|
"eval_runtime": 9.7254, |
|
"eval_samples_per_second": 6.889, |
|
"eval_steps_per_second": 0.514, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 72.1, |
|
"learning_rate": 7.21e-05, |
|
"loss": 0.0148, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 72.2, |
|
"learning_rate": 7.22e-05, |
|
"loss": 0.0139, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 72.3, |
|
"learning_rate": 7.23e-05, |
|
"loss": 0.0145, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 72.4, |
|
"learning_rate": 7.24e-05, |
|
"loss": 0.0136, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 72.5, |
|
"learning_rate": 7.25e-05, |
|
"loss": 0.0342, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 72.6, |
|
"learning_rate": 7.26e-05, |
|
"loss": 0.0143, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 72.7, |
|
"learning_rate": 7.27e-05, |
|
"loss": 0.0145, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 72.8, |
|
"learning_rate": 7.280000000000001e-05, |
|
"loss": 0.0186, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 72.9, |
|
"learning_rate": 7.29e-05, |
|
"loss": 0.0174, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 73.0, |
|
"learning_rate": 7.3e-05, |
|
"loss": 0.0142, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 73.1, |
|
"learning_rate": 7.31e-05, |
|
"loss": 0.0189, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 73.2, |
|
"learning_rate": 7.32e-05, |
|
"loss": 0.0144, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 73.3, |
|
"learning_rate": 7.33e-05, |
|
"loss": 0.0129, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 73.4, |
|
"learning_rate": 7.340000000000001e-05, |
|
"loss": 0.0169, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 73.5, |
|
"learning_rate": 7.35e-05, |
|
"loss": 0.0112, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 73.6, |
|
"learning_rate": 7.36e-05, |
|
"loss": 0.0154, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 73.7, |
|
"learning_rate": 7.37e-05, |
|
"loss": 0.0141, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 73.8, |
|
"learning_rate": 7.38e-05, |
|
"loss": 0.0118, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 73.9, |
|
"learning_rate": 7.390000000000001e-05, |
|
"loss": 0.0139, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 74.0, |
|
"learning_rate": 7.4e-05, |
|
"loss": 0.0244, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 74.0, |
|
"eval_accuracy_safe": 0.8183468837822522, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9955801302047551, |
|
"eval_iou_safe": 0.7145814608727266, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9901057745896398, |
|
"eval_loss": 0.03999461978673935, |
|
"eval_mean_accuracy": 0.9069635069935036, |
|
"eval_mean_iou": 0.8523436177311832, |
|
"eval_overall_accuracy": 0.990344944284923, |
|
"eval_runtime": 9.8243, |
|
"eval_samples_per_second": 6.82, |
|
"eval_steps_per_second": 0.509, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 74.1, |
|
"learning_rate": 7.41e-05, |
|
"loss": 0.0117, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 74.2, |
|
"learning_rate": 7.42e-05, |
|
"loss": 0.013, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 74.3, |
|
"learning_rate": 7.43e-05, |
|
"loss": 0.0196, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 74.4, |
|
"learning_rate": 7.44e-05, |
|
"loss": 0.0149, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 74.5, |
|
"learning_rate": 7.450000000000001e-05, |
|
"loss": 0.0112, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 74.6, |
|
"learning_rate": 7.46e-05, |
|
"loss": 0.0149, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 74.7, |
|
"learning_rate": 7.47e-05, |
|
"loss": 0.0132, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 74.8, |
|
"learning_rate": 7.48e-05, |
|
"loss": 0.0143, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 74.9, |
|
"learning_rate": 7.49e-05, |
|
"loss": 0.0193, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 75.0, |
|
"learning_rate": 7.500000000000001e-05, |
|
"loss": 0.0151, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 75.1, |
|
"learning_rate": 7.510000000000001e-05, |
|
"loss": 0.0157, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 75.2, |
|
"learning_rate": 7.52e-05, |
|
"loss": 0.0145, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 75.3, |
|
"learning_rate": 7.53e-05, |
|
"loss": 0.0126, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 75.4, |
|
"learning_rate": 7.54e-05, |
|
"loss": 0.0131, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 75.5, |
|
"learning_rate": 7.55e-05, |
|
"loss": 0.0133, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 75.6, |
|
"learning_rate": 7.560000000000001e-05, |
|
"loss": 0.0114, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 75.7, |
|
"learning_rate": 7.570000000000001e-05, |
|
"loss": 0.0192, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 75.8, |
|
"learning_rate": 7.58e-05, |
|
"loss": 0.0175, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 75.9, |
|
"learning_rate": 7.59e-05, |
|
"loss": 0.0102, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 76.0, |
|
"learning_rate": 7.6e-05, |
|
"loss": 0.016, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 76.0, |
|
"eval_accuracy_safe": 0.810463336687214, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9969938126751042, |
|
"eval_iou_safe": 0.7376123162854666, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9912751299671508, |
|
"eval_loss": 0.03021140582859516, |
|
"eval_mean_accuracy": 0.9037285746811592, |
|
"eval_mean_iou": 0.8644437231263087, |
|
"eval_overall_accuracy": 0.9914840015012827, |
|
"eval_runtime": 9.5474, |
|
"eval_samples_per_second": 7.018, |
|
"eval_steps_per_second": 0.524, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 76.1, |
|
"learning_rate": 7.61e-05, |
|
"loss": 0.0124, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 76.2, |
|
"learning_rate": 7.620000000000001e-05, |
|
"loss": 0.0115, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 76.3, |
|
"learning_rate": 7.630000000000001e-05, |
|
"loss": 0.0122, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 76.4, |
|
"learning_rate": 7.64e-05, |
|
"loss": 0.0138, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 76.5, |
|
"learning_rate": 7.65e-05, |
|
"loss": 0.012, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 76.6, |
|
"learning_rate": 7.66e-05, |
|
"loss": 0.0154, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 76.7, |
|
"learning_rate": 7.670000000000001e-05, |
|
"loss": 0.0122, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 76.8, |
|
"learning_rate": 7.680000000000001e-05, |
|
"loss": 0.0146, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 76.9, |
|
"learning_rate": 7.69e-05, |
|
"loss": 0.0129, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 77.0, |
|
"learning_rate": 7.7e-05, |
|
"loss": 0.013, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 77.1, |
|
"learning_rate": 7.71e-05, |
|
"loss": 0.0128, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 77.2, |
|
"learning_rate": 7.72e-05, |
|
"loss": 0.0121, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 77.3, |
|
"learning_rate": 7.730000000000001e-05, |
|
"loss": 0.0119, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 77.4, |
|
"learning_rate": 7.740000000000001e-05, |
|
"loss": 0.0121, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 77.5, |
|
"learning_rate": 7.75e-05, |
|
"loss": 0.0102, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 77.6, |
|
"learning_rate": 7.76e-05, |
|
"loss": 0.0128, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 77.7, |
|
"learning_rate": 7.77e-05, |
|
"loss": 0.0161, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 77.8, |
|
"learning_rate": 7.780000000000001e-05, |
|
"loss": 0.0151, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 77.9, |
|
"learning_rate": 7.790000000000001e-05, |
|
"loss": 0.0113, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 78.0, |
|
"learning_rate": 7.800000000000001e-05, |
|
"loss": 0.0137, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 78.0, |
|
"eval_accuracy_safe": 0.8270785386332358, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9964887333097642, |
|
"eval_iou_safe": 0.7415350530803542, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9912713826642041, |
|
"eval_loss": 0.032535370439291, |
|
"eval_mean_accuracy": 0.9117836359714999, |
|
"eval_mean_iou": 0.8664032178722791, |
|
"eval_overall_accuracy": 0.9914846277948636, |
|
"eval_runtime": 9.7653, |
|
"eval_samples_per_second": 6.861, |
|
"eval_steps_per_second": 0.512, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 78.1, |
|
"learning_rate": 7.81e-05, |
|
"loss": 0.0108, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 78.2, |
|
"learning_rate": 7.82e-05, |
|
"loss": 0.0107, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 78.3, |
|
"learning_rate": 7.83e-05, |
|
"loss": 0.0143, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 78.4, |
|
"learning_rate": 7.840000000000001e-05, |
|
"loss": 0.0123, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 78.5, |
|
"learning_rate": 7.850000000000001e-05, |
|
"loss": 0.0115, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 78.6, |
|
"learning_rate": 7.860000000000001e-05, |
|
"loss": 0.0135, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 78.7, |
|
"learning_rate": 7.87e-05, |
|
"loss": 0.0122, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 78.8, |
|
"learning_rate": 7.88e-05, |
|
"loss": 0.0108, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 78.9, |
|
"learning_rate": 7.890000000000001e-05, |
|
"loss": 0.0118, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 79.0, |
|
"learning_rate": 7.900000000000001e-05, |
|
"loss": 0.0161, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 79.1, |
|
"learning_rate": 7.910000000000001e-05, |
|
"loss": 0.0096, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 79.2, |
|
"learning_rate": 7.920000000000001e-05, |
|
"loss": 0.0129, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 79.3, |
|
"learning_rate": 7.93e-05, |
|
"loss": 0.0111, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 79.4, |
|
"learning_rate": 7.94e-05, |
|
"loss": 0.0115, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 79.5, |
|
"learning_rate": 7.950000000000001e-05, |
|
"loss": 0.0101, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 79.6, |
|
"learning_rate": 7.960000000000001e-05, |
|
"loss": 0.0133, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 79.7, |
|
"learning_rate": 7.970000000000001e-05, |
|
"loss": 0.0138, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 79.8, |
|
"learning_rate": 7.98e-05, |
|
"loss": 0.0115, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 79.9, |
|
"learning_rate": 7.99e-05, |
|
"loss": 0.01, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 80.0, |
|
"learning_rate": 8e-05, |
|
"loss": 0.0115, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 80.0, |
|
"eval_accuracy_safe": 0.8362014795625305, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9962431458752986, |
|
"eval_iou_safe": 0.744329889831977, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.991300903541046, |
|
"eval_loss": 0.03468828648328781, |
|
"eval_mean_accuracy": 0.9162223127189146, |
|
"eval_mean_iou": 0.8678153966865115, |
|
"eval_overall_accuracy": 0.9915157716665695, |
|
"eval_runtime": 9.6306, |
|
"eval_samples_per_second": 6.957, |
|
"eval_steps_per_second": 0.519, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 80.1, |
|
"learning_rate": 8.010000000000001e-05, |
|
"loss": 0.0116, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 80.2, |
|
"learning_rate": 8.020000000000001e-05, |
|
"loss": 0.0116, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 80.3, |
|
"learning_rate": 8.030000000000001e-05, |
|
"loss": 0.0123, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 80.4, |
|
"learning_rate": 8.04e-05, |
|
"loss": 0.0125, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 80.5, |
|
"learning_rate": 8.05e-05, |
|
"loss": 0.0104, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 80.6, |
|
"learning_rate": 8.060000000000001e-05, |
|
"loss": 0.0122, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 80.7, |
|
"learning_rate": 8.070000000000001e-05, |
|
"loss": 0.0113, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 80.8, |
|
"learning_rate": 8.080000000000001e-05, |
|
"loss": 0.0099, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 80.9, |
|
"learning_rate": 8.090000000000001e-05, |
|
"loss": 0.0112, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 81.0, |
|
"learning_rate": 8.1e-05, |
|
"loss": 0.009, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 81.1, |
|
"learning_rate": 8.11e-05, |
|
"loss": 0.0098, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 81.2, |
|
"learning_rate": 8.120000000000001e-05, |
|
"loss": 0.0093, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 81.3, |
|
"learning_rate": 8.13e-05, |
|
"loss": 0.0125, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 81.4, |
|
"learning_rate": 8.14e-05, |
|
"loss": 0.0121, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 81.5, |
|
"learning_rate": 8.15e-05, |
|
"loss": 0.0103, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 81.6, |
|
"learning_rate": 8.16e-05, |
|
"loss": 0.0122, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 81.7, |
|
"learning_rate": 8.17e-05, |
|
"loss": 0.0109, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 81.8, |
|
"learning_rate": 8.18e-05, |
|
"loss": 0.0108, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 81.9, |
|
"learning_rate": 8.19e-05, |
|
"loss": 0.0119, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 82.0, |
|
"learning_rate": 8.2e-05, |
|
"loss": 0.0117, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 82.0, |
|
"eval_accuracy_safe": 0.8196653058392219, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9970733088465569, |
|
"eval_iou_safe": 0.7477645875499176, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9916303175021431, |
|
"eval_loss": 0.032025035470724106, |
|
"eval_mean_accuracy": 0.9083693073428893, |
|
"eval_mean_iou": 0.8696974525260304, |
|
"eval_overall_accuracy": 0.9918329608974172, |
|
"eval_runtime": 9.5737, |
|
"eval_samples_per_second": 6.998, |
|
"eval_steps_per_second": 0.522, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 82.1, |
|
"learning_rate": 8.21e-05, |
|
"loss": 0.0095, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 82.2, |
|
"learning_rate": 8.22e-05, |
|
"loss": 0.0106, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 82.3, |
|
"learning_rate": 8.23e-05, |
|
"loss": 0.0105, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 82.4, |
|
"learning_rate": 8.24e-05, |
|
"loss": 0.0097, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 82.5, |
|
"learning_rate": 8.25e-05, |
|
"loss": 0.0104, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 82.6, |
|
"learning_rate": 8.26e-05, |
|
"loss": 0.0105, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 82.7, |
|
"learning_rate": 8.27e-05, |
|
"loss": 0.0144, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 82.8, |
|
"learning_rate": 8.28e-05, |
|
"loss": 0.0102, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 82.9, |
|
"learning_rate": 8.29e-05, |
|
"loss": 0.0122, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 83.0, |
|
"learning_rate": 8.3e-05, |
|
"loss": 0.0116, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 83.1, |
|
"learning_rate": 8.31e-05, |
|
"loss": 0.01, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 83.2, |
|
"learning_rate": 8.32e-05, |
|
"loss": 0.0108, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 83.3, |
|
"learning_rate": 8.33e-05, |
|
"loss": 0.0115, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 83.4, |
|
"learning_rate": 8.34e-05, |
|
"loss": 0.0132, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 83.5, |
|
"learning_rate": 8.35e-05, |
|
"loss": 0.0135, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 83.6, |
|
"learning_rate": 8.36e-05, |
|
"loss": 0.0103, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 83.7, |
|
"learning_rate": 8.37e-05, |
|
"loss": 0.0122, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 83.8, |
|
"learning_rate": 8.38e-05, |
|
"loss": 0.0098, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 83.9, |
|
"learning_rate": 8.39e-05, |
|
"loss": 0.0109, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 84.0, |
|
"learning_rate": 8.4e-05, |
|
"loss": 0.0108, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 84.0, |
|
"eval_accuracy_safe": 0.8422924352643205, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9961086770745832, |
|
"eval_iou_safe": 0.746814804940449, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9913499796954048, |
|
"eval_loss": 0.03481964394450188, |
|
"eval_mean_accuracy": 0.9192005561694518, |
|
"eval_mean_iou": 0.8690823923179269, |
|
"eval_overall_accuracy": 0.9915651919236824, |
|
"eval_runtime": 9.5854, |
|
"eval_samples_per_second": 6.99, |
|
"eval_steps_per_second": 0.522, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 84.1, |
|
"learning_rate": 8.41e-05, |
|
"loss": 0.0102, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 84.2, |
|
"learning_rate": 8.42e-05, |
|
"loss": 0.01, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 84.3, |
|
"learning_rate": 8.43e-05, |
|
"loss": 0.0117, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 84.4, |
|
"learning_rate": 8.44e-05, |
|
"loss": 0.0118, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 84.5, |
|
"learning_rate": 8.450000000000001e-05, |
|
"loss": 0.01, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 84.6, |
|
"learning_rate": 8.46e-05, |
|
"loss": 0.0113, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 84.7, |
|
"learning_rate": 8.47e-05, |
|
"loss": 0.0094, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 84.8, |
|
"learning_rate": 8.48e-05, |
|
"loss": 0.0129, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 84.9, |
|
"learning_rate": 8.49e-05, |
|
"loss": 0.0117, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 85.0, |
|
"learning_rate": 8.5e-05, |
|
"loss": 0.0089, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 85.1, |
|
"learning_rate": 8.510000000000001e-05, |
|
"loss": 0.0113, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 85.2, |
|
"learning_rate": 8.52e-05, |
|
"loss": 0.0105, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 85.3, |
|
"learning_rate": 8.53e-05, |
|
"loss": 0.0122, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 85.4, |
|
"learning_rate": 8.54e-05, |
|
"loss": 0.0092, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 85.5, |
|
"learning_rate": 8.55e-05, |
|
"loss": 0.012, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 85.6, |
|
"learning_rate": 8.560000000000001e-05, |
|
"loss": 0.0134, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 85.7, |
|
"learning_rate": 8.57e-05, |
|
"loss": 0.0091, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 85.8, |
|
"learning_rate": 8.58e-05, |
|
"loss": 0.0096, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 85.9, |
|
"learning_rate": 8.59e-05, |
|
"loss": 0.01, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 86.0, |
|
"learning_rate": 8.6e-05, |
|
"loss": 0.0101, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 86.0, |
|
"eval_accuracy_safe": 0.8191641512561633, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9969775614282465, |
|
"eval_iou_safe": 0.7451689317732707, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9915200508044267, |
|
"eval_loss": 0.03419874235987663, |
|
"eval_mean_accuracy": 0.9080708563422049, |
|
"eval_mean_iou": 0.8683444912888487, |
|
"eval_overall_accuracy": 0.9917252384014984, |
|
"eval_runtime": 9.713, |
|
"eval_samples_per_second": 6.898, |
|
"eval_steps_per_second": 0.515, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 86.1, |
|
"learning_rate": 8.61e-05, |
|
"loss": 0.0091, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 86.2, |
|
"learning_rate": 8.620000000000001e-05, |
|
"loss": 0.0108, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 86.3, |
|
"learning_rate": 8.63e-05, |
|
"loss": 0.0092, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 86.4, |
|
"learning_rate": 8.64e-05, |
|
"loss": 0.0117, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 86.5, |
|
"learning_rate": 8.65e-05, |
|
"loss": 0.0084, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 86.6, |
|
"learning_rate": 8.66e-05, |
|
"loss": 0.0109, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 86.7, |
|
"learning_rate": 8.67e-05, |
|
"loss": 0.011, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 86.8, |
|
"learning_rate": 8.680000000000001e-05, |
|
"loss": 0.0106, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 86.9, |
|
"learning_rate": 8.69e-05, |
|
"loss": 0.0109, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 87.0, |
|
"learning_rate": 8.7e-05, |
|
"loss": 0.0091, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 87.1, |
|
"learning_rate": 8.71e-05, |
|
"loss": 0.009, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 87.2, |
|
"learning_rate": 8.72e-05, |
|
"loss": 0.0098, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 87.3, |
|
"learning_rate": 8.730000000000001e-05, |
|
"loss": 0.0098, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 87.4, |
|
"learning_rate": 8.740000000000001e-05, |
|
"loss": 0.0095, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 87.5, |
|
"learning_rate": 8.75e-05, |
|
"loss": 0.0143, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 87.6, |
|
"learning_rate": 8.76e-05, |
|
"loss": 0.0113, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 87.7, |
|
"learning_rate": 8.77e-05, |
|
"loss": 0.0112, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 87.8, |
|
"learning_rate": 8.78e-05, |
|
"loss": 0.0092, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 87.9, |
|
"learning_rate": 8.790000000000001e-05, |
|
"loss": 0.0087, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 88.0, |
|
"learning_rate": 8.800000000000001e-05, |
|
"loss": 0.0085, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 88.0, |
|
"eval_accuracy_safe": 0.8473849368352474, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9954396185216341, |
|
"eval_iou_safe": 0.7369667730033376, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9908369662899892, |
|
"eval_loss": 0.04408281296491623, |
|
"eval_mean_accuracy": 0.9214122776784408, |
|
"eval_mean_iou": 0.8639018696466634, |
|
"eval_overall_accuracy": 0.9910663206185868, |
|
"eval_runtime": 9.6602, |
|
"eval_samples_per_second": 6.936, |
|
"eval_steps_per_second": 0.518, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 88.1, |
|
"learning_rate": 8.81e-05, |
|
"loss": 0.009, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 88.2, |
|
"learning_rate": 8.82e-05, |
|
"loss": 0.0119, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 88.3, |
|
"learning_rate": 8.83e-05, |
|
"loss": 0.0112, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 88.4, |
|
"learning_rate": 8.840000000000001e-05, |
|
"loss": 0.0092, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 88.5, |
|
"learning_rate": 8.850000000000001e-05, |
|
"loss": 0.009, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 88.6, |
|
"learning_rate": 8.86e-05, |
|
"loss": 0.0093, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 88.7, |
|
"learning_rate": 8.87e-05, |
|
"loss": 0.0097, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 88.8, |
|
"learning_rate": 8.88e-05, |
|
"loss": 0.0131, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 88.9, |
|
"learning_rate": 8.89e-05, |
|
"loss": 0.0094, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 89.0, |
|
"learning_rate": 8.900000000000001e-05, |
|
"loss": 0.0089, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 89.1, |
|
"learning_rate": 8.910000000000001e-05, |
|
"loss": 0.0103, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 89.2, |
|
"learning_rate": 8.92e-05, |
|
"loss": 0.0091, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 89.3, |
|
"learning_rate": 8.93e-05, |
|
"loss": 0.0099, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 89.4, |
|
"learning_rate": 8.94e-05, |
|
"loss": 0.0087, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 89.5, |
|
"learning_rate": 8.950000000000001e-05, |
|
"loss": 0.0093, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 89.6, |
|
"learning_rate": 8.960000000000001e-05, |
|
"loss": 0.0083, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 89.7, |
|
"learning_rate": 8.970000000000001e-05, |
|
"loss": 0.0099, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 89.8, |
|
"learning_rate": 8.98e-05, |
|
"loss": 0.0102, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 89.9, |
|
"learning_rate": 8.99e-05, |
|
"loss": 0.0109, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 90.0, |
|
"learning_rate": 9e-05, |
|
"loss": 0.009, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 90.0, |
|
"eval_accuracy_safe": 0.8208565117327998, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9963495123393898, |
|
"eval_iou_safe": 0.7329507903287988, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9909462050698964, |
|
"eval_loss": 0.04276752471923828, |
|
"eval_mean_accuracy": 0.9086030120360948, |
|
"eval_mean_iou": 0.8619484976993477, |
|
"eval_overall_accuracy": 0.9911657304906133, |
|
"eval_runtime": 9.7841, |
|
"eval_samples_per_second": 6.848, |
|
"eval_steps_per_second": 0.511, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 90.1, |
|
"learning_rate": 9.010000000000001e-05, |
|
"loss": 0.009, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 90.2, |
|
"learning_rate": 9.020000000000001e-05, |
|
"loss": 0.009, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 90.3, |
|
"learning_rate": 9.030000000000001e-05, |
|
"loss": 0.0097, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 90.4, |
|
"learning_rate": 9.04e-05, |
|
"loss": 0.0112, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 90.5, |
|
"learning_rate": 9.05e-05, |
|
"loss": 0.009, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 90.6, |
|
"learning_rate": 9.06e-05, |
|
"loss": 0.0084, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 90.7, |
|
"learning_rate": 9.070000000000001e-05, |
|
"loss": 0.0098, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 90.8, |
|
"learning_rate": 9.080000000000001e-05, |
|
"loss": 0.0095, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 90.9, |
|
"learning_rate": 9.090000000000001e-05, |
|
"loss": 0.0095, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 91.0, |
|
"learning_rate": 9.1e-05, |
|
"loss": 0.01, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 91.1, |
|
"learning_rate": 9.11e-05, |
|
"loss": 0.0104, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 91.2, |
|
"learning_rate": 9.120000000000001e-05, |
|
"loss": 0.01, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 91.3, |
|
"learning_rate": 9.130000000000001e-05, |
|
"loss": 0.0079, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 91.4, |
|
"learning_rate": 9.140000000000001e-05, |
|
"loss": 0.011, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 91.5, |
|
"learning_rate": 9.15e-05, |
|
"loss": 0.0095, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 91.6, |
|
"learning_rate": 9.16e-05, |
|
"loss": 0.0118, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 91.7, |
|
"learning_rate": 9.17e-05, |
|
"loss": 0.01, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 91.8, |
|
"learning_rate": 9.180000000000001e-05, |
|
"loss": 0.0083, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 91.9, |
|
"learning_rate": 9.190000000000001e-05, |
|
"loss": 0.009, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 92.0, |
|
"learning_rate": 9.200000000000001e-05, |
|
"loss": 0.009, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 92.0, |
|
"eval_accuracy_safe": 0.821469462338233, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9963327917424423, |
|
"eval_iou_safe": 0.7331384857279003, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9909479626745328, |
|
"eval_loss": 0.044428784400224686, |
|
"eval_mean_accuracy": 0.9089011270403377, |
|
"eval_mean_iou": 0.8620432242012166, |
|
"eval_overall_accuracy": 0.9911676093713561, |
|
"eval_runtime": 9.7009, |
|
"eval_samples_per_second": 6.907, |
|
"eval_steps_per_second": 0.515, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 92.1, |
|
"learning_rate": 9.21e-05, |
|
"loss": 0.0081, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 92.2, |
|
"learning_rate": 9.22e-05, |
|
"loss": 0.0088, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 92.3, |
|
"learning_rate": 9.230000000000001e-05, |
|
"loss": 0.0091, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 92.4, |
|
"learning_rate": 9.240000000000001e-05, |
|
"loss": 0.0104, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 92.5, |
|
"learning_rate": 9.250000000000001e-05, |
|
"loss": 0.0096, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 92.6, |
|
"learning_rate": 9.260000000000001e-05, |
|
"loss": 0.0095, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 92.7, |
|
"learning_rate": 9.27e-05, |
|
"loss": 0.0088, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 92.8, |
|
"learning_rate": 9.28e-05, |
|
"loss": 0.0127, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 92.9, |
|
"learning_rate": 9.290000000000001e-05, |
|
"loss": 0.0105, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 93.0, |
|
"learning_rate": 9.300000000000001e-05, |
|
"loss": 0.0103, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 93.1, |
|
"learning_rate": 9.310000000000001e-05, |
|
"loss": 0.0093, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 93.2, |
|
"learning_rate": 9.320000000000002e-05, |
|
"loss": 0.0096, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 93.3, |
|
"learning_rate": 9.33e-05, |
|
"loss": 0.0098, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 93.4, |
|
"learning_rate": 9.340000000000001e-05, |
|
"loss": 0.0074, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 93.5, |
|
"learning_rate": 9.350000000000001e-05, |
|
"loss": 0.0105, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 93.6, |
|
"learning_rate": 9.360000000000001e-05, |
|
"loss": 0.0122, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 93.7, |
|
"learning_rate": 9.370000000000001e-05, |
|
"loss": 0.0096, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 93.8, |
|
"learning_rate": 9.38e-05, |
|
"loss": 0.0105, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 93.9, |
|
"learning_rate": 9.39e-05, |
|
"loss": 0.0096, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 94.0, |
|
"learning_rate": 9.4e-05, |
|
"loss": 0.0089, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 94.0, |
|
"eval_accuracy_safe": 0.8334335642499451, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9960640301472949, |
|
"eval_iou_safe": 0.7380003072249057, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9910395971434911, |
|
"eval_loss": 0.04098622128367424, |
|
"eval_mean_accuracy": 0.91474879719862, |
|
"eval_mean_iou": 0.8645199521841984, |
|
"eval_overall_accuracy": 0.9912601869497726, |
|
"eval_runtime": 9.7789, |
|
"eval_samples_per_second": 6.852, |
|
"eval_steps_per_second": 0.511, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 94.1, |
|
"learning_rate": 9.41e-05, |
|
"loss": 0.0085, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 94.2, |
|
"learning_rate": 9.42e-05, |
|
"loss": 0.0097, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 94.3, |
|
"learning_rate": 9.43e-05, |
|
"loss": 0.0105, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 94.4, |
|
"learning_rate": 9.44e-05, |
|
"loss": 0.0097, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 94.5, |
|
"learning_rate": 9.449999999999999e-05, |
|
"loss": 0.0083, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 94.6, |
|
"learning_rate": 9.46e-05, |
|
"loss": 0.0081, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 94.7, |
|
"learning_rate": 9.47e-05, |
|
"loss": 0.0101, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 94.8, |
|
"learning_rate": 9.48e-05, |
|
"loss": 0.0088, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 94.9, |
|
"learning_rate": 9.49e-05, |
|
"loss": 0.0086, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 95.0, |
|
"learning_rate": 9.5e-05, |
|
"loss": 0.0102, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 95.1, |
|
"learning_rate": 9.51e-05, |
|
"loss": 0.0075, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 95.2, |
|
"learning_rate": 9.52e-05, |
|
"loss": 0.01, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 95.3, |
|
"learning_rate": 9.53e-05, |
|
"loss": 0.012, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 95.4, |
|
"learning_rate": 9.54e-05, |
|
"loss": 0.0088, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 95.5, |
|
"learning_rate": 9.55e-05, |
|
"loss": 0.0094, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 95.6, |
|
"learning_rate": 9.56e-05, |
|
"loss": 0.0089, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 95.7, |
|
"learning_rate": 9.57e-05, |
|
"loss": 0.0091, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 95.8, |
|
"learning_rate": 9.58e-05, |
|
"loss": 0.0089, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 95.9, |
|
"learning_rate": 9.59e-05, |
|
"loss": 0.0099, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 96.0, |
|
"learning_rate": 9.6e-05, |
|
"loss": 0.0091, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 96.0, |
|
"eval_accuracy_safe": 0.8349370279991211, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9961565507837383, |
|
"eval_iou_safe": 0.74132699426506, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9911767801499561, |
|
"eval_loss": 0.04181906208395958, |
|
"eval_mean_accuracy": 0.9155467893914297, |
|
"eval_mean_iou": 0.866251887207508, |
|
"eval_overall_accuracy": 0.9913943845834305, |
|
"eval_runtime": 9.8088, |
|
"eval_samples_per_second": 6.831, |
|
"eval_steps_per_second": 0.51, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 96.1, |
|
"learning_rate": 9.61e-05, |
|
"loss": 0.0079, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 96.2, |
|
"learning_rate": 9.620000000000001e-05, |
|
"loss": 0.0089, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 96.3, |
|
"learning_rate": 9.63e-05, |
|
"loss": 0.0091, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 96.4, |
|
"learning_rate": 9.64e-05, |
|
"loss": 0.0076, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 96.5, |
|
"learning_rate": 9.65e-05, |
|
"loss": 0.0102, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 96.6, |
|
"learning_rate": 9.66e-05, |
|
"loss": 0.0089, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 96.7, |
|
"learning_rate": 9.67e-05, |
|
"loss": 0.01, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 96.8, |
|
"learning_rate": 9.680000000000001e-05, |
|
"loss": 0.0104, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 96.9, |
|
"learning_rate": 9.69e-05, |
|
"loss": 0.0108, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 97.0, |
|
"learning_rate": 9.7e-05, |
|
"loss": 0.008, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 97.1, |
|
"learning_rate": 9.71e-05, |
|
"loss": 0.008, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 97.2, |
|
"learning_rate": 9.72e-05, |
|
"loss": 0.0096, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 97.3, |
|
"learning_rate": 9.730000000000001e-05, |
|
"loss": 0.0085, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 97.4, |
|
"learning_rate": 9.74e-05, |
|
"loss": 0.0108, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 97.5, |
|
"learning_rate": 9.75e-05, |
|
"loss": 0.0102, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 97.6, |
|
"learning_rate": 9.76e-05, |
|
"loss": 0.0093, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 97.7, |
|
"learning_rate": 9.77e-05, |
|
"loss": 0.0107, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 97.8, |
|
"learning_rate": 9.78e-05, |
|
"loss": 0.0079, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 97.9, |
|
"learning_rate": 9.790000000000001e-05, |
|
"loss": 0.0092, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 98.0, |
|
"learning_rate": 9.8e-05, |
|
"loss": 0.0079, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 98.0, |
|
"eval_accuracy_safe": 0.8204594431016071, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9964533560467487, |
|
"eval_iou_safe": 0.7348348047661303, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9910375731299209, |
|
"eval_loss": 0.03976387903094292, |
|
"eval_mean_accuracy": 0.9084563995741779, |
|
"eval_mean_iou": 0.8629361889480256, |
|
"eval_overall_accuracy": 0.9912547780506646, |
|
"eval_runtime": 9.9093, |
|
"eval_samples_per_second": 6.761, |
|
"eval_steps_per_second": 0.505, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 98.1, |
|
"learning_rate": 9.81e-05, |
|
"loss": 0.0081, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 98.2, |
|
"learning_rate": 9.82e-05, |
|
"loss": 0.0094, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 98.3, |
|
"learning_rate": 9.83e-05, |
|
"loss": 0.0077, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 98.4, |
|
"learning_rate": 9.84e-05, |
|
"loss": 0.0097, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 98.5, |
|
"learning_rate": 9.850000000000001e-05, |
|
"loss": 0.0086, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 98.6, |
|
"learning_rate": 9.86e-05, |
|
"loss": 0.0075, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 98.7, |
|
"learning_rate": 9.87e-05, |
|
"loss": 0.0087, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 98.8, |
|
"learning_rate": 9.88e-05, |
|
"loss": 0.0111, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 98.9, |
|
"learning_rate": 9.89e-05, |
|
"loss": 0.0081, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 99.0, |
|
"learning_rate": 9.900000000000001e-05, |
|
"loss": 0.0108, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 99.1, |
|
"learning_rate": 9.910000000000001e-05, |
|
"loss": 0.0089, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 99.2, |
|
"learning_rate": 9.92e-05, |
|
"loss": 0.0124, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 99.3, |
|
"learning_rate": 9.93e-05, |
|
"loss": 0.0087, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 99.4, |
|
"learning_rate": 9.94e-05, |
|
"loss": 0.0077, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 99.5, |
|
"learning_rate": 9.95e-05, |
|
"loss": 0.0108, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 99.6, |
|
"learning_rate": 9.960000000000001e-05, |
|
"loss": 0.0088, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 99.7, |
|
"learning_rate": 9.970000000000001e-05, |
|
"loss": 0.0097, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 99.8, |
|
"learning_rate": 9.98e-05, |
|
"loss": 0.0085, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 99.9, |
|
"learning_rate": 9.99e-05, |
|
"loss": 0.0078, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 100.0, |
|
"learning_rate": 0.0001, |
|
"loss": 0.0084, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 100.0, |
|
"eval_accuracy_safe": 0.8262015181128831, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9955301444201959, |
|
"eval_iou_safe": 0.7204070656649691, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.990291515973755, |
|
"eval_loss": 0.049743711948394775, |
|
"eval_mean_accuracy": 0.9108658312665395, |
|
"eval_mean_iou": 0.8553492908193621, |
|
"eval_overall_accuracy": 0.9905284483041337, |
|
"eval_runtime": 9.603, |
|
"eval_samples_per_second": 6.977, |
|
"eval_steps_per_second": 0.521, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 100.1, |
|
"learning_rate": 9.999473684210527e-05, |
|
"loss": 0.0091, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 100.2, |
|
"learning_rate": 9.998947368421053e-05, |
|
"loss": 0.0103, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 100.3, |
|
"learning_rate": 9.998421052631579e-05, |
|
"loss": 0.0083, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 100.4, |
|
"learning_rate": 9.997894736842107e-05, |
|
"loss": 0.0105, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 100.5, |
|
"learning_rate": 9.997368421052632e-05, |
|
"loss": 0.0091, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 100.6, |
|
"learning_rate": 9.996842105263159e-05, |
|
"loss": 0.0114, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 100.7, |
|
"learning_rate": 9.996315789473684e-05, |
|
"loss": 0.0126, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 100.8, |
|
"learning_rate": 9.99578947368421e-05, |
|
"loss": 0.0105, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 100.9, |
|
"learning_rate": 9.995263157894738e-05, |
|
"loss": 0.0076, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 101.0, |
|
"learning_rate": 9.994736842105263e-05, |
|
"loss": 0.0092, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 101.1, |
|
"learning_rate": 9.99421052631579e-05, |
|
"loss": 0.0098, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 101.2, |
|
"learning_rate": 9.993684210526317e-05, |
|
"loss": 0.0097, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 101.3, |
|
"learning_rate": 9.993157894736841e-05, |
|
"loss": 0.0104, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 101.4, |
|
"learning_rate": 9.992631578947369e-05, |
|
"loss": 0.0114, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 101.5, |
|
"learning_rate": 9.992105263157895e-05, |
|
"loss": 0.008, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 101.6, |
|
"learning_rate": 9.991578947368422e-05, |
|
"loss": 0.0085, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 101.7, |
|
"learning_rate": 9.991052631578948e-05, |
|
"loss": 0.0117, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 101.8, |
|
"learning_rate": 9.990526315789474e-05, |
|
"loss": 0.0077, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 101.9, |
|
"learning_rate": 9.99e-05, |
|
"loss": 0.0108, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 102.0, |
|
"learning_rate": 9.989473684210526e-05, |
|
"loss": 0.0088, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 102.0, |
|
"eval_accuracy_safe": 0.8155596932933952, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9960180925072599, |
|
"eval_iou_safe": 0.721209259672765, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9904577652014809, |
|
"eval_loss": 0.039893802255392075, |
|
"eval_mean_accuracy": 0.9057888929003275, |
|
"eval_mean_iou": 0.8558335124371229, |
|
"eval_overall_accuracy": 0.9906876407452484, |
|
"eval_runtime": 9.8062, |
|
"eval_samples_per_second": 6.832, |
|
"eval_steps_per_second": 0.51, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 102.1, |
|
"learning_rate": 9.988947368421053e-05, |
|
"loss": 0.0114, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 102.2, |
|
"learning_rate": 9.988421052631579e-05, |
|
"loss": 0.0108, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 102.3, |
|
"learning_rate": 9.987894736842107e-05, |
|
"loss": 0.0082, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 102.4, |
|
"learning_rate": 9.987368421052631e-05, |
|
"loss": 0.0083, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 102.5, |
|
"learning_rate": 9.986842105263159e-05, |
|
"loss": 0.0091, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 102.6, |
|
"learning_rate": 9.986315789473685e-05, |
|
"loss": 0.0083, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 102.7, |
|
"learning_rate": 9.98578947368421e-05, |
|
"loss": 0.0075, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 102.8, |
|
"learning_rate": 9.985263157894738e-05, |
|
"loss": 0.0085, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 102.9, |
|
"learning_rate": 9.984736842105264e-05, |
|
"loss": 0.0119, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 103.0, |
|
"learning_rate": 9.98421052631579e-05, |
|
"loss": 0.008, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 103.1, |
|
"learning_rate": 9.983684210526316e-05, |
|
"loss": 0.0078, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 103.2, |
|
"learning_rate": 9.983157894736843e-05, |
|
"loss": 0.0078, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 103.3, |
|
"learning_rate": 9.982631578947369e-05, |
|
"loss": 0.0102, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 103.4, |
|
"learning_rate": 9.982105263157895e-05, |
|
"loss": 0.008, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 103.5, |
|
"learning_rate": 9.981578947368421e-05, |
|
"loss": 0.011, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 103.6, |
|
"learning_rate": 9.981052631578948e-05, |
|
"loss": 0.0074, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 103.7, |
|
"learning_rate": 9.980526315789475e-05, |
|
"loss": 0.0077, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 103.8, |
|
"learning_rate": 9.98e-05, |
|
"loss": 0.0094, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 103.9, |
|
"learning_rate": 9.979473684210526e-05, |
|
"loss": 0.0096, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 104.0, |
|
"learning_rate": 9.978947368421054e-05, |
|
"loss": 0.0089, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 104.0, |
|
"eval_accuracy_safe": 0.8493741350264649, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9957055640162428, |
|
"eval_iou_safe": 0.7443530405405405, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9911614156662233, |
|
"eval_loss": 0.03877120092511177, |
|
"eval_mean_accuracy": 0.9225398495213538, |
|
"eval_mean_iou": 0.8677572281033819, |
|
"eval_overall_accuracy": 0.9913831682347539, |
|
"eval_runtime": 9.5733, |
|
"eval_samples_per_second": 6.999, |
|
"eval_steps_per_second": 0.522, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 104.1, |
|
"learning_rate": 9.978421052631579e-05, |
|
"loss": 0.0088, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 104.2, |
|
"learning_rate": 9.977894736842106e-05, |
|
"loss": 0.0075, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 104.3, |
|
"learning_rate": 9.977368421052633e-05, |
|
"loss": 0.0088, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 104.4, |
|
"learning_rate": 9.976842105263159e-05, |
|
"loss": 0.0099, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 104.5, |
|
"learning_rate": 9.976315789473685e-05, |
|
"loss": 0.0084, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 104.6, |
|
"learning_rate": 9.975789473684211e-05, |
|
"loss": 0.0073, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 104.7, |
|
"learning_rate": 9.975263157894737e-05, |
|
"loss": 0.0085, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 104.8, |
|
"learning_rate": 9.974736842105264e-05, |
|
"loss": 0.0082, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 104.9, |
|
"learning_rate": 9.97421052631579e-05, |
|
"loss": 0.0081, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 105.0, |
|
"learning_rate": 9.973684210526316e-05, |
|
"loss": 0.0086, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 105.1, |
|
"learning_rate": 9.973157894736842e-05, |
|
"loss": 0.0082, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 105.2, |
|
"learning_rate": 9.972631578947369e-05, |
|
"loss": 0.0072, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 105.3, |
|
"learning_rate": 9.972105263157895e-05, |
|
"loss": 0.0091, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 105.4, |
|
"learning_rate": 9.971578947368422e-05, |
|
"loss": 0.0079, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 105.5, |
|
"learning_rate": 9.971052631578947e-05, |
|
"loss": 0.0091, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 105.6, |
|
"learning_rate": 9.970526315789475e-05, |
|
"loss": 0.0075, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 105.7, |
|
"learning_rate": 9.970000000000001e-05, |
|
"loss": 0.0094, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 105.8, |
|
"learning_rate": 9.969473684210526e-05, |
|
"loss": 0.0085, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 105.9, |
|
"learning_rate": 9.968947368421054e-05, |
|
"loss": 0.0071, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 106.0, |
|
"learning_rate": 9.968421052631578e-05, |
|
"loss": 0.008, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 106.0, |
|
"eval_accuracy_safe": 0.8497846962810475, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.995183998729, |
|
"eval_iou_safe": 0.7336950190550683, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9906545538346639, |
|
"eval_loss": 0.044908296316862106, |
|
"eval_mean_accuracy": 0.9224843475050237, |
|
"eval_mean_iou": 0.8621747864448661, |
|
"eval_overall_accuracy": 0.9908891364709654, |
|
"eval_runtime": 9.8596, |
|
"eval_samples_per_second": 6.795, |
|
"eval_steps_per_second": 0.507, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 106.1, |
|
"learning_rate": 9.967894736842106e-05, |
|
"loss": 0.0091, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 106.2, |
|
"learning_rate": 9.967368421052632e-05, |
|
"loss": 0.0079, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 106.3, |
|
"learning_rate": 9.966842105263158e-05, |
|
"loss": 0.0083, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 106.4, |
|
"learning_rate": 9.966315789473685e-05, |
|
"loss": 0.0078, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 106.5, |
|
"learning_rate": 9.965789473684211e-05, |
|
"loss": 0.0087, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 106.6, |
|
"learning_rate": 9.965263157894737e-05, |
|
"loss": 0.0073, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 106.7, |
|
"learning_rate": 9.964736842105263e-05, |
|
"loss": 0.0085, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 106.8, |
|
"learning_rate": 9.96421052631579e-05, |
|
"loss": 0.0068, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 106.9, |
|
"learning_rate": 9.963684210526316e-05, |
|
"loss": 0.0085, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 107.0, |
|
"learning_rate": 9.963157894736843e-05, |
|
"loss": 0.0072, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 107.1, |
|
"learning_rate": 9.962631578947368e-05, |
|
"loss": 0.0085, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 107.2, |
|
"learning_rate": 9.962105263157895e-05, |
|
"loss": 0.0085, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 107.3, |
|
"learning_rate": 9.961578947368422e-05, |
|
"loss": 0.008, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 107.4, |
|
"learning_rate": 9.961052631578947e-05, |
|
"loss": 0.0072, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 107.5, |
|
"learning_rate": 9.960526315789475e-05, |
|
"loss": 0.0069, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 107.6, |
|
"learning_rate": 9.960000000000001e-05, |
|
"loss": 0.0077, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 107.7, |
|
"learning_rate": 9.959473684210526e-05, |
|
"loss": 0.0093, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 107.8, |
|
"learning_rate": 9.958947368421053e-05, |
|
"loss": 0.0098, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 107.9, |
|
"learning_rate": 9.95842105263158e-05, |
|
"loss": 0.0072, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 108.0, |
|
"learning_rate": 9.957894736842106e-05, |
|
"loss": 0.0084, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 108.0, |
|
"eval_accuracy_safe": 0.8509604820336082, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9957255113950575, |
|
"eval_iou_safe": 0.7461717878511307, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9912289144692424, |
|
"eval_loss": 0.04285858944058418, |
|
"eval_mean_accuracy": 0.9233429967143328, |
|
"eval_mean_iou": 0.8687003511601865, |
|
"eval_overall_accuracy": 0.9914493845469916, |
|
"eval_runtime": 9.6865, |
|
"eval_samples_per_second": 6.917, |
|
"eval_steps_per_second": 0.516, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 108.1, |
|
"learning_rate": 9.957368421052632e-05, |
|
"loss": 0.0072, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 108.2, |
|
"learning_rate": 9.956842105263158e-05, |
|
"loss": 0.0068, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 108.3, |
|
"learning_rate": 9.956315789473684e-05, |
|
"loss": 0.0077, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 108.4, |
|
"learning_rate": 9.955789473684211e-05, |
|
"loss": 0.0088, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 108.5, |
|
"learning_rate": 9.955263157894737e-05, |
|
"loss": 0.0076, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 108.6, |
|
"learning_rate": 9.954736842105263e-05, |
|
"loss": 0.0081, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 108.7, |
|
"learning_rate": 9.954210526315791e-05, |
|
"loss": 0.0087, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 108.8, |
|
"learning_rate": 9.953684210526316e-05, |
|
"loss": 0.0076, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 108.9, |
|
"learning_rate": 9.953157894736843e-05, |
|
"loss": 0.0072, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 109.0, |
|
"learning_rate": 9.95263157894737e-05, |
|
"loss": 0.0087, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 109.1, |
|
"learning_rate": 9.952105263157894e-05, |
|
"loss": 0.0069, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 109.2, |
|
"learning_rate": 9.951578947368422e-05, |
|
"loss": 0.0079, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 109.3, |
|
"learning_rate": 9.951052631578948e-05, |
|
"loss": 0.0099, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 109.4, |
|
"learning_rate": 9.950526315789474e-05, |
|
"loss": 0.0081, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 109.5, |
|
"learning_rate": 9.95e-05, |
|
"loss": 0.0093, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 109.6, |
|
"learning_rate": 9.949473684210527e-05, |
|
"loss": 0.008, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 109.7, |
|
"learning_rate": 9.948947368421053e-05, |
|
"loss": 0.0071, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 109.8, |
|
"learning_rate": 9.948421052631579e-05, |
|
"loss": 0.008, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 109.9, |
|
"learning_rate": 9.947894736842106e-05, |
|
"loss": 0.0065, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 110.0, |
|
"learning_rate": 9.947368421052632e-05, |
|
"loss": 0.0084, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 110.0, |
|
"eval_accuracy_safe": 0.8374852063022117, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9962660853609355, |
|
"eval_iou_safe": 0.7459730172410833, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9913622729522215, |
|
"eval_loss": 0.04052352160215378, |
|
"eval_mean_accuracy": 0.9168756458315737, |
|
"eval_mean_iou": 0.8686676450966524, |
|
"eval_overall_accuracy": 0.9915759527861182, |
|
"eval_runtime": 9.728, |
|
"eval_samples_per_second": 6.887, |
|
"eval_steps_per_second": 0.514, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 110.1, |
|
"learning_rate": 9.94684210526316e-05, |
|
"loss": 0.0092, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 110.2, |
|
"learning_rate": 9.946315789473684e-05, |
|
"loss": 0.0081, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 110.3, |
|
"learning_rate": 9.94578947368421e-05, |
|
"loss": 0.0067, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 110.4, |
|
"learning_rate": 9.945263157894738e-05, |
|
"loss": 0.0068, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 110.5, |
|
"learning_rate": 9.944736842105263e-05, |
|
"loss": 0.008, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 110.6, |
|
"learning_rate": 9.94421052631579e-05, |
|
"loss": 0.0087, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 110.7, |
|
"learning_rate": 9.943684210526317e-05, |
|
"loss": 0.0069, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 110.8, |
|
"learning_rate": 9.943157894736843e-05, |
|
"loss": 0.0076, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 110.9, |
|
"learning_rate": 9.942631578947369e-05, |
|
"loss": 0.0064, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 111.0, |
|
"learning_rate": 9.942105263157895e-05, |
|
"loss": 0.007, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 111.1, |
|
"learning_rate": 9.941578947368422e-05, |
|
"loss": 0.0071, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 111.2, |
|
"learning_rate": 9.941052631578948e-05, |
|
"loss": 0.008, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 111.3, |
|
"learning_rate": 9.940526315789474e-05, |
|
"loss": 0.0068, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 111.4, |
|
"learning_rate": 9.94e-05, |
|
"loss": 0.0093, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 111.5, |
|
"learning_rate": 9.939473684210527e-05, |
|
"loss": 0.007, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 111.6, |
|
"learning_rate": 9.938947368421053e-05, |
|
"loss": 0.0079, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 111.7, |
|
"learning_rate": 9.938421052631579e-05, |
|
"loss": 0.0071, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 111.8, |
|
"learning_rate": 9.937894736842107e-05, |
|
"loss": 0.0096, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 111.9, |
|
"learning_rate": 9.937368421052632e-05, |
|
"loss": 0.0074, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 112.0, |
|
"learning_rate": 9.936842105263159e-05, |
|
"loss": 0.007, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 112.0, |
|
"eval_accuracy_safe": 0.840355280049036, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9955587747756712, |
|
"eval_iou_safe": 0.7333499298572587, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9907445671244249, |
|
"eval_loss": 0.054369520395994186, |
|
"eval_mean_accuracy": 0.9179570274123536, |
|
"eval_mean_iou": 0.8620472484908418, |
|
"eval_overall_accuracy": 0.9909743123979711, |
|
"eval_runtime": 9.6881, |
|
"eval_samples_per_second": 6.916, |
|
"eval_steps_per_second": 0.516, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 112.1, |
|
"learning_rate": 9.936315789473685e-05, |
|
"loss": 0.0078, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 112.2, |
|
"learning_rate": 9.93578947368421e-05, |
|
"loss": 0.0073, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 112.3, |
|
"learning_rate": 9.935263157894738e-05, |
|
"loss": 0.0069, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 112.4, |
|
"learning_rate": 9.934736842105263e-05, |
|
"loss": 0.0068, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 112.5, |
|
"learning_rate": 9.93421052631579e-05, |
|
"loss": 0.0085, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 112.6, |
|
"learning_rate": 9.933684210526317e-05, |
|
"loss": 0.0063, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 112.7, |
|
"learning_rate": 9.933157894736843e-05, |
|
"loss": 0.007, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 112.8, |
|
"learning_rate": 9.932631578947369e-05, |
|
"loss": 0.0089, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 112.9, |
|
"learning_rate": 9.932105263157895e-05, |
|
"loss": 0.0073, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 113.0, |
|
"learning_rate": 9.931578947368421e-05, |
|
"loss": 0.0086, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 113.1, |
|
"learning_rate": 9.931052631578948e-05, |
|
"loss": 0.0068, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 113.2, |
|
"learning_rate": 9.930526315789474e-05, |
|
"loss": 0.0073, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 113.3, |
|
"learning_rate": 9.93e-05, |
|
"loss": 0.0068, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 113.4, |
|
"learning_rate": 9.929473684210526e-05, |
|
"loss": 0.0078, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 113.5, |
|
"learning_rate": 9.928947368421053e-05, |
|
"loss": 0.0071, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 113.6, |
|
"learning_rate": 9.928421052631579e-05, |
|
"loss": 0.0076, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 113.7, |
|
"learning_rate": 9.927894736842106e-05, |
|
"loss": 0.0071, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 113.8, |
|
"learning_rate": 9.927368421052631e-05, |
|
"loss": 0.0085, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 113.9, |
|
"learning_rate": 9.926842105263159e-05, |
|
"loss": 0.0076, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 114.0, |
|
"learning_rate": 9.926315789473685e-05, |
|
"loss": 0.0079, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 114.0, |
|
"eval_accuracy_safe": 0.8398502704307231, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9954145669605933, |
|
"eval_iou_safe": 0.7298914500134013, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9905859037740224, |
|
"eval_loss": 0.050063032656908035, |
|
"eval_mean_accuracy": 0.9176324186956581, |
|
"eval_mean_iou": 0.8602386768937118, |
|
"eval_overall_accuracy": 0.9908194470761428, |
|
"eval_runtime": 9.9077, |
|
"eval_samples_per_second": 6.762, |
|
"eval_steps_per_second": 0.505, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 114.1, |
|
"learning_rate": 9.92578947368421e-05, |
|
"loss": 0.008, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 114.2, |
|
"learning_rate": 9.925263157894738e-05, |
|
"loss": 0.0073, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 114.3, |
|
"learning_rate": 9.924736842105264e-05, |
|
"loss": 0.0102, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 114.4, |
|
"learning_rate": 9.92421052631579e-05, |
|
"loss": 0.0066, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 114.5, |
|
"learning_rate": 9.923684210526316e-05, |
|
"loss": 0.0072, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 114.6, |
|
"learning_rate": 9.923157894736842e-05, |
|
"loss": 0.0078, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 114.7, |
|
"learning_rate": 9.922631578947369e-05, |
|
"loss": 0.0085, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 114.8, |
|
"learning_rate": 9.922105263157895e-05, |
|
"loss": 0.0059, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 114.9, |
|
"learning_rate": 9.921578947368421e-05, |
|
"loss": 0.0069, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 115.0, |
|
"learning_rate": 9.921052631578947e-05, |
|
"loss": 0.0094, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 115.1, |
|
"learning_rate": 9.920526315789475e-05, |
|
"loss": 0.007, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 115.2, |
|
"learning_rate": 9.92e-05, |
|
"loss": 0.0072, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 115.3, |
|
"learning_rate": 9.919473684210526e-05, |
|
"loss": 0.0075, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 115.4, |
|
"learning_rate": 9.918947368421054e-05, |
|
"loss": 0.0083, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 115.5, |
|
"learning_rate": 9.918421052631579e-05, |
|
"loss": 0.0067, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 115.6, |
|
"learning_rate": 9.917894736842106e-05, |
|
"loss": 0.0074, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 115.7, |
|
"learning_rate": 9.917368421052632e-05, |
|
"loss": 0.008, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 115.8, |
|
"learning_rate": 9.916842105263159e-05, |
|
"loss": 0.0086, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 115.9, |
|
"learning_rate": 9.916315789473685e-05, |
|
"loss": 0.0077, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 116.0, |
|
"learning_rate": 9.915789473684211e-05, |
|
"loss": 0.0084, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 116.0, |
|
"eval_accuracy_safe": 0.8473271113064329, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9951262686679598, |
|
"eval_iou_safe": 0.7303771221219059, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9905233299616627, |
|
"eval_loss": 0.05078176409006119, |
|
"eval_mean_accuracy": 0.9212266899871964, |
|
"eval_mean_iou": 0.8604502260417843, |
|
"eval_overall_accuracy": 0.9907605185437558, |
|
"eval_runtime": 9.6949, |
|
"eval_samples_per_second": 6.911, |
|
"eval_steps_per_second": 0.516, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 116.1, |
|
"learning_rate": 9.915263157894737e-05, |
|
"loss": 0.0076, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 116.2, |
|
"learning_rate": 9.914736842105264e-05, |
|
"loss": 0.0064, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 116.3, |
|
"learning_rate": 9.91421052631579e-05, |
|
"loss": 0.0072, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 116.4, |
|
"learning_rate": 9.913684210526316e-05, |
|
"loss": 0.0068, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 116.5, |
|
"learning_rate": 9.913157894736844e-05, |
|
"loss": 0.009, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 116.6, |
|
"learning_rate": 9.912631578947368e-05, |
|
"loss": 0.0069, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 116.7, |
|
"learning_rate": 9.912105263157895e-05, |
|
"loss": 0.0068, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 116.8, |
|
"learning_rate": 9.911578947368422e-05, |
|
"loss": 0.0072, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 116.9, |
|
"learning_rate": 9.911052631578947e-05, |
|
"loss": 0.0068, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 117.0, |
|
"learning_rate": 9.910526315789475e-05, |
|
"loss": 0.0072, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 117.1, |
|
"learning_rate": 9.910000000000001e-05, |
|
"loss": 0.0066, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 117.2, |
|
"learning_rate": 9.909473684210526e-05, |
|
"loss": 0.0068, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 117.3, |
|
"learning_rate": 9.908947368421053e-05, |
|
"loss": 0.0072, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 117.4, |
|
"learning_rate": 9.90842105263158e-05, |
|
"loss": 0.0068, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 117.5, |
|
"learning_rate": 9.907894736842106e-05, |
|
"loss": 0.0078, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 117.6, |
|
"learning_rate": 9.907368421052632e-05, |
|
"loss": 0.0064, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 117.7, |
|
"learning_rate": 9.906842105263158e-05, |
|
"loss": 0.0059, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 117.8, |
|
"learning_rate": 9.906315789473685e-05, |
|
"loss": 0.008, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 117.9, |
|
"learning_rate": 9.905789473684211e-05, |
|
"loss": 0.007, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 118.0, |
|
"learning_rate": 9.905263157894737e-05, |
|
"loss": 0.0113, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 118.0, |
|
"eval_accuracy_safe": 0.8506829194952988, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9949560705916616, |
|
"eval_iou_safe": 0.7297523872514572, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9904546185117991, |
|
"eval_loss": 0.05113836005330086, |
|
"eval_mean_accuracy": 0.9228194950434803, |
|
"eval_mean_iou": 0.8601035028816282, |
|
"eval_overall_accuracy": 0.9906944730388585, |
|
"eval_runtime": 9.8084, |
|
"eval_samples_per_second": 6.831, |
|
"eval_steps_per_second": 0.51, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 118.1, |
|
"learning_rate": 9.904736842105263e-05, |
|
"loss": 0.006, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 118.2, |
|
"learning_rate": 9.904210526315791e-05, |
|
"loss": 0.0067, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 118.3, |
|
"learning_rate": 9.903684210526316e-05, |
|
"loss": 0.0083, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 118.4, |
|
"learning_rate": 9.903157894736843e-05, |
|
"loss": 0.0071, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 118.5, |
|
"learning_rate": 9.90263157894737e-05, |
|
"loss": 0.008, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 118.6, |
|
"learning_rate": 9.902105263157894e-05, |
|
"loss": 0.0092, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 118.7, |
|
"learning_rate": 9.901578947368422e-05, |
|
"loss": 0.0069, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 118.8, |
|
"learning_rate": 9.901052631578947e-05, |
|
"loss": 0.008, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 118.9, |
|
"learning_rate": 9.900526315789475e-05, |
|
"loss": 0.0061, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 119.0, |
|
"learning_rate": 9.900000000000001e-05, |
|
"loss": 0.0092, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 119.1, |
|
"learning_rate": 9.899473684210527e-05, |
|
"loss": 0.0081, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 119.2, |
|
"learning_rate": 9.898947368421053e-05, |
|
"loss": 0.0065, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 119.3, |
|
"learning_rate": 9.89842105263158e-05, |
|
"loss": 0.0068, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 119.4, |
|
"learning_rate": 9.897894736842106e-05, |
|
"loss": 0.0066, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 119.5, |
|
"learning_rate": 9.897368421052632e-05, |
|
"loss": 0.0074, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 119.6, |
|
"learning_rate": 9.896842105263158e-05, |
|
"loss": 0.0077, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 119.7, |
|
"learning_rate": 9.896315789473684e-05, |
|
"loss": 0.0073, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 119.8, |
|
"learning_rate": 9.89578947368421e-05, |
|
"loss": 0.0069, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 119.9, |
|
"learning_rate": 9.895263157894737e-05, |
|
"loss": 0.0071, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 120.0, |
|
"learning_rate": 9.894736842105263e-05, |
|
"loss": 0.0076, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 120.0, |
|
"eval_accuracy_safe": 0.8581597603710086, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9946534571212905, |
|
"eval_iou_safe": 0.7299408295665262, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9903777407663423, |
|
"eval_loss": 0.05562065914273262, |
|
"eval_mean_accuracy": 0.9264066087461496, |
|
"eval_mean_iou": 0.8601592851664343, |
|
"eval_overall_accuracy": 0.9906216521761311, |
|
"eval_runtime": 9.7647, |
|
"eval_samples_per_second": 6.861, |
|
"eval_steps_per_second": 0.512, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 120.1, |
|
"learning_rate": 9.89421052631579e-05, |
|
"loss": 0.0062, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 120.2, |
|
"learning_rate": 9.893684210526316e-05, |
|
"loss": 0.0079, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 120.3, |
|
"learning_rate": 9.893157894736843e-05, |
|
"loss": 0.0073, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 120.4, |
|
"learning_rate": 9.89263157894737e-05, |
|
"loss": 0.0064, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 120.5, |
|
"learning_rate": 9.892105263157894e-05, |
|
"loss": 0.0071, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 120.6, |
|
"learning_rate": 9.891578947368422e-05, |
|
"loss": 0.0064, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 120.7, |
|
"learning_rate": 9.891052631578948e-05, |
|
"loss": 0.0071, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 120.8, |
|
"learning_rate": 9.890526315789474e-05, |
|
"loss": 0.0085, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 120.9, |
|
"learning_rate": 9.89e-05, |
|
"loss": 0.0062, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 121.0, |
|
"learning_rate": 9.889473684210527e-05, |
|
"loss": 0.0078, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 121.1, |
|
"learning_rate": 9.888947368421053e-05, |
|
"loss": 0.0072, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 121.2, |
|
"learning_rate": 9.888421052631579e-05, |
|
"loss": 0.0065, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 121.3, |
|
"learning_rate": 9.887894736842105e-05, |
|
"loss": 0.0067, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 121.4, |
|
"learning_rate": 9.887368421052632e-05, |
|
"loss": 0.0059, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 121.5, |
|
"learning_rate": 9.886842105263159e-05, |
|
"loss": 0.0076, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 121.6, |
|
"learning_rate": 9.886315789473684e-05, |
|
"loss": 0.0062, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 121.7, |
|
"learning_rate": 9.88578947368421e-05, |
|
"loss": 0.0092, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 121.8, |
|
"learning_rate": 9.885263157894738e-05, |
|
"loss": 0.0069, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 121.9, |
|
"learning_rate": 9.884736842105263e-05, |
|
"loss": 0.0076, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 122.0, |
|
"learning_rate": 9.88421052631579e-05, |
|
"loss": 0.0081, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 122.0, |
|
"eval_accuracy_safe": 0.8559276949587704, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9953280305377943, |
|
"eval_iou_safe": 0.742030462790445, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9909823780252695, |
|
"eval_loss": 0.04713164269924164, |
|
"eval_mean_accuracy": 0.9256278627482823, |
|
"eval_mean_iou": 0.8665064204078572, |
|
"eval_overall_accuracy": 0.9912103681421992, |
|
"eval_runtime": 9.8825, |
|
"eval_samples_per_second": 6.78, |
|
"eval_steps_per_second": 0.506, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 122.1, |
|
"learning_rate": 9.883684210526317e-05, |
|
"loss": 0.0066, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 122.2, |
|
"learning_rate": 9.883157894736843e-05, |
|
"loss": 0.0067, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 122.3, |
|
"learning_rate": 9.882631578947369e-05, |
|
"loss": 0.0072, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 122.4, |
|
"learning_rate": 9.882105263157895e-05, |
|
"loss": 0.0068, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 122.5, |
|
"learning_rate": 9.881578947368422e-05, |
|
"loss": 0.0064, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 122.6, |
|
"learning_rate": 9.881052631578948e-05, |
|
"loss": 0.007, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 122.7, |
|
"learning_rate": 9.880526315789474e-05, |
|
"loss": 0.0088, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 122.8, |
|
"learning_rate": 9.88e-05, |
|
"loss": 0.0071, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 122.9, |
|
"learning_rate": 9.879473684210528e-05, |
|
"loss": 0.008, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 123.0, |
|
"learning_rate": 9.878947368421053e-05, |
|
"loss": 0.0069, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 123.1, |
|
"learning_rate": 9.878421052631579e-05, |
|
"loss": 0.0073, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 123.2, |
|
"learning_rate": 9.877894736842107e-05, |
|
"loss": 0.0077, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 123.3, |
|
"learning_rate": 9.877368421052631e-05, |
|
"loss": 0.0079, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 123.4, |
|
"learning_rate": 9.876842105263159e-05, |
|
"loss": 0.006, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 123.5, |
|
"learning_rate": 9.876315789473685e-05, |
|
"loss": 0.0054, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 123.6, |
|
"learning_rate": 9.87578947368421e-05, |
|
"loss": 0.0072, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 123.7, |
|
"learning_rate": 9.875263157894738e-05, |
|
"loss": 0.0074, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 123.8, |
|
"learning_rate": 9.874736842105264e-05, |
|
"loss": 0.0067, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 123.9, |
|
"learning_rate": 9.87421052631579e-05, |
|
"loss": 0.0065, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 124.0, |
|
"learning_rate": 9.873684210526316e-05, |
|
"loss": 0.0054, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 124.0, |
|
"eval_accuracy_safe": 0.8388672364408772, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9959063871858977, |
|
"eval_iou_safe": 0.7394206705324877, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9910458289201084, |
|
"eval_loss": 0.05043962970376015, |
|
"eval_mean_accuracy": 0.9173868118133874, |
|
"eval_mean_iou": 0.865233249726298, |
|
"eval_overall_accuracy": 0.9912677024727437, |
|
"eval_runtime": 9.6912, |
|
"eval_samples_per_second": 6.913, |
|
"eval_steps_per_second": 0.516, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 124.1, |
|
"learning_rate": 9.873157894736843e-05, |
|
"loss": 0.0069, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 124.2, |
|
"learning_rate": 9.872631578947369e-05, |
|
"loss": 0.0058, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 124.3, |
|
"learning_rate": 9.872105263157895e-05, |
|
"loss": 0.007, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 124.4, |
|
"learning_rate": 9.871578947368421e-05, |
|
"loss": 0.0083, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 124.5, |
|
"learning_rate": 9.871052631578948e-05, |
|
"loss": 0.0066, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 124.6, |
|
"learning_rate": 9.870526315789475e-05, |
|
"loss": 0.0052, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 124.7, |
|
"learning_rate": 9.87e-05, |
|
"loss": 0.0062, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 124.8, |
|
"learning_rate": 9.869473684210528e-05, |
|
"loss": 0.0071, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 124.9, |
|
"learning_rate": 9.868947368421052e-05, |
|
"loss": 0.0101, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 125.0, |
|
"learning_rate": 9.868421052631579e-05, |
|
"loss": 0.0049, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 125.1, |
|
"learning_rate": 9.867894736842106e-05, |
|
"loss": 0.0051, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 125.2, |
|
"learning_rate": 9.867368421052631e-05, |
|
"loss": 0.0067, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 125.3, |
|
"learning_rate": 9.866842105263159e-05, |
|
"loss": 0.0054, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 125.4, |
|
"learning_rate": 9.866315789473685e-05, |
|
"loss": 0.007, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 125.5, |
|
"learning_rate": 9.86578947368421e-05, |
|
"loss": 0.0058, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 125.6, |
|
"learning_rate": 9.865263157894737e-05, |
|
"loss": 0.0071, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 125.7, |
|
"learning_rate": 9.864736842105264e-05, |
|
"loss": 0.0073, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 125.8, |
|
"learning_rate": 9.86421052631579e-05, |
|
"loss": 0.0081, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 125.9, |
|
"learning_rate": 9.863684210526316e-05, |
|
"loss": 0.0074, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 126.0, |
|
"learning_rate": 9.863157894736842e-05, |
|
"loss": 0.0054, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 126.0, |
|
"eval_accuracy_safe": 0.8460800074016677, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9957325516464038, |
|
"eval_iou_safe": 0.7420428035297697, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9910893621494099, |
|
"eval_loss": 0.05023125186562538, |
|
"eval_mean_accuracy": 0.9209062795240357, |
|
"eval_mean_iou": 0.8665660828395898, |
|
"eval_overall_accuracy": 0.9913120554454291, |
|
"eval_runtime": 9.8617, |
|
"eval_samples_per_second": 6.794, |
|
"eval_steps_per_second": 0.507, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 126.1, |
|
"learning_rate": 9.862631578947369e-05, |
|
"loss": 0.0054, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 126.2, |
|
"learning_rate": 9.862105263157895e-05, |
|
"loss": 0.0081, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 126.3, |
|
"learning_rate": 9.861578947368421e-05, |
|
"loss": 0.0066, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 126.4, |
|
"learning_rate": 9.861052631578947e-05, |
|
"loss": 0.0077, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 126.5, |
|
"learning_rate": 9.860526315789475e-05, |
|
"loss": 0.006, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 126.6, |
|
"learning_rate": 9.86e-05, |
|
"loss": 0.0056, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 126.7, |
|
"learning_rate": 9.859473684210527e-05, |
|
"loss": 0.0064, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 126.8, |
|
"learning_rate": 9.858947368421054e-05, |
|
"loss": 0.0058, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 126.9, |
|
"learning_rate": 9.858421052631578e-05, |
|
"loss": 0.008, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 127.0, |
|
"learning_rate": 9.857894736842106e-05, |
|
"loss": 0.0073, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 127.1, |
|
"learning_rate": 9.857368421052632e-05, |
|
"loss": 0.0062, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 127.2, |
|
"learning_rate": 9.856842105263159e-05, |
|
"loss": 0.0074, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 127.3, |
|
"learning_rate": 9.856315789473685e-05, |
|
"loss": 0.0057, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 127.4, |
|
"learning_rate": 9.855789473684211e-05, |
|
"loss": 0.0069, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 127.5, |
|
"learning_rate": 9.855263157894737e-05, |
|
"loss": 0.0058, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 127.6, |
|
"learning_rate": 9.854736842105263e-05, |
|
"loss": 0.0064, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 127.7, |
|
"learning_rate": 9.85421052631579e-05, |
|
"loss": 0.0072, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 127.8, |
|
"learning_rate": 9.853684210526316e-05, |
|
"loss": 0.0081, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 127.9, |
|
"learning_rate": 9.853157894736844e-05, |
|
"loss": 0.007, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 128.0, |
|
"learning_rate": 9.852631578947368e-05, |
|
"loss": 0.0092, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 128.0, |
|
"eval_accuracy_safe": 0.8491852382990043, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9953976703573619, |
|
"eval_iou_safe": 0.7376483470852979, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9908492562294104, |
|
"eval_loss": 0.05396096780896187, |
|
"eval_mean_accuracy": 0.922291454328183, |
|
"eval_mean_iou": 0.8642488016573542, |
|
"eval_overall_accuracy": 0.9910787895544252, |
|
"eval_runtime": 9.656, |
|
"eval_samples_per_second": 6.939, |
|
"eval_steps_per_second": 0.518, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 128.1, |
|
"learning_rate": 9.852105263157895e-05, |
|
"loss": 0.0066, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 128.2, |
|
"learning_rate": 9.851578947368422e-05, |
|
"loss": 0.007, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 128.3, |
|
"learning_rate": 9.851052631578947e-05, |
|
"loss": 0.0067, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 128.4, |
|
"learning_rate": 9.850526315789475e-05, |
|
"loss": 0.0078, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 128.5, |
|
"learning_rate": 9.850000000000001e-05, |
|
"loss": 0.0052, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 128.6, |
|
"learning_rate": 9.849473684210527e-05, |
|
"loss": 0.0057, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 128.7, |
|
"learning_rate": 9.848947368421053e-05, |
|
"loss": 0.0071, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 128.8, |
|
"learning_rate": 9.84842105263158e-05, |
|
"loss": 0.0064, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 128.9, |
|
"learning_rate": 9.847894736842106e-05, |
|
"loss": 0.0068, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 129.0, |
|
"learning_rate": 9.847368421052632e-05, |
|
"loss": 0.0076, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 129.1, |
|
"learning_rate": 9.846842105263158e-05, |
|
"loss": 0.0081, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 129.2, |
|
"learning_rate": 9.846315789473685e-05, |
|
"loss": 0.0079, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 129.3, |
|
"learning_rate": 9.845789473684211e-05, |
|
"loss": 0.0069, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 129.4, |
|
"learning_rate": 9.845263157894737e-05, |
|
"loss": 0.0058, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 129.5, |
|
"learning_rate": 9.844736842105263e-05, |
|
"loss": 0.0058, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 129.6, |
|
"learning_rate": 9.844210526315791e-05, |
|
"loss": 0.0054, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 129.7, |
|
"learning_rate": 9.843684210526316e-05, |
|
"loss": 0.0065, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 129.8, |
|
"learning_rate": 9.843157894736843e-05, |
|
"loss": 0.0072, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 129.9, |
|
"learning_rate": 9.84263157894737e-05, |
|
"loss": 0.0055, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 130.0, |
|
"learning_rate": 9.842105263157894e-05, |
|
"loss": 0.007, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 130.0, |
|
"eval_accuracy_safe": 0.8458968932270886, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.995481801360951, |
|
"eval_iou_safe": 0.7365604030788132, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9908342844396093, |
|
"eval_loss": 0.05330672860145569, |
|
"eval_mean_accuracy": 0.9206893472940199, |
|
"eval_mean_iou": 0.8636973437592113, |
|
"eval_overall_accuracy": 0.9910633030222423, |
|
"eval_runtime": 9.9044, |
|
"eval_samples_per_second": 6.765, |
|
"eval_steps_per_second": 0.505, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 130.1, |
|
"learning_rate": 9.841578947368422e-05, |
|
"loss": 0.0058, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 130.2, |
|
"learning_rate": 9.841052631578948e-05, |
|
"loss": 0.0059, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 130.3, |
|
"learning_rate": 9.840526315789474e-05, |
|
"loss": 0.0065, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 130.4, |
|
"learning_rate": 9.84e-05, |
|
"loss": 0.0068, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 130.5, |
|
"learning_rate": 9.839473684210527e-05, |
|
"loss": 0.0068, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 130.6, |
|
"learning_rate": 9.838947368421053e-05, |
|
"loss": 0.0054, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 130.7, |
|
"learning_rate": 9.83842105263158e-05, |
|
"loss": 0.0056, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 130.8, |
|
"learning_rate": 9.837894736842106e-05, |
|
"loss": 0.0066, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 130.9, |
|
"learning_rate": 9.837368421052632e-05, |
|
"loss": 0.0078, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 131.0, |
|
"learning_rate": 9.83684210526316e-05, |
|
"loss": 0.0068, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 131.1, |
|
"learning_rate": 9.836315789473684e-05, |
|
"loss": 0.0065, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 131.2, |
|
"learning_rate": 9.83578947368421e-05, |
|
"loss": 0.0058, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 131.3, |
|
"learning_rate": 9.835263157894737e-05, |
|
"loss": 0.0059, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 131.4, |
|
"learning_rate": 9.834736842105263e-05, |
|
"loss": 0.0075, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 131.5, |
|
"learning_rate": 9.83421052631579e-05, |
|
"loss": 0.0068, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 131.6, |
|
"learning_rate": 9.833684210526315e-05, |
|
"loss": 0.0074, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 131.7, |
|
"learning_rate": 9.833157894736843e-05, |
|
"loss": 0.0067, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 131.8, |
|
"learning_rate": 9.832631578947369e-05, |
|
"loss": 0.0066, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 131.9, |
|
"learning_rate": 9.832105263157894e-05, |
|
"loss": 0.0071, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 132.0, |
|
"learning_rate": 9.831578947368422e-05, |
|
"loss": 0.0063, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 132.0, |
|
"eval_accuracy_safe": 0.8567468899503086, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9951015104507251, |
|
"eval_iou_safe": 0.7379793324212679, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9907814434418125, |
|
"eval_loss": 0.05263291671872139, |
|
"eval_mean_accuracy": 0.9259242002005168, |
|
"eval_mean_iou": 0.8643803879315402, |
|
"eval_overall_accuracy": 0.9910147368018307, |
|
"eval_runtime": 9.7459, |
|
"eval_samples_per_second": 6.875, |
|
"eval_steps_per_second": 0.513, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 132.1, |
|
"learning_rate": 9.831052631578948e-05, |
|
"loss": 0.0071, |
|
"step": 1321 |
|
}, |
|
{ |
|
"epoch": 132.2, |
|
"learning_rate": 9.830526315789474e-05, |
|
"loss": 0.0069, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 132.3, |
|
"learning_rate": 9.83e-05, |
|
"loss": 0.0066, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 132.4, |
|
"learning_rate": 9.829473684210527e-05, |
|
"loss": 0.0056, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 132.5, |
|
"learning_rate": 9.828947368421053e-05, |
|
"loss": 0.0057, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 132.6, |
|
"learning_rate": 9.828421052631579e-05, |
|
"loss": 0.0104, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 132.7, |
|
"learning_rate": 9.827894736842105e-05, |
|
"loss": 0.0066, |
|
"step": 1327 |
|
}, |
|
{ |
|
"epoch": 132.8, |
|
"learning_rate": 9.827368421052632e-05, |
|
"loss": 0.0059, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 132.9, |
|
"learning_rate": 9.826842105263159e-05, |
|
"loss": 0.0067, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 133.0, |
|
"learning_rate": 9.826315789473684e-05, |
|
"loss": 0.0083, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 133.1, |
|
"learning_rate": 9.825789473684212e-05, |
|
"loss": 0.0063, |
|
"step": 1331 |
|
}, |
|
{ |
|
"epoch": 133.2, |
|
"learning_rate": 9.825263157894738e-05, |
|
"loss": 0.0065, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 133.3, |
|
"learning_rate": 9.824736842105263e-05, |
|
"loss": 0.0083, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 133.4, |
|
"learning_rate": 9.82421052631579e-05, |
|
"loss": 0.0078, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 133.5, |
|
"learning_rate": 9.823684210526317e-05, |
|
"loss": 0.0066, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 133.6, |
|
"learning_rate": 9.823157894736843e-05, |
|
"loss": 0.0111, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 133.7, |
|
"learning_rate": 9.822631578947369e-05, |
|
"loss": 0.0073, |
|
"step": 1337 |
|
}, |
|
{ |
|
"epoch": 133.8, |
|
"learning_rate": 9.822105263157895e-05, |
|
"loss": 0.0076, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 133.9, |
|
"learning_rate": 9.821578947368421e-05, |
|
"loss": 0.0191, |
|
"step": 1339 |
|
}, |
|
{ |
|
"epoch": 134.0, |
|
"learning_rate": 9.821052631578948e-05, |
|
"loss": 0.0101, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 134.0, |
|
"eval_accuracy_safe": 0.816633320611717, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9968146382783394, |
|
"eval_iou_safe": 0.7392670686935522, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9912820769829505, |
|
"eval_loss": 0.046224527060985565, |
|
"eval_mean_accuracy": 0.9067239794450281, |
|
"eval_mean_iou": 0.8652745728382514, |
|
"eval_overall_accuracy": 0.991492371060955, |
|
"eval_runtime": 9.7164, |
|
"eval_samples_per_second": 6.896, |
|
"eval_steps_per_second": 0.515, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 134.1, |
|
"learning_rate": 9.820526315789474e-05, |
|
"loss": 0.0079, |
|
"step": 1341 |
|
}, |
|
{ |
|
"epoch": 134.2, |
|
"learning_rate": 9.82e-05, |
|
"loss": 0.0087, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 134.3, |
|
"learning_rate": 9.819473684210528e-05, |
|
"loss": 0.0081, |
|
"step": 1343 |
|
}, |
|
{ |
|
"epoch": 134.4, |
|
"learning_rate": 9.818947368421053e-05, |
|
"loss": 0.0252, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 134.5, |
|
"learning_rate": 9.818421052631579e-05, |
|
"loss": 0.0115, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 134.6, |
|
"learning_rate": 9.817894736842106e-05, |
|
"loss": 0.0102, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 134.7, |
|
"learning_rate": 9.817368421052631e-05, |
|
"loss": 0.0129, |
|
"step": 1347 |
|
}, |
|
{ |
|
"epoch": 134.8, |
|
"learning_rate": 9.816842105263159e-05, |
|
"loss": 0.0308, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 134.9, |
|
"learning_rate": 9.816315789473685e-05, |
|
"loss": 0.0124, |
|
"step": 1349 |
|
}, |
|
{ |
|
"epoch": 135.0, |
|
"learning_rate": 9.815789473684211e-05, |
|
"loss": 0.016, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 135.1, |
|
"learning_rate": 9.815263157894738e-05, |
|
"loss": 0.0105, |
|
"step": 1351 |
|
}, |
|
{ |
|
"epoch": 135.2, |
|
"learning_rate": 9.814736842105264e-05, |
|
"loss": 0.0092, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 135.3, |
|
"learning_rate": 9.81421052631579e-05, |
|
"loss": 0.0171, |
|
"step": 1353 |
|
}, |
|
{ |
|
"epoch": 135.4, |
|
"learning_rate": 9.813684210526316e-05, |
|
"loss": 0.0116, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 135.5, |
|
"learning_rate": 9.813157894736843e-05, |
|
"loss": 0.0148, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 135.6, |
|
"learning_rate": 9.812631578947369e-05, |
|
"loss": 0.0111, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 135.7, |
|
"learning_rate": 9.812105263157895e-05, |
|
"loss": 0.0139, |
|
"step": 1357 |
|
}, |
|
{ |
|
"epoch": 135.8, |
|
"learning_rate": 9.811578947368421e-05, |
|
"loss": 0.033, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 135.9, |
|
"learning_rate": 9.811052631578947e-05, |
|
"loss": 0.0113, |
|
"step": 1359 |
|
}, |
|
{ |
|
"epoch": 136.0, |
|
"learning_rate": 9.810526315789475e-05, |
|
"loss": 0.0183, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 136.0, |
|
"eval_accuracy_safe": 0.8089367427265123, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9958718312855394, |
|
"eval_iou_safe": 0.7123303946961956, |
|
"eval_iou_unlabeled": 0.0, |
|
"eval_iou_unsafe": 0.990113841776438, |
|
"eval_loss": 0.05164387822151184, |
|
"eval_mean_accuracy": 0.9024042870060258, |
|
"eval_mean_iou": 0.5674814121575446, |
|
"eval_overall_accuracy": 0.9903500685051306, |
|
"eval_runtime": 9.7346, |
|
"eval_samples_per_second": 6.883, |
|
"eval_steps_per_second": 0.514, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 136.1, |
|
"learning_rate": 9.81e-05, |
|
"loss": 0.0125, |
|
"step": 1361 |
|
}, |
|
{ |
|
"epoch": 136.2, |
|
"learning_rate": 9.809473684210528e-05, |
|
"loss": 0.0109, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 136.3, |
|
"learning_rate": 9.808947368421054e-05, |
|
"loss": 0.0108, |
|
"step": 1363 |
|
}, |
|
{ |
|
"epoch": 136.4, |
|
"learning_rate": 9.808421052631579e-05, |
|
"loss": 0.0183, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 136.5, |
|
"learning_rate": 9.807894736842106e-05, |
|
"loss": 0.0244, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 136.6, |
|
"learning_rate": 9.807368421052631e-05, |
|
"loss": 0.0092, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 136.7, |
|
"learning_rate": 9.806842105263159e-05, |
|
"loss": 0.0144, |
|
"step": 1367 |
|
}, |
|
{ |
|
"epoch": 136.8, |
|
"learning_rate": 9.806315789473685e-05, |
|
"loss": 0.0169, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 136.9, |
|
"learning_rate": 9.805789473684211e-05, |
|
"loss": 0.0134, |
|
"step": 1369 |
|
}, |
|
{ |
|
"epoch": 137.0, |
|
"learning_rate": 9.805263157894737e-05, |
|
"loss": 0.0136, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 137.1, |
|
"learning_rate": 9.804736842105264e-05, |
|
"loss": 0.0148, |
|
"step": 1371 |
|
}, |
|
{ |
|
"epoch": 137.2, |
|
"learning_rate": 9.80421052631579e-05, |
|
"loss": 0.0111, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 137.3, |
|
"learning_rate": 9.803684210526316e-05, |
|
"loss": 0.0107, |
|
"step": 1373 |
|
}, |
|
{ |
|
"epoch": 137.4, |
|
"learning_rate": 9.803157894736844e-05, |
|
"loss": 0.0217, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 137.5, |
|
"learning_rate": 9.802631578947369e-05, |
|
"loss": 0.0135, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 137.6, |
|
"learning_rate": 9.802105263157895e-05, |
|
"loss": 0.0146, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 137.7, |
|
"learning_rate": 9.801578947368421e-05, |
|
"loss": 0.0159, |
|
"step": 1377 |
|
}, |
|
{ |
|
"epoch": 137.8, |
|
"learning_rate": 9.801052631578947e-05, |
|
"loss": 0.0182, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 137.9, |
|
"learning_rate": 9.800526315789475e-05, |
|
"loss": 0.0105, |
|
"step": 1379 |
|
}, |
|
{ |
|
"epoch": 138.0, |
|
"learning_rate": 9.8e-05, |
|
"loss": 0.0102, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 138.0, |
|
"eval_accuracy_safe": 0.7459897995767172, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9972245569129812, |
|
"eval_iou_safe": 0.6836510283355326, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.989573738778865, |
|
"eval_loss": 0.03879334032535553, |
|
"eval_mean_accuracy": 0.8716071782448491, |
|
"eval_mean_iou": 0.8366123835571988, |
|
"eval_overall_accuracy": 0.9898034850163246, |
|
"eval_runtime": 9.7495, |
|
"eval_samples_per_second": 6.872, |
|
"eval_steps_per_second": 0.513, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 138.1, |
|
"learning_rate": 9.799473684210527e-05, |
|
"loss": 0.0133, |
|
"step": 1381 |
|
}, |
|
{ |
|
"epoch": 138.2, |
|
"learning_rate": 9.798947368421054e-05, |
|
"loss": 0.014, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 138.3, |
|
"learning_rate": 9.798421052631578e-05, |
|
"loss": 0.0106, |
|
"step": 1383 |
|
}, |
|
{ |
|
"epoch": 138.4, |
|
"learning_rate": 9.797894736842106e-05, |
|
"loss": 0.0284, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 138.5, |
|
"learning_rate": 9.797368421052632e-05, |
|
"loss": 0.0121, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 138.6, |
|
"learning_rate": 9.796842105263158e-05, |
|
"loss": 0.009, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 138.7, |
|
"learning_rate": 9.796315789473685e-05, |
|
"loss": 0.0116, |
|
"step": 1387 |
|
}, |
|
{ |
|
"epoch": 138.8, |
|
"learning_rate": 9.795789473684211e-05, |
|
"loss": 0.0106, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 138.9, |
|
"learning_rate": 9.795263157894737e-05, |
|
"loss": 0.0173, |
|
"step": 1389 |
|
}, |
|
{ |
|
"epoch": 139.0, |
|
"learning_rate": 9.794736842105263e-05, |
|
"loss": 0.0119, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 139.1, |
|
"learning_rate": 9.79421052631579e-05, |
|
"loss": 0.011, |
|
"step": 1391 |
|
}, |
|
{ |
|
"epoch": 139.2, |
|
"learning_rate": 9.793684210526316e-05, |
|
"loss": 0.0089, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 139.3, |
|
"learning_rate": 9.793157894736843e-05, |
|
"loss": 0.0124, |
|
"step": 1393 |
|
}, |
|
{ |
|
"epoch": 139.4, |
|
"learning_rate": 9.792631578947368e-05, |
|
"loss": 0.0255, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 139.5, |
|
"learning_rate": 9.792105263157895e-05, |
|
"loss": 0.0135, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 139.6, |
|
"learning_rate": 9.791578947368422e-05, |
|
"loss": 0.0158, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 139.7, |
|
"learning_rate": 9.791052631578947e-05, |
|
"loss": 0.0158, |
|
"step": 1397 |
|
}, |
|
{ |
|
"epoch": 139.8, |
|
"learning_rate": 9.790526315789475e-05, |
|
"loss": 0.0181, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 139.9, |
|
"learning_rate": 9.790000000000001e-05, |
|
"loss": 0.0279, |
|
"step": 1399 |
|
}, |
|
{ |
|
"epoch": 140.0, |
|
"learning_rate": 9.789473684210527e-05, |
|
"loss": 0.0277, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 140.0, |
|
"eval_accuracy_safe": 0.9313302570152004, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9866384829760269, |
|
"eval_iou_safe": 0.6472144419574387, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.984580578335306, |
|
"eval_loss": 0.06494779139757156, |
|
"eval_mean_accuracy": 0.9589843699956137, |
|
"eval_mean_iou": 0.8158975101463724, |
|
"eval_overall_accuracy": 0.9850047666635087, |
|
"eval_runtime": 9.7405, |
|
"eval_samples_per_second": 6.879, |
|
"eval_steps_per_second": 0.513, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 140.1, |
|
"learning_rate": 9.788947368421053e-05, |
|
"loss": 0.0182, |
|
"step": 1401 |
|
}, |
|
{ |
|
"epoch": 140.2, |
|
"learning_rate": 9.78842105263158e-05, |
|
"loss": 0.0342, |
|
"step": 1402 |
|
}, |
|
{ |
|
"epoch": 140.3, |
|
"learning_rate": 9.787894736842106e-05, |
|
"loss": 0.0454, |
|
"step": 1403 |
|
}, |
|
{ |
|
"epoch": 140.4, |
|
"learning_rate": 9.787368421052632e-05, |
|
"loss": 0.0149, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 140.5, |
|
"learning_rate": 9.786842105263158e-05, |
|
"loss": 0.031, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 140.6, |
|
"learning_rate": 9.786315789473684e-05, |
|
"loss": 0.0244, |
|
"step": 1406 |
|
}, |
|
{ |
|
"epoch": 140.7, |
|
"learning_rate": 9.785789473684212e-05, |
|
"loss": 0.0192, |
|
"step": 1407 |
|
}, |
|
{ |
|
"epoch": 140.8, |
|
"learning_rate": 9.785263157894737e-05, |
|
"loss": 0.0101, |
|
"step": 1408 |
|
}, |
|
{ |
|
"epoch": 140.9, |
|
"learning_rate": 9.784736842105263e-05, |
|
"loss": 0.0135, |
|
"step": 1409 |
|
}, |
|
{ |
|
"epoch": 141.0, |
|
"learning_rate": 9.784210526315791e-05, |
|
"loss": 0.0253, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 141.1, |
|
"learning_rate": 9.783684210526316e-05, |
|
"loss": 0.012, |
|
"step": 1411 |
|
}, |
|
{ |
|
"epoch": 141.2, |
|
"learning_rate": 9.783157894736843e-05, |
|
"loss": 0.017, |
|
"step": 1412 |
|
}, |
|
{ |
|
"epoch": 141.3, |
|
"learning_rate": 9.78263157894737e-05, |
|
"loss": 0.035, |
|
"step": 1413 |
|
}, |
|
{ |
|
"epoch": 141.4, |
|
"learning_rate": 9.782105263157894e-05, |
|
"loss": 0.0118, |
|
"step": 1414 |
|
}, |
|
{ |
|
"epoch": 141.5, |
|
"learning_rate": 9.781578947368422e-05, |
|
"loss": 0.0137, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 141.6, |
|
"learning_rate": 9.781052631578948e-05, |
|
"loss": 0.0156, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 141.7, |
|
"learning_rate": 9.780526315789474e-05, |
|
"loss": 0.0244, |
|
"step": 1417 |
|
}, |
|
{ |
|
"epoch": 141.8, |
|
"learning_rate": 9.78e-05, |
|
"loss": 0.0164, |
|
"step": 1418 |
|
}, |
|
{ |
|
"epoch": 141.9, |
|
"learning_rate": 9.779473684210527e-05, |
|
"loss": 0.0377, |
|
"step": 1419 |
|
}, |
|
{ |
|
"epoch": 142.0, |
|
"learning_rate": 9.778947368421053e-05, |
|
"loss": 0.0169, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 142.0, |
|
"eval_accuracy_safe": 0.8354786604523499, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9940888289633124, |
|
"eval_iou_safe": 0.6996095597338098, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9891356201328647, |
|
"eval_loss": 0.034038759768009186, |
|
"eval_mean_accuracy": 0.9147837447078311, |
|
"eval_mean_iou": 0.8443725899333372, |
|
"eval_overall_accuracy": 0.9894037389043552, |
|
"eval_runtime": 9.7571, |
|
"eval_samples_per_second": 6.867, |
|
"eval_steps_per_second": 0.512, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 142.1, |
|
"learning_rate": 9.778421052631579e-05, |
|
"loss": 0.0106, |
|
"step": 1421 |
|
}, |
|
{ |
|
"epoch": 142.2, |
|
"learning_rate": 9.777894736842105e-05, |
|
"loss": 0.0184, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 142.3, |
|
"learning_rate": 9.777368421052632e-05, |
|
"loss": 0.0214, |
|
"step": 1423 |
|
}, |
|
{ |
|
"epoch": 142.4, |
|
"learning_rate": 9.776842105263159e-05, |
|
"loss": 0.0173, |
|
"step": 1424 |
|
}, |
|
{ |
|
"epoch": 142.5, |
|
"learning_rate": 9.776315789473684e-05, |
|
"loss": 0.0129, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 142.6, |
|
"learning_rate": 9.775789473684212e-05, |
|
"loss": 0.0097, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 142.7, |
|
"learning_rate": 9.775263157894738e-05, |
|
"loss": 0.0141, |
|
"step": 1427 |
|
}, |
|
{ |
|
"epoch": 142.8, |
|
"learning_rate": 9.774736842105263e-05, |
|
"loss": 0.0209, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 142.9, |
|
"learning_rate": 9.77421052631579e-05, |
|
"loss": 0.0142, |
|
"step": 1429 |
|
}, |
|
{ |
|
"epoch": 143.0, |
|
"learning_rate": 9.773684210526315e-05, |
|
"loss": 0.0132, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 143.1, |
|
"learning_rate": 9.773157894736843e-05, |
|
"loss": 0.016, |
|
"step": 1431 |
|
}, |
|
{ |
|
"epoch": 143.2, |
|
"learning_rate": 9.772631578947369e-05, |
|
"loss": 0.0196, |
|
"step": 1432 |
|
}, |
|
{ |
|
"epoch": 143.3, |
|
"learning_rate": 9.772105263157894e-05, |
|
"loss": 0.0139, |
|
"step": 1433 |
|
}, |
|
{ |
|
"epoch": 143.4, |
|
"learning_rate": 9.771578947368422e-05, |
|
"loss": 0.0148, |
|
"step": 1434 |
|
}, |
|
{ |
|
"epoch": 143.5, |
|
"learning_rate": 9.771052631578948e-05, |
|
"loss": 0.0133, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 143.6, |
|
"learning_rate": 9.770526315789474e-05, |
|
"loss": 0.0098, |
|
"step": 1436 |
|
}, |
|
{ |
|
"epoch": 143.7, |
|
"learning_rate": 9.77e-05, |
|
"loss": 0.0103, |
|
"step": 1437 |
|
}, |
|
{ |
|
"epoch": 143.8, |
|
"learning_rate": 9.769473684210528e-05, |
|
"loss": 0.0112, |
|
"step": 1438 |
|
}, |
|
{ |
|
"epoch": 143.9, |
|
"learning_rate": 9.768947368421053e-05, |
|
"loss": 0.0147, |
|
"step": 1439 |
|
}, |
|
{ |
|
"epoch": 144.0, |
|
"learning_rate": 9.768421052631579e-05, |
|
"loss": 0.0359, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 144.0, |
|
"eval_accuracy_safe": 0.7996711654928085, |
|
"eval_accuracy_unlabeled": NaN, |
|
"eval_accuracy_unsafe": 0.9976300167217703, |
|
"eval_iou_safe": 0.7419035833461493, |
|
"eval_iou_unlabeled": NaN, |
|
"eval_iou_unsafe": 0.9915838313134517, |
|
"eval_loss": 0.03139420226216316, |
|
"eval_mean_accuracy": 0.8986505911072894, |
|
"eval_mean_iou": 0.8667437073298006, |
|
"eval_overall_accuracy": 0.991782629667823, |
|
"eval_runtime": 9.5746, |
|
"eval_samples_per_second": 6.998, |
|
"eval_steps_per_second": 0.522, |
|
"step": 1440 |
|
} |
|
], |
|
"max_steps": 20000, |
|
"num_train_epochs": 2000, |
|
"total_flos": 8.852046300674261e+18, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|