|
{ |
|
"best_metric": 0.21080316603183746, |
|
"best_model_checkpoint": "./models/bart/bart_balanced_subset_without_title_less_than_1024_double_comment/checkpoint-431935", |
|
"epoch": 5.0, |
|
"global_step": 431935, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.9996141394731464e-05, |
|
"loss": 1.4863, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.9992282789462922e-05, |
|
"loss": 0.2641, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.9988424184194384e-05, |
|
"loss": 0.246, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.9984565578925843e-05, |
|
"loss": 0.2419, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.9980706973657305e-05, |
|
"loss": 0.2478, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.9976848368388764e-05, |
|
"loss": 0.2509, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.9972989763120226e-05, |
|
"loss": 0.2526, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.9969131157851684e-05, |
|
"loss": 0.2584, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.9965272552583146e-05, |
|
"loss": 0.2449, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.9961413947314605e-05, |
|
"loss": 0.2346, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.9957555342046067e-05, |
|
"loss": 0.239, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.9953696736777526e-05, |
|
"loss": 0.2449, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9949838131508988e-05, |
|
"loss": 0.2428, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9945979526240446e-05, |
|
"loss": 0.2435, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9942120920971908e-05, |
|
"loss": 0.2322, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9938262315703367e-05, |
|
"loss": 0.2374, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.993440371043483e-05, |
|
"loss": 0.2335, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9930545105166287e-05, |
|
"loss": 0.2344, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.992668649989775e-05, |
|
"loss": 0.2241, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9922827894629208e-05, |
|
"loss": 0.2374, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.991896928936067e-05, |
|
"loss": 0.2461, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.991511068409213e-05, |
|
"loss": 0.243, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.991125207882359e-05, |
|
"loss": 0.2309, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.990739347355505e-05, |
|
"loss": 0.2403, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.990353486828651e-05, |
|
"loss": 0.2425, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.989967626301797e-05, |
|
"loss": 0.2204, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9895817657749432e-05, |
|
"loss": 0.237, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.989195905248089e-05, |
|
"loss": 0.2314, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9888100447212353e-05, |
|
"loss": 0.2321, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.988424184194381e-05, |
|
"loss": 0.2366, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.9880383236675273e-05, |
|
"loss": 0.2368, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.9876524631406732e-05, |
|
"loss": 0.2292, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.9872666026138194e-05, |
|
"loss": 0.2334, |
|
"step": 16500 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.9868807420869653e-05, |
|
"loss": 0.237, |
|
"step": 17000 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.9864948815601115e-05, |
|
"loss": 0.237, |
|
"step": 17500 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.9861090210332573e-05, |
|
"loss": 0.2338, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.9857231605064035e-05, |
|
"loss": 0.2305, |
|
"step": 18500 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.9853372999795494e-05, |
|
"loss": 0.2334, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.9849514394526956e-05, |
|
"loss": 0.2396, |
|
"step": 19500 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.9845655789258415e-05, |
|
"loss": 0.2432, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.9841797183989877e-05, |
|
"loss": 0.2294, |
|
"step": 20500 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.9837938578721335e-05, |
|
"loss": 0.2305, |
|
"step": 21000 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.9834079973452797e-05, |
|
"loss": 0.2415, |
|
"step": 21500 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.9830221368184256e-05, |
|
"loss": 0.2412, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.9826362762915718e-05, |
|
"loss": 0.2329, |
|
"step": 22500 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.9822504157647176e-05, |
|
"loss": 0.2354, |
|
"step": 23000 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.981864555237864e-05, |
|
"loss": 0.2372, |
|
"step": 23500 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9814786947110097e-05, |
|
"loss": 0.2401, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.981092834184156e-05, |
|
"loss": 0.2333, |
|
"step": 24500 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.9807069736573018e-05, |
|
"loss": 0.2301, |
|
"step": 25000 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.980321113130448e-05, |
|
"loss": 0.2423, |
|
"step": 25500 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.979935252603594e-05, |
|
"loss": 0.2171, |
|
"step": 26000 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.97954939207674e-05, |
|
"loss": 0.2347, |
|
"step": 26500 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.979163531549886e-05, |
|
"loss": 0.2294, |
|
"step": 27000 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.978777671023032e-05, |
|
"loss": 0.2286, |
|
"step": 27500 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.978391810496178e-05, |
|
"loss": 0.2355, |
|
"step": 28000 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.9780059499693242e-05, |
|
"loss": 0.2211, |
|
"step": 28500 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.97762008944247e-05, |
|
"loss": 0.2222, |
|
"step": 29000 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.9772342289156162e-05, |
|
"loss": 0.2264, |
|
"step": 29500 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.976848368388762e-05, |
|
"loss": 0.2388, |
|
"step": 30000 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.9764625078619083e-05, |
|
"loss": 0.219, |
|
"step": 30500 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.9760766473350545e-05, |
|
"loss": 0.2357, |
|
"step": 31000 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.9756907868082004e-05, |
|
"loss": 0.227, |
|
"step": 31500 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.9753049262813466e-05, |
|
"loss": 0.2228, |
|
"step": 32000 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.9749190657544924e-05, |
|
"loss": 0.2368, |
|
"step": 32500 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.9745332052276386e-05, |
|
"loss": 0.2261, |
|
"step": 33000 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.9741473447007845e-05, |
|
"loss": 0.2299, |
|
"step": 33500 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.9737614841739307e-05, |
|
"loss": 0.2293, |
|
"step": 34000 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.9733756236470766e-05, |
|
"loss": 0.2346, |
|
"step": 34500 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.9729897631202228e-05, |
|
"loss": 0.2277, |
|
"step": 35000 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.9726039025933686e-05, |
|
"loss": 0.2304, |
|
"step": 35500 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.9722180420665148e-05, |
|
"loss": 0.2336, |
|
"step": 36000 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.9718321815396607e-05, |
|
"loss": 0.243, |
|
"step": 36500 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.971446321012807e-05, |
|
"loss": 0.2327, |
|
"step": 37000 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.9710604604859528e-05, |
|
"loss": 0.2234, |
|
"step": 37500 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.970674599959099e-05, |
|
"loss": 0.2265, |
|
"step": 38000 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.9702887394322448e-05, |
|
"loss": 0.2273, |
|
"step": 38500 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.969902878905391e-05, |
|
"loss": 0.2335, |
|
"step": 39000 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.969517018378537e-05, |
|
"loss": 0.2294, |
|
"step": 39500 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.969131157851683e-05, |
|
"loss": 0.2265, |
|
"step": 40000 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.968745297324829e-05, |
|
"loss": 0.2246, |
|
"step": 40500 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.968359436797975e-05, |
|
"loss": 0.2334, |
|
"step": 41000 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.967973576271121e-05, |
|
"loss": 0.2305, |
|
"step": 41500 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.9675877157442672e-05, |
|
"loss": 0.2199, |
|
"step": 42000 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.9672018552174134e-05, |
|
"loss": 0.2193, |
|
"step": 42500 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9668159946905593e-05, |
|
"loss": 0.233, |
|
"step": 43000 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9664301341637055e-05, |
|
"loss": 0.227, |
|
"step": 43500 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.9660442736368513e-05, |
|
"loss": 0.2221, |
|
"step": 44000 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.9656584131099975e-05, |
|
"loss": 0.2312, |
|
"step": 44500 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.9652725525831434e-05, |
|
"loss": 0.2187, |
|
"step": 45000 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9648866920562896e-05, |
|
"loss": 0.2156, |
|
"step": 45500 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9645008315294355e-05, |
|
"loss": 0.2227, |
|
"step": 46000 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.9641149710025817e-05, |
|
"loss": 0.2205, |
|
"step": 46500 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.9637291104757275e-05, |
|
"loss": 0.2324, |
|
"step": 47000 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.9633432499488737e-05, |
|
"loss": 0.2159, |
|
"step": 47500 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.9629573894220196e-05, |
|
"loss": 0.225, |
|
"step": 48000 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.9625715288951658e-05, |
|
"loss": 0.2124, |
|
"step": 48500 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.9621856683683117e-05, |
|
"loss": 0.231, |
|
"step": 49000 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.961799807841458e-05, |
|
"loss": 0.2264, |
|
"step": 49500 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.9614139473146037e-05, |
|
"loss": 0.2243, |
|
"step": 50000 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.96102808678775e-05, |
|
"loss": 0.2271, |
|
"step": 50500 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.9606422262608958e-05, |
|
"loss": 0.2227, |
|
"step": 51000 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.960256365734042e-05, |
|
"loss": 0.234, |
|
"step": 51500 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.959870505207188e-05, |
|
"loss": 0.2233, |
|
"step": 52000 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.959484644680334e-05, |
|
"loss": 0.2253, |
|
"step": 52500 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.9590987841534803e-05, |
|
"loss": 0.2229, |
|
"step": 53000 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.958712923626626e-05, |
|
"loss": 0.2359, |
|
"step": 53500 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.9583270630997723e-05, |
|
"loss": 0.2232, |
|
"step": 54000 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.9579412025729182e-05, |
|
"loss": 0.2238, |
|
"step": 54500 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.9575553420460644e-05, |
|
"loss": 0.2239, |
|
"step": 55000 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.9571694815192103e-05, |
|
"loss": 0.2401, |
|
"step": 55500 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.9567836209923565e-05, |
|
"loss": 0.2299, |
|
"step": 56000 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.9563977604655023e-05, |
|
"loss": 0.223, |
|
"step": 56500 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.9560118999386485e-05, |
|
"loss": 0.2191, |
|
"step": 57000 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.9556260394117944e-05, |
|
"loss": 0.2291, |
|
"step": 57500 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.9552401788849406e-05, |
|
"loss": 0.2176, |
|
"step": 58000 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.9548543183580864e-05, |
|
"loss": 0.2133, |
|
"step": 58500 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.9544684578312326e-05, |
|
"loss": 0.218, |
|
"step": 59000 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.9540825973043785e-05, |
|
"loss": 0.2279, |
|
"step": 59500 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.9536967367775247e-05, |
|
"loss": 0.2174, |
|
"step": 60000 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.9533108762506706e-05, |
|
"loss": 0.2236, |
|
"step": 60500 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.9529250157238168e-05, |
|
"loss": 0.2231, |
|
"step": 61000 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.9525391551969626e-05, |
|
"loss": 0.2193, |
|
"step": 61500 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.952153294670109e-05, |
|
"loss": 0.213, |
|
"step": 62000 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.9517674341432547e-05, |
|
"loss": 0.2459, |
|
"step": 62500 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.951381573616401e-05, |
|
"loss": 0.2233, |
|
"step": 63000 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.9509957130895468e-05, |
|
"loss": 0.2391, |
|
"step": 63500 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.950609852562693e-05, |
|
"loss": 0.2248, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.950223992035839e-05, |
|
"loss": 0.225, |
|
"step": 64500 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.949838131508985e-05, |
|
"loss": 0.2222, |
|
"step": 65000 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.949452270982131e-05, |
|
"loss": 0.2346, |
|
"step": 65500 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.949066410455277e-05, |
|
"loss": 0.2231, |
|
"step": 66000 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.948680549928423e-05, |
|
"loss": 0.2254, |
|
"step": 66500 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.948294689401569e-05, |
|
"loss": 0.228, |
|
"step": 67000 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.947908828874715e-05, |
|
"loss": 0.2155, |
|
"step": 67500 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.9475229683478612e-05, |
|
"loss": 0.228, |
|
"step": 68000 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.9471371078210074e-05, |
|
"loss": 0.2197, |
|
"step": 68500 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.9467512472941533e-05, |
|
"loss": 0.2074, |
|
"step": 69000 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.9463653867672995e-05, |
|
"loss": 0.2183, |
|
"step": 69500 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.9459795262404454e-05, |
|
"loss": 0.2262, |
|
"step": 70000 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.9455936657135916e-05, |
|
"loss": 0.2234, |
|
"step": 70500 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.9452078051867374e-05, |
|
"loss": 0.2309, |
|
"step": 71000 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.9448219446598836e-05, |
|
"loss": 0.2221, |
|
"step": 71500 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.9444360841330295e-05, |
|
"loss": 0.2099, |
|
"step": 72000 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.9440502236061757e-05, |
|
"loss": 0.224, |
|
"step": 72500 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.9436643630793215e-05, |
|
"loss": 0.2205, |
|
"step": 73000 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.9432785025524678e-05, |
|
"loss": 0.2203, |
|
"step": 73500 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.9428926420256136e-05, |
|
"loss": 0.231, |
|
"step": 74000 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.9425067814987598e-05, |
|
"loss": 0.2247, |
|
"step": 74500 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.9421209209719057e-05, |
|
"loss": 0.2291, |
|
"step": 75000 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.941735060445052e-05, |
|
"loss": 0.2284, |
|
"step": 75500 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.9413491999181977e-05, |
|
"loss": 0.2292, |
|
"step": 76000 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.940963339391344e-05, |
|
"loss": 0.2257, |
|
"step": 76500 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.9405774788644898e-05, |
|
"loss": 0.2409, |
|
"step": 77000 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.940191618337636e-05, |
|
"loss": 0.2293, |
|
"step": 77500 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.939805757810782e-05, |
|
"loss": 0.2214, |
|
"step": 78000 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.939419897283928e-05, |
|
"loss": 0.2246, |
|
"step": 78500 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.939034036757074e-05, |
|
"loss": 0.2216, |
|
"step": 79000 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.93864817623022e-05, |
|
"loss": 0.2226, |
|
"step": 79500 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.938262315703366e-05, |
|
"loss": 0.2324, |
|
"step": 80000 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.9378764551765122e-05, |
|
"loss": 0.2224, |
|
"step": 80500 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.937490594649658e-05, |
|
"loss": 0.2378, |
|
"step": 81000 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.9371047341228043e-05, |
|
"loss": 0.2169, |
|
"step": 81500 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.93671887359595e-05, |
|
"loss": 0.2206, |
|
"step": 82000 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.9363330130690963e-05, |
|
"loss": 0.2191, |
|
"step": 82500 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.9359471525422422e-05, |
|
"loss": 0.2363, |
|
"step": 83000 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.9355612920153884e-05, |
|
"loss": 0.2175, |
|
"step": 83500 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.9351754314885343e-05, |
|
"loss": 0.21, |
|
"step": 84000 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.9347895709616805e-05, |
|
"loss": 0.2203, |
|
"step": 84500 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.9344037104348263e-05, |
|
"loss": 0.2166, |
|
"step": 85000 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.9340178499079725e-05, |
|
"loss": 0.211, |
|
"step": 85500 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.9336319893811184e-05, |
|
"loss": 0.2254, |
|
"step": 86000 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_bart_score": -6.78, |
|
"eval_bertscore_f1": 0.75, |
|
"eval_bertscore_precision": 0.79, |
|
"eval_bertscore_recall": 0.72, |
|
"eval_bleu": 0.0, |
|
"eval_loss": 0.21441325545310974, |
|
"eval_meteor": 0.09, |
|
"eval_rogue_score": { |
|
"rouge1": 0.1571033255826546, |
|
"rouge2": 0.024564791577206757, |
|
"rougeL": 0.12796016101205676, |
|
"rougeLsum": 0.12797893703375637 |
|
}, |
|
"eval_runtime": 5950.6237, |
|
"eval_samples_per_second": 3.629, |
|
"eval_simple_accuracy": 0.74, |
|
"eval_steps_per_second": 1.815, |
|
"step": 86387 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.9332461288542646e-05, |
|
"loss": 0.2202, |
|
"step": 86500 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.9328602683274105e-05, |
|
"loss": 0.214, |
|
"step": 87000 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.9324744078005567e-05, |
|
"loss": 0.2221, |
|
"step": 87500 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.9320885472737025e-05, |
|
"loss": 0.2164, |
|
"step": 88000 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.9317026867468487e-05, |
|
"loss": 0.2125, |
|
"step": 88500 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.9313168262199946e-05, |
|
"loss": 0.221, |
|
"step": 89000 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.9309309656931408e-05, |
|
"loss": 0.1987, |
|
"step": 89500 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.9305451051662866e-05, |
|
"loss": 0.213, |
|
"step": 90000 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.930159244639433e-05, |
|
"loss": 0.2199, |
|
"step": 90500 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.9297733841125787e-05, |
|
"loss": 0.2154, |
|
"step": 91000 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.929387523585725e-05, |
|
"loss": 0.2205, |
|
"step": 91500 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.9290016630588708e-05, |
|
"loss": 0.2095, |
|
"step": 92000 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.928615802532017e-05, |
|
"loss": 0.2091, |
|
"step": 92500 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.928229942005163e-05, |
|
"loss": 0.2237, |
|
"step": 93000 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.927844081478309e-05, |
|
"loss": 0.2231, |
|
"step": 93500 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.927458220951455e-05, |
|
"loss": 0.2099, |
|
"step": 94000 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.927072360424601e-05, |
|
"loss": 0.2142, |
|
"step": 94500 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.926686499897747e-05, |
|
"loss": 0.2092, |
|
"step": 95000 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.926300639370893e-05, |
|
"loss": 0.2152, |
|
"step": 95500 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.925914778844039e-05, |
|
"loss": 0.2156, |
|
"step": 96000 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.9255289183171852e-05, |
|
"loss": 0.2097, |
|
"step": 96500 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.925143057790331e-05, |
|
"loss": 0.2114, |
|
"step": 97000 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.9247571972634773e-05, |
|
"loss": 0.2254, |
|
"step": 97500 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.924371336736623e-05, |
|
"loss": 0.2287, |
|
"step": 98000 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.9239854762097694e-05, |
|
"loss": 0.2145, |
|
"step": 98500 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.9235996156829152e-05, |
|
"loss": 0.2137, |
|
"step": 99000 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.9232137551560614e-05, |
|
"loss": 0.213, |
|
"step": 99500 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.9228278946292073e-05, |
|
"loss": 0.2161, |
|
"step": 100000 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.9224420341023535e-05, |
|
"loss": 0.2103, |
|
"step": 100500 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.9220561735754994e-05, |
|
"loss": 0.2161, |
|
"step": 101000 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.9216703130486456e-05, |
|
"loss": 0.2086, |
|
"step": 101500 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.9212844525217914e-05, |
|
"loss": 0.2084, |
|
"step": 102000 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.9208985919949376e-05, |
|
"loss": 0.2172, |
|
"step": 102500 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.9205127314680838e-05, |
|
"loss": 0.2099, |
|
"step": 103000 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.9201268709412297e-05, |
|
"loss": 0.2137, |
|
"step": 103500 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.919741010414376e-05, |
|
"loss": 0.2189, |
|
"step": 104000 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.9193551498875217e-05, |
|
"loss": 0.2327, |
|
"step": 104500 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.918969289360668e-05, |
|
"loss": 0.2216, |
|
"step": 105000 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.9185834288338138e-05, |
|
"loss": 0.2269, |
|
"step": 105500 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.91819756830696e-05, |
|
"loss": 0.2005, |
|
"step": 106000 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.917811707780106e-05, |
|
"loss": 0.2256, |
|
"step": 106500 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.917425847253252e-05, |
|
"loss": 0.216, |
|
"step": 107000 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.917039986726398e-05, |
|
"loss": 0.2208, |
|
"step": 107500 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.916654126199544e-05, |
|
"loss": 0.2083, |
|
"step": 108000 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.91626826567269e-05, |
|
"loss": 0.2169, |
|
"step": 108500 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.9158824051458362e-05, |
|
"loss": 0.2078, |
|
"step": 109000 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.915496544618982e-05, |
|
"loss": 0.2169, |
|
"step": 109500 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.9151106840921283e-05, |
|
"loss": 0.2129, |
|
"step": 110000 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.914724823565274e-05, |
|
"loss": 0.2267, |
|
"step": 110500 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.9143389630384203e-05, |
|
"loss": 0.2084, |
|
"step": 111000 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.9139531025115662e-05, |
|
"loss": 0.2116, |
|
"step": 111500 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.9135672419847124e-05, |
|
"loss": 0.2181, |
|
"step": 112000 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.9131813814578583e-05, |
|
"loss": 0.2233, |
|
"step": 112500 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.9127955209310045e-05, |
|
"loss": 0.2162, |
|
"step": 113000 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.9124096604041503e-05, |
|
"loss": 0.2271, |
|
"step": 113500 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.9120237998772965e-05, |
|
"loss": 0.2173, |
|
"step": 114000 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.9116379393504424e-05, |
|
"loss": 0.2141, |
|
"step": 114500 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.9112520788235886e-05, |
|
"loss": 0.2227, |
|
"step": 115000 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.9108662182967345e-05, |
|
"loss": 0.2234, |
|
"step": 115500 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.9104803577698807e-05, |
|
"loss": 0.248, |
|
"step": 116000 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.9100944972430265e-05, |
|
"loss": 0.2096, |
|
"step": 116500 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.9097086367161727e-05, |
|
"loss": 0.2293, |
|
"step": 117000 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.9093227761893186e-05, |
|
"loss": 0.2123, |
|
"step": 117500 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.9089369156624648e-05, |
|
"loss": 0.2301, |
|
"step": 118000 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.9085510551356107e-05, |
|
"loss": 0.2186, |
|
"step": 118500 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.908165194608757e-05, |
|
"loss": 0.2147, |
|
"step": 119000 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.9077793340819027e-05, |
|
"loss": 0.2113, |
|
"step": 119500 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.907393473555049e-05, |
|
"loss": 0.2166, |
|
"step": 120000 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.9070076130281948e-05, |
|
"loss": 0.2179, |
|
"step": 120500 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.906621752501341e-05, |
|
"loss": 0.2195, |
|
"step": 121000 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.906235891974487e-05, |
|
"loss": 0.2145, |
|
"step": 121500 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.905850031447633e-05, |
|
"loss": 0.2066, |
|
"step": 122000 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.905464170920779e-05, |
|
"loss": 0.2126, |
|
"step": 122500 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.905078310393925e-05, |
|
"loss": 0.213, |
|
"step": 123000 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.904692449867071e-05, |
|
"loss": 0.2131, |
|
"step": 123500 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.9043065893402172e-05, |
|
"loss": 0.2212, |
|
"step": 124000 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.903920728813363e-05, |
|
"loss": 0.2128, |
|
"step": 124500 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.9035348682865092e-05, |
|
"loss": 0.21, |
|
"step": 125000 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.9031490077596554e-05, |
|
"loss": 0.2242, |
|
"step": 125500 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.9027631472328013e-05, |
|
"loss": 0.2108, |
|
"step": 126000 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.9023772867059475e-05, |
|
"loss": 0.2146, |
|
"step": 126500 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.9019914261790934e-05, |
|
"loss": 0.2113, |
|
"step": 127000 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.9016055656522396e-05, |
|
"loss": 0.2182, |
|
"step": 127500 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.9012197051253854e-05, |
|
"loss": 0.2126, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.9008338445985316e-05, |
|
"loss": 0.2153, |
|
"step": 128500 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.9004479840716775e-05, |
|
"loss": 0.2256, |
|
"step": 129000 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.9000621235448237e-05, |
|
"loss": 0.2197, |
|
"step": 129500 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.8996762630179696e-05, |
|
"loss": 0.2269, |
|
"step": 130000 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.8992904024911158e-05, |
|
"loss": 0.2277, |
|
"step": 130500 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.8989045419642616e-05, |
|
"loss": 0.2209, |
|
"step": 131000 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.8985186814374078e-05, |
|
"loss": 0.2263, |
|
"step": 131500 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.8981328209105537e-05, |
|
"loss": 0.2171, |
|
"step": 132000 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.8977469603837e-05, |
|
"loss": 0.2275, |
|
"step": 132500 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.8973610998568458e-05, |
|
"loss": 0.2185, |
|
"step": 133000 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.896975239329992e-05, |
|
"loss": 0.2281, |
|
"step": 133500 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.8965893788031378e-05, |
|
"loss": 0.2125, |
|
"step": 134000 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 1.896203518276284e-05, |
|
"loss": 0.2184, |
|
"step": 134500 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 1.89581765774943e-05, |
|
"loss": 0.2189, |
|
"step": 135000 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 1.895431797222576e-05, |
|
"loss": 0.2203, |
|
"step": 135500 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 1.8950459366957223e-05, |
|
"loss": 0.2199, |
|
"step": 136000 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 1.894660076168868e-05, |
|
"loss": 0.2251, |
|
"step": 136500 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.8942742156420144e-05, |
|
"loss": 0.2167, |
|
"step": 137000 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.8938883551151602e-05, |
|
"loss": 0.2149, |
|
"step": 137500 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.8935024945883064e-05, |
|
"loss": 0.2071, |
|
"step": 138000 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.8931166340614523e-05, |
|
"loss": 0.2185, |
|
"step": 138500 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.8927307735345985e-05, |
|
"loss": 0.2177, |
|
"step": 139000 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.8923449130077443e-05, |
|
"loss": 0.2144, |
|
"step": 139500 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.8919590524808905e-05, |
|
"loss": 0.2183, |
|
"step": 140000 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.8915731919540364e-05, |
|
"loss": 0.2252, |
|
"step": 140500 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.8911873314271826e-05, |
|
"loss": 0.2193, |
|
"step": 141000 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.8908014709003285e-05, |
|
"loss": 0.2004, |
|
"step": 141500 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.8904156103734747e-05, |
|
"loss": 0.2087, |
|
"step": 142000 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.8900297498466205e-05, |
|
"loss": 0.212, |
|
"step": 142500 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.8896438893197667e-05, |
|
"loss": 0.2102, |
|
"step": 143000 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.8892580287929126e-05, |
|
"loss": 0.2225, |
|
"step": 143500 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.8888721682660588e-05, |
|
"loss": 0.2155, |
|
"step": 144000 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.8884863077392047e-05, |
|
"loss": 0.2233, |
|
"step": 144500 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.888100447212351e-05, |
|
"loss": 0.2187, |
|
"step": 145000 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.8877145866854967e-05, |
|
"loss": 0.2213, |
|
"step": 145500 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.887328726158643e-05, |
|
"loss": 0.2218, |
|
"step": 146000 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.886942865631789e-05, |
|
"loss": 0.2169, |
|
"step": 146500 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.886557005104935e-05, |
|
"loss": 0.2116, |
|
"step": 147000 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.8861711445780812e-05, |
|
"loss": 0.2143, |
|
"step": 147500 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.885785284051227e-05, |
|
"loss": 0.2143, |
|
"step": 148000 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 1.8853994235243733e-05, |
|
"loss": 0.2101, |
|
"step": 148500 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 1.885013562997519e-05, |
|
"loss": 0.2144, |
|
"step": 149000 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 1.8846277024706653e-05, |
|
"loss": 0.2074, |
|
"step": 149500 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 1.8842418419438112e-05, |
|
"loss": 0.2179, |
|
"step": 150000 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 1.8838559814169574e-05, |
|
"loss": 0.2129, |
|
"step": 150500 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 1.8834701208901033e-05, |
|
"loss": 0.223, |
|
"step": 151000 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 1.8830842603632495e-05, |
|
"loss": 0.2211, |
|
"step": 151500 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 1.8826983998363953e-05, |
|
"loss": 0.2135, |
|
"step": 152000 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 1.8823125393095415e-05, |
|
"loss": 0.2194, |
|
"step": 152500 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 1.8819266787826874e-05, |
|
"loss": 0.227, |
|
"step": 153000 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 1.8815408182558336e-05, |
|
"loss": 0.2168, |
|
"step": 153500 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 1.8811549577289795e-05, |
|
"loss": 0.2179, |
|
"step": 154000 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 1.8807690972021257e-05, |
|
"loss": 0.2077, |
|
"step": 154500 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 1.8803832366752715e-05, |
|
"loss": 0.2145, |
|
"step": 155000 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 1.8799973761484177e-05, |
|
"loss": 0.215, |
|
"step": 155500 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.8796115156215636e-05, |
|
"loss": 0.2131, |
|
"step": 156000 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.8792256550947098e-05, |
|
"loss": 0.2105, |
|
"step": 156500 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.8788397945678556e-05, |
|
"loss": 0.2007, |
|
"step": 157000 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.878453934041002e-05, |
|
"loss": 0.2126, |
|
"step": 157500 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.8780680735141477e-05, |
|
"loss": 0.2001, |
|
"step": 158000 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.877682212987294e-05, |
|
"loss": 0.216, |
|
"step": 158500 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 1.8772963524604398e-05, |
|
"loss": 0.2141, |
|
"step": 159000 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.876910491933586e-05, |
|
"loss": 0.2238, |
|
"step": 159500 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.876524631406732e-05, |
|
"loss": 0.2206, |
|
"step": 160000 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 1.876138770879878e-05, |
|
"loss": 0.2142, |
|
"step": 160500 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 1.875752910353024e-05, |
|
"loss": 0.2031, |
|
"step": 161000 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.87536704982617e-05, |
|
"loss": 0.2244, |
|
"step": 161500 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.874981189299316e-05, |
|
"loss": 0.2138, |
|
"step": 162000 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.874595328772462e-05, |
|
"loss": 0.2192, |
|
"step": 162500 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.874209468245608e-05, |
|
"loss": 0.2242, |
|
"step": 163000 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.8738236077187542e-05, |
|
"loss": 0.2166, |
|
"step": 163500 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.8734377471919e-05, |
|
"loss": 0.2148, |
|
"step": 164000 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.8730518866650463e-05, |
|
"loss": 0.2141, |
|
"step": 164500 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.872666026138192e-05, |
|
"loss": 0.2236, |
|
"step": 165000 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.8722801656113384e-05, |
|
"loss": 0.2218, |
|
"step": 165500 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.8718943050844842e-05, |
|
"loss": 0.2188, |
|
"step": 166000 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.8715084445576304e-05, |
|
"loss": 0.2179, |
|
"step": 166500 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.8711225840307763e-05, |
|
"loss": 0.2305, |
|
"step": 167000 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.8707367235039225e-05, |
|
"loss": 0.2174, |
|
"step": 167500 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.8703508629770684e-05, |
|
"loss": 0.2067, |
|
"step": 168000 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.8699650024502146e-05, |
|
"loss": 0.2163, |
|
"step": 168500 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.8695791419233604e-05, |
|
"loss": 0.2188, |
|
"step": 169000 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.8691932813965066e-05, |
|
"loss": 0.2166, |
|
"step": 169500 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.8688074208696525e-05, |
|
"loss": 0.2215, |
|
"step": 170000 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.8684215603427987e-05, |
|
"loss": 0.2269, |
|
"step": 170500 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.868035699815945e-05, |
|
"loss": 0.2182, |
|
"step": 171000 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.8676498392890907e-05, |
|
"loss": 0.2131, |
|
"step": 171500 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.867263978762237e-05, |
|
"loss": 0.2125, |
|
"step": 172000 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.8668781182353828e-05, |
|
"loss": 0.2141, |
|
"step": 172500 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_bart_score": -6.65, |
|
"eval_bertscore_f1": 0.75, |
|
"eval_bertscore_precision": 0.79, |
|
"eval_bertscore_recall": 0.72, |
|
"eval_bleu": 0.0, |
|
"eval_loss": 0.21217894554138184, |
|
"eval_meteor": 0.09, |
|
"eval_rogue_score": { |
|
"rouge1": 0.15388393304626252, |
|
"rouge2": 0.024424609903622555, |
|
"rougeL": 0.1256245521025564, |
|
"rougeLsum": 0.125613877504259 |
|
}, |
|
"eval_runtime": 4279.9159, |
|
"eval_samples_per_second": 5.046, |
|
"eval_simple_accuracy": 0.72, |
|
"eval_steps_per_second": 2.523, |
|
"step": 172774 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.866492257708529e-05, |
|
"loss": 0.2107, |
|
"step": 173000 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.866106397181675e-05, |
|
"loss": 0.2049, |
|
"step": 173500 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.865720536654821e-05, |
|
"loss": 0.2227, |
|
"step": 174000 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.865334676127967e-05, |
|
"loss": 0.2124, |
|
"step": 174500 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.864948815601113e-05, |
|
"loss": 0.209, |
|
"step": 175000 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.864562955074259e-05, |
|
"loss": 0.2163, |
|
"step": 175500 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.8641770945474052e-05, |
|
"loss": 0.2165, |
|
"step": 176000 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.863791234020551e-05, |
|
"loss": 0.2116, |
|
"step": 176500 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.8634053734936973e-05, |
|
"loss": 0.2007, |
|
"step": 177000 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.863019512966843e-05, |
|
"loss": 0.202, |
|
"step": 177500 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 1.8626336524399893e-05, |
|
"loss": 0.2164, |
|
"step": 178000 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 1.8622477919131352e-05, |
|
"loss": 0.2102, |
|
"step": 178500 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 1.8618619313862814e-05, |
|
"loss": 0.208, |
|
"step": 179000 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.8614760708594273e-05, |
|
"loss": 0.2062, |
|
"step": 179500 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.8610902103325735e-05, |
|
"loss": 0.2099, |
|
"step": 180000 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 1.8607043498057193e-05, |
|
"loss": 0.2065, |
|
"step": 180500 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.8603184892788655e-05, |
|
"loss": 0.211, |
|
"step": 181000 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.8599326287520114e-05, |
|
"loss": 0.206, |
|
"step": 181500 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 1.8595467682251576e-05, |
|
"loss": 0.2096, |
|
"step": 182000 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 1.8591609076983035e-05, |
|
"loss": 0.2172, |
|
"step": 182500 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 1.8587750471714497e-05, |
|
"loss": 0.2085, |
|
"step": 183000 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 1.8583891866445955e-05, |
|
"loss": 0.2056, |
|
"step": 183500 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 1.8580033261177417e-05, |
|
"loss": 0.2204, |
|
"step": 184000 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 1.8576174655908876e-05, |
|
"loss": 0.2164, |
|
"step": 184500 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 1.8572316050640338e-05, |
|
"loss": 0.2, |
|
"step": 185000 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 1.8568457445371797e-05, |
|
"loss": 0.2051, |
|
"step": 185500 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 1.856459884010326e-05, |
|
"loss": 0.2133, |
|
"step": 186000 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 1.8560740234834717e-05, |
|
"loss": 0.2145, |
|
"step": 186500 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 1.855688162956618e-05, |
|
"loss": 0.2151, |
|
"step": 187000 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 1.8553023024297638e-05, |
|
"loss": 0.2026, |
|
"step": 187500 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 1.85491644190291e-05, |
|
"loss": 0.201, |
|
"step": 188000 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 1.854530581376056e-05, |
|
"loss": 0.1968, |
|
"step": 188500 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 1.854144720849202e-05, |
|
"loss": 0.2079, |
|
"step": 189000 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 1.853758860322348e-05, |
|
"loss": 0.2186, |
|
"step": 189500 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 1.853372999795494e-05, |
|
"loss": 0.2127, |
|
"step": 190000 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 1.85298713926864e-05, |
|
"loss": 0.2043, |
|
"step": 190500 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 1.8526012787417862e-05, |
|
"loss": 0.2161, |
|
"step": 191000 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 1.852215418214932e-05, |
|
"loss": 0.2066, |
|
"step": 191500 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 1.8518295576880782e-05, |
|
"loss": 0.2058, |
|
"step": 192000 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 1.851443697161224e-05, |
|
"loss": 0.215, |
|
"step": 192500 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 1.8510578366343703e-05, |
|
"loss": 0.2035, |
|
"step": 193000 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 1.850671976107516e-05, |
|
"loss": 0.2185, |
|
"step": 193500 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 1.8502861155806624e-05, |
|
"loss": 0.2193, |
|
"step": 194000 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 1.8499002550538082e-05, |
|
"loss": 0.2254, |
|
"step": 194500 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 1.8495143945269544e-05, |
|
"loss": 0.1887, |
|
"step": 195000 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 1.8491285340001003e-05, |
|
"loss": 0.215, |
|
"step": 195500 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 1.8487426734732465e-05, |
|
"loss": 0.2042, |
|
"step": 196000 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 1.8483568129463924e-05, |
|
"loss": 0.2144, |
|
"step": 196500 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 1.8479709524195386e-05, |
|
"loss": 0.2181, |
|
"step": 197000 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 1.8475850918926844e-05, |
|
"loss": 0.207, |
|
"step": 197500 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 1.8471992313658306e-05, |
|
"loss": 0.2116, |
|
"step": 198000 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 1.8468133708389765e-05, |
|
"loss": 0.2043, |
|
"step": 198500 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 1.8464275103121227e-05, |
|
"loss": 0.2182, |
|
"step": 199000 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 1.8460416497852686e-05, |
|
"loss": 0.2135, |
|
"step": 199500 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.8456557892584148e-05, |
|
"loss": 0.2106, |
|
"step": 200000 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.8452699287315606e-05, |
|
"loss": 0.2089, |
|
"step": 200500 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.8448840682047068e-05, |
|
"loss": 0.1977, |
|
"step": 201000 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.8444982076778527e-05, |
|
"loss": 0.214, |
|
"step": 201500 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 1.844112347150999e-05, |
|
"loss": 0.2093, |
|
"step": 202000 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 1.8437264866241447e-05, |
|
"loss": 0.2065, |
|
"step": 202500 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.843340626097291e-05, |
|
"loss": 0.214, |
|
"step": 203000 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 1.8429547655704368e-05, |
|
"loss": 0.1995, |
|
"step": 203500 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 1.842568905043583e-05, |
|
"loss": 0.2057, |
|
"step": 204000 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.842183044516729e-05, |
|
"loss": 0.2043, |
|
"step": 204500 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.841797183989875e-05, |
|
"loss": 0.212, |
|
"step": 205000 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 1.8414113234630213e-05, |
|
"loss": 0.2144, |
|
"step": 205500 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 1.841025462936167e-05, |
|
"loss": 0.2184, |
|
"step": 206000 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.8406396024093133e-05, |
|
"loss": 0.2056, |
|
"step": 206500 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 1.8402537418824592e-05, |
|
"loss": 0.2134, |
|
"step": 207000 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 1.8398678813556054e-05, |
|
"loss": 0.2065, |
|
"step": 207500 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.8394820208287513e-05, |
|
"loss": 0.2027, |
|
"step": 208000 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.8390961603018975e-05, |
|
"loss": 0.2082, |
|
"step": 208500 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.8387102997750433e-05, |
|
"loss": 0.1966, |
|
"step": 209000 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.8383244392481895e-05, |
|
"loss": 0.1995, |
|
"step": 209500 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.8379385787213354e-05, |
|
"loss": 0.2144, |
|
"step": 210000 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.8375527181944816e-05, |
|
"loss": 0.2134, |
|
"step": 210500 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.8371668576676275e-05, |
|
"loss": 0.2119, |
|
"step": 211000 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.8367809971407737e-05, |
|
"loss": 0.2095, |
|
"step": 211500 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.8363951366139195e-05, |
|
"loss": 0.2183, |
|
"step": 212000 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.8360092760870657e-05, |
|
"loss": 0.2161, |
|
"step": 212500 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.8356234155602116e-05, |
|
"loss": 0.222, |
|
"step": 213000 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.8352375550333578e-05, |
|
"loss": 0.217, |
|
"step": 213500 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.8348516945065037e-05, |
|
"loss": 0.2002, |
|
"step": 214000 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.83446583397965e-05, |
|
"loss": 0.2039, |
|
"step": 214500 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.8340799734527957e-05, |
|
"loss": 0.1983, |
|
"step": 215000 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.833694112925942e-05, |
|
"loss": 0.2086, |
|
"step": 215500 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.8333082523990878e-05, |
|
"loss": 0.2134, |
|
"step": 216000 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.832922391872234e-05, |
|
"loss": 0.2086, |
|
"step": 216500 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.83253653134538e-05, |
|
"loss": 0.2135, |
|
"step": 217000 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.832150670818526e-05, |
|
"loss": 0.2074, |
|
"step": 217500 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.831764810291672e-05, |
|
"loss": 0.2122, |
|
"step": 218000 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.831378949764818e-05, |
|
"loss": 0.2052, |
|
"step": 218500 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.830993089237964e-05, |
|
"loss": 0.2106, |
|
"step": 219000 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.8306072287111102e-05, |
|
"loss": 0.2095, |
|
"step": 219500 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.8302213681842564e-05, |
|
"loss": 0.2093, |
|
"step": 220000 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.8298355076574022e-05, |
|
"loss": 0.2108, |
|
"step": 220500 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.8294496471305484e-05, |
|
"loss": 0.2116, |
|
"step": 221000 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.8290637866036943e-05, |
|
"loss": 0.2107, |
|
"step": 221500 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.8286779260768405e-05, |
|
"loss": 0.205, |
|
"step": 222000 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.8282920655499864e-05, |
|
"loss": 0.2249, |
|
"step": 222500 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.8279062050231326e-05, |
|
"loss": 0.2167, |
|
"step": 223000 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.8275203444962784e-05, |
|
"loss": 0.2096, |
|
"step": 223500 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.8271344839694246e-05, |
|
"loss": 0.2097, |
|
"step": 224000 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.8267486234425705e-05, |
|
"loss": 0.2161, |
|
"step": 224500 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.8263627629157167e-05, |
|
"loss": 0.2248, |
|
"step": 225000 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 1.8259769023888626e-05, |
|
"loss": 0.2203, |
|
"step": 225500 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 1.8255910418620088e-05, |
|
"loss": 0.2082, |
|
"step": 226000 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 1.8252051813351546e-05, |
|
"loss": 0.214, |
|
"step": 226500 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 1.824819320808301e-05, |
|
"loss": 0.2061, |
|
"step": 227000 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 1.8244334602814467e-05, |
|
"loss": 0.2137, |
|
"step": 227500 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 1.824047599754593e-05, |
|
"loss": 0.2112, |
|
"step": 228000 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 1.8236617392277388e-05, |
|
"loss": 0.2074, |
|
"step": 228500 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 1.823275878700885e-05, |
|
"loss": 0.2163, |
|
"step": 229000 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 1.8228900181740308e-05, |
|
"loss": 0.2022, |
|
"step": 229500 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 1.822504157647177e-05, |
|
"loss": 0.2119, |
|
"step": 230000 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 1.8221182971203232e-05, |
|
"loss": 0.2286, |
|
"step": 230500 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 1.821732436593469e-05, |
|
"loss": 0.2184, |
|
"step": 231000 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 1.8213465760666153e-05, |
|
"loss": 0.2022, |
|
"step": 231500 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 1.820960715539761e-05, |
|
"loss": 0.2126, |
|
"step": 232000 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 1.8205748550129074e-05, |
|
"loss": 0.2115, |
|
"step": 232500 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 1.8201889944860532e-05, |
|
"loss": 0.2203, |
|
"step": 233000 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 1.8198031339591994e-05, |
|
"loss": 0.2111, |
|
"step": 233500 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 1.8194172734323453e-05, |
|
"loss": 0.2151, |
|
"step": 234000 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 1.8190314129054915e-05, |
|
"loss": 0.217, |
|
"step": 234500 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 1.8186455523786374e-05, |
|
"loss": 0.2146, |
|
"step": 235000 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 1.8182596918517836e-05, |
|
"loss": 0.2045, |
|
"step": 235500 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 1.8178738313249294e-05, |
|
"loss": 0.2103, |
|
"step": 236000 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 1.8174879707980756e-05, |
|
"loss": 0.2011, |
|
"step": 236500 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 1.8171021102712215e-05, |
|
"loss": 0.2086, |
|
"step": 237000 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 1.8167162497443677e-05, |
|
"loss": 0.215, |
|
"step": 237500 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 1.8163303892175135e-05, |
|
"loss": 0.2273, |
|
"step": 238000 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 1.8159445286906597e-05, |
|
"loss": 0.2007, |
|
"step": 238500 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 1.8155586681638056e-05, |
|
"loss": 0.1992, |
|
"step": 239000 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 1.8151728076369518e-05, |
|
"loss": 0.2022, |
|
"step": 239500 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 1.8147869471100977e-05, |
|
"loss": 0.2108, |
|
"step": 240000 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 1.814401086583244e-05, |
|
"loss": 0.2087, |
|
"step": 240500 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 1.81401522605639e-05, |
|
"loss": 0.2147, |
|
"step": 241000 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 1.813629365529536e-05, |
|
"loss": 0.2164, |
|
"step": 241500 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 1.813243505002682e-05, |
|
"loss": 0.2077, |
|
"step": 242000 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 1.812857644475828e-05, |
|
"loss": 0.2195, |
|
"step": 242500 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 1.8124717839489742e-05, |
|
"loss": 0.2205, |
|
"step": 243000 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 1.81208592342212e-05, |
|
"loss": 0.2114, |
|
"step": 243500 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 1.8117000628952663e-05, |
|
"loss": 0.2166, |
|
"step": 244000 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.811314202368412e-05, |
|
"loss": 0.2018, |
|
"step": 244500 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.8109283418415583e-05, |
|
"loss": 0.2214, |
|
"step": 245000 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.8105424813147042e-05, |
|
"loss": 0.2076, |
|
"step": 245500 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.8101566207878504e-05, |
|
"loss": 0.2237, |
|
"step": 246000 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.8097707602609963e-05, |
|
"loss": 0.2065, |
|
"step": 246500 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.8093848997341425e-05, |
|
"loss": 0.2041, |
|
"step": 247000 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 1.8089990392072883e-05, |
|
"loss": 0.2188, |
|
"step": 247500 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 1.8086131786804345e-05, |
|
"loss": 0.2124, |
|
"step": 248000 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 1.8082273181535804e-05, |
|
"loss": 0.2011, |
|
"step": 248500 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 1.8078414576267266e-05, |
|
"loss": 0.2155, |
|
"step": 249000 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 1.8074555970998725e-05, |
|
"loss": 0.213, |
|
"step": 249500 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 1.8070697365730187e-05, |
|
"loss": 0.2052, |
|
"step": 250000 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 1.8066838760461645e-05, |
|
"loss": 0.2012, |
|
"step": 250500 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 1.8062980155193107e-05, |
|
"loss": 0.2143, |
|
"step": 251000 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 1.8059121549924566e-05, |
|
"loss": 0.216, |
|
"step": 251500 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.8055262944656028e-05, |
|
"loss": 0.2058, |
|
"step": 252000 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.8051404339387486e-05, |
|
"loss": 0.2036, |
|
"step": 252500 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.804754573411895e-05, |
|
"loss": 0.207, |
|
"step": 253000 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.8043687128850407e-05, |
|
"loss": 0.208, |
|
"step": 253500 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.803982852358187e-05, |
|
"loss": 0.2076, |
|
"step": 254000 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.8035969918313328e-05, |
|
"loss": 0.2008, |
|
"step": 254500 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.803211131304479e-05, |
|
"loss": 0.2127, |
|
"step": 255000 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.802825270777625e-05, |
|
"loss": 0.2069, |
|
"step": 255500 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.802439410250771e-05, |
|
"loss": 0.2116, |
|
"step": 256000 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 1.802053549723917e-05, |
|
"loss": 0.2151, |
|
"step": 256500 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 1.801667689197063e-05, |
|
"loss": 0.2058, |
|
"step": 257000 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 1.801281828670209e-05, |
|
"loss": 0.2093, |
|
"step": 257500 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.8008959681433552e-05, |
|
"loss": 0.2013, |
|
"step": 258000 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.800510107616501e-05, |
|
"loss": 0.2152, |
|
"step": 258500 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 1.8001242470896472e-05, |
|
"loss": 0.2219, |
|
"step": 259000 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_bart_score": -6.71, |
|
"eval_bertscore_f1": 0.75, |
|
"eval_bertscore_precision": 0.78, |
|
"eval_bertscore_recall": 0.71, |
|
"eval_bleu": 0.0, |
|
"eval_loss": 0.21118199825286865, |
|
"eval_meteor": 0.09, |
|
"eval_rogue_score": { |
|
"rouge1": 0.15362210929602754, |
|
"rouge2": 0.02403959138428216, |
|
"rougeL": 0.12578179536151207, |
|
"rougeLsum": 0.1257791367260927 |
|
}, |
|
"eval_runtime": 4356.2183, |
|
"eval_samples_per_second": 4.958, |
|
"eval_simple_accuracy": 0.75, |
|
"eval_steps_per_second": 2.479, |
|
"step": 259161 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 1.799738386562793e-05, |
|
"loss": 0.1986, |
|
"step": 259500 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 1.7993525260359393e-05, |
|
"loss": 0.2009, |
|
"step": 260000 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 1.798966665509085e-05, |
|
"loss": 0.1981, |
|
"step": 260500 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 1.7985808049822314e-05, |
|
"loss": 0.2096, |
|
"step": 261000 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 1.7981949444553772e-05, |
|
"loss": 0.1995, |
|
"step": 261500 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 1.7978090839285234e-05, |
|
"loss": 0.201, |
|
"step": 262000 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 1.7974232234016693e-05, |
|
"loss": 0.1993, |
|
"step": 262500 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 1.7970373628748155e-05, |
|
"loss": 0.2068, |
|
"step": 263000 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 1.7966515023479614e-05, |
|
"loss": 0.2091, |
|
"step": 263500 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 1.7962656418211076e-05, |
|
"loss": 0.2002, |
|
"step": 264000 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 1.7958797812942534e-05, |
|
"loss": 0.2058, |
|
"step": 264500 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 1.7954939207673996e-05, |
|
"loss": 0.1924, |
|
"step": 265000 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 1.7951080602405455e-05, |
|
"loss": 0.1967, |
|
"step": 265500 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 1.7947221997136917e-05, |
|
"loss": 0.2102, |
|
"step": 266000 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 1.7943363391868376e-05, |
|
"loss": 0.1996, |
|
"step": 266500 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 1.7939504786599838e-05, |
|
"loss": 0.195, |
|
"step": 267000 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 1.7935646181331296e-05, |
|
"loss": 0.1942, |
|
"step": 267500 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 1.7931787576062758e-05, |
|
"loss": 0.1986, |
|
"step": 268000 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 1.7927928970794217e-05, |
|
"loss": 0.2123, |
|
"step": 268500 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 1.792407036552568e-05, |
|
"loss": 0.1924, |
|
"step": 269000 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 1.7920211760257137e-05, |
|
"loss": 0.2026, |
|
"step": 269500 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 1.79163531549886e-05, |
|
"loss": 0.2038, |
|
"step": 270000 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 1.7912494549720058e-05, |
|
"loss": 0.1966, |
|
"step": 270500 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 1.790863594445152e-05, |
|
"loss": 0.1984, |
|
"step": 271000 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 1.790477733918298e-05, |
|
"loss": 0.1985, |
|
"step": 271500 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 1.790091873391444e-05, |
|
"loss": 0.2068, |
|
"step": 272000 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 1.78970601286459e-05, |
|
"loss": 0.2091, |
|
"step": 272500 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 1.789320152337736e-05, |
|
"loss": 0.2041, |
|
"step": 273000 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 1.7889342918108823e-05, |
|
"loss": 0.2076, |
|
"step": 273500 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 1.7885484312840282e-05, |
|
"loss": 0.2049, |
|
"step": 274000 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 1.7881625707571744e-05, |
|
"loss": 0.2042, |
|
"step": 274500 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 1.7877767102303203e-05, |
|
"loss": 0.2046, |
|
"step": 275000 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 1.7873908497034665e-05, |
|
"loss": 0.2111, |
|
"step": 275500 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 1.7870049891766123e-05, |
|
"loss": 0.2082, |
|
"step": 276000 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 1.7866191286497585e-05, |
|
"loss": 0.2052, |
|
"step": 276500 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 1.7862332681229044e-05, |
|
"loss": 0.1885, |
|
"step": 277000 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 1.7858474075960506e-05, |
|
"loss": 0.2165, |
|
"step": 277500 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 1.7854615470691965e-05, |
|
"loss": 0.2122, |
|
"step": 278000 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 1.7850756865423427e-05, |
|
"loss": 0.2084, |
|
"step": 278500 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 1.7846898260154885e-05, |
|
"loss": 0.2104, |
|
"step": 279000 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 1.7843039654886347e-05, |
|
"loss": 0.2143, |
|
"step": 279500 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 1.7839181049617806e-05, |
|
"loss": 0.207, |
|
"step": 280000 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 1.7835322444349268e-05, |
|
"loss": 0.2075, |
|
"step": 280500 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 1.7831463839080727e-05, |
|
"loss": 0.2019, |
|
"step": 281000 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 1.782760523381219e-05, |
|
"loss": 0.2103, |
|
"step": 281500 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 1.7823746628543647e-05, |
|
"loss": 0.2108, |
|
"step": 282000 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 1.781988802327511e-05, |
|
"loss": 0.2062, |
|
"step": 282500 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 1.7816029418006568e-05, |
|
"loss": 0.1966, |
|
"step": 283000 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 1.781217081273803e-05, |
|
"loss": 0.1954, |
|
"step": 283500 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 1.780831220746949e-05, |
|
"loss": 0.1995, |
|
"step": 284000 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 1.780445360220095e-05, |
|
"loss": 0.2042, |
|
"step": 284500 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 1.780059499693241e-05, |
|
"loss": 0.2065, |
|
"step": 285000 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 1.779673639166387e-05, |
|
"loss": 0.2019, |
|
"step": 285500 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 1.779287778639533e-05, |
|
"loss": 0.2114, |
|
"step": 286000 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 1.7789019181126792e-05, |
|
"loss": 0.2043, |
|
"step": 286500 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 1.778516057585825e-05, |
|
"loss": 0.2149, |
|
"step": 287000 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 1.7781301970589712e-05, |
|
"loss": 0.198, |
|
"step": 287500 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 1.777744336532117e-05, |
|
"loss": 0.2076, |
|
"step": 288000 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 1.7773584760052633e-05, |
|
"loss": 0.2019, |
|
"step": 288500 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 1.7769726154784092e-05, |
|
"loss": 0.2132, |
|
"step": 289000 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 1.7765867549515554e-05, |
|
"loss": 0.2149, |
|
"step": 289500 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 1.7762008944247012e-05, |
|
"loss": 0.2053, |
|
"step": 290000 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 1.7758150338978474e-05, |
|
"loss": 0.1947, |
|
"step": 290500 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 1.7754291733709933e-05, |
|
"loss": 0.2054, |
|
"step": 291000 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 1.7750433128441395e-05, |
|
"loss": 0.2098, |
|
"step": 291500 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 1.7746574523172854e-05, |
|
"loss": 0.211, |
|
"step": 292000 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 1.7742715917904316e-05, |
|
"loss": 0.2129, |
|
"step": 292500 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 1.7738857312635774e-05, |
|
"loss": 0.2183, |
|
"step": 293000 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 1.7734998707367236e-05, |
|
"loss": 0.2111, |
|
"step": 293500 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 1.7731140102098695e-05, |
|
"loss": 0.2039, |
|
"step": 294000 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 1.7727281496830157e-05, |
|
"loss": 0.2056, |
|
"step": 294500 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 1.7723422891561616e-05, |
|
"loss": 0.2209, |
|
"step": 295000 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 1.7719564286293078e-05, |
|
"loss": 0.2174, |
|
"step": 295500 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 1.7715705681024536e-05, |
|
"loss": 0.2048, |
|
"step": 296000 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 1.7711847075755998e-05, |
|
"loss": 0.203, |
|
"step": 296500 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 1.7707988470487457e-05, |
|
"loss": 0.2019, |
|
"step": 297000 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 1.770412986521892e-05, |
|
"loss": 0.2108, |
|
"step": 297500 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 1.7700271259950378e-05, |
|
"loss": 0.2037, |
|
"step": 298000 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 1.769641265468184e-05, |
|
"loss": 0.2025, |
|
"step": 298500 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 1.7692554049413298e-05, |
|
"loss": 0.208, |
|
"step": 299000 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 1.768869544414476e-05, |
|
"loss": 0.21, |
|
"step": 299500 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 1.768483683887622e-05, |
|
"loss": 0.2082, |
|
"step": 300000 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 1.768097823360768e-05, |
|
"loss": 0.2073, |
|
"step": 300500 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 1.767711962833914e-05, |
|
"loss": 0.2088, |
|
"step": 301000 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 1.76732610230706e-05, |
|
"loss": 0.221, |
|
"step": 301500 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 1.766940241780206e-05, |
|
"loss": 0.2059, |
|
"step": 302000 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 1.7665543812533522e-05, |
|
"loss": 0.2065, |
|
"step": 302500 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 1.7661685207264984e-05, |
|
"loss": 0.203, |
|
"step": 303000 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 1.7657826601996443e-05, |
|
"loss": 0.1965, |
|
"step": 303500 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 1.7653967996727905e-05, |
|
"loss": 0.1983, |
|
"step": 304000 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 1.7650109391459363e-05, |
|
"loss": 0.2099, |
|
"step": 304500 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 1.7646250786190825e-05, |
|
"loss": 0.2169, |
|
"step": 305000 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 1.7642392180922284e-05, |
|
"loss": 0.2161, |
|
"step": 305500 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 1.7638533575653746e-05, |
|
"loss": 0.199, |
|
"step": 306000 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 1.7634674970385205e-05, |
|
"loss": 0.2072, |
|
"step": 306500 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 1.7630816365116667e-05, |
|
"loss": 0.2115, |
|
"step": 307000 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 1.7626957759848125e-05, |
|
"loss": 0.2051, |
|
"step": 307500 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 1.7623099154579587e-05, |
|
"loss": 0.2063, |
|
"step": 308000 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 1.7619240549311046e-05, |
|
"loss": 0.2062, |
|
"step": 308500 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 1.7615381944042508e-05, |
|
"loss": 0.2107, |
|
"step": 309000 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 1.7611523338773967e-05, |
|
"loss": 0.1978, |
|
"step": 309500 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 1.760766473350543e-05, |
|
"loss": 0.2074, |
|
"step": 310000 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 1.7603806128236887e-05, |
|
"loss": 0.2014, |
|
"step": 310500 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 1.759994752296835e-05, |
|
"loss": 0.2128, |
|
"step": 311000 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 1.7596088917699808e-05, |
|
"loss": 0.211, |
|
"step": 311500 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 1.759223031243127e-05, |
|
"loss": 0.2056, |
|
"step": 312000 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 1.758837170716273e-05, |
|
"loss": 0.2023, |
|
"step": 312500 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 1.758451310189419e-05, |
|
"loss": 0.212, |
|
"step": 313000 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 1.7580654496625653e-05, |
|
"loss": 0.2084, |
|
"step": 313500 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 1.757679589135711e-05, |
|
"loss": 0.1958, |
|
"step": 314000 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 1.7572937286088573e-05, |
|
"loss": 0.1997, |
|
"step": 314500 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 1.7569078680820032e-05, |
|
"loss": 0.206, |
|
"step": 315000 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 1.7565220075551494e-05, |
|
"loss": 0.2068, |
|
"step": 315500 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 1.7561361470282953e-05, |
|
"loss": 0.2209, |
|
"step": 316000 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 1.7557502865014415e-05, |
|
"loss": 0.2086, |
|
"step": 316500 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 1.7553644259745873e-05, |
|
"loss": 0.1992, |
|
"step": 317000 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 1.7549785654477335e-05, |
|
"loss": 0.1929, |
|
"step": 317500 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 1.7545927049208794e-05, |
|
"loss": 0.2042, |
|
"step": 318000 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 1.7542068443940256e-05, |
|
"loss": 0.2031, |
|
"step": 318500 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 1.7538209838671714e-05, |
|
"loss": 0.2076, |
|
"step": 319000 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 1.7534351233403176e-05, |
|
"loss": 0.2038, |
|
"step": 319500 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 1.7530492628134635e-05, |
|
"loss": 0.2113, |
|
"step": 320000 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 1.7526634022866097e-05, |
|
"loss": 0.207, |
|
"step": 320500 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 1.7522775417597556e-05, |
|
"loss": 0.2133, |
|
"step": 321000 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 1.7518916812329018e-05, |
|
"loss": 0.2027, |
|
"step": 321500 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 1.7515058207060476e-05, |
|
"loss": 0.2002, |
|
"step": 322000 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 1.751119960179194e-05, |
|
"loss": 0.2173, |
|
"step": 322500 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 1.7507340996523397e-05, |
|
"loss": 0.2068, |
|
"step": 323000 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 1.750348239125486e-05, |
|
"loss": 0.1951, |
|
"step": 323500 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 1.749962378598632e-05, |
|
"loss": 0.2117, |
|
"step": 324000 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 1.749576518071778e-05, |
|
"loss": 0.199, |
|
"step": 324500 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 1.7491906575449242e-05, |
|
"loss": 0.2016, |
|
"step": 325000 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 1.74880479701807e-05, |
|
"loss": 0.203, |
|
"step": 325500 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 1.7484189364912162e-05, |
|
"loss": 0.204, |
|
"step": 326000 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 1.748033075964362e-05, |
|
"loss": 0.2135, |
|
"step": 326500 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 1.7476472154375083e-05, |
|
"loss": 0.2115, |
|
"step": 327000 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 1.747261354910654e-05, |
|
"loss": 0.2088, |
|
"step": 327500 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 1.7468754943838004e-05, |
|
"loss": 0.2045, |
|
"step": 328000 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 1.7464896338569462e-05, |
|
"loss": 0.2076, |
|
"step": 328500 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 1.7461037733300924e-05, |
|
"loss": 0.2094, |
|
"step": 329000 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 1.7457179128032383e-05, |
|
"loss": 0.2053, |
|
"step": 329500 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 1.7453320522763845e-05, |
|
"loss": 0.222, |
|
"step": 330000 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 1.7449461917495304e-05, |
|
"loss": 0.2011, |
|
"step": 330500 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 1.7445603312226766e-05, |
|
"loss": 0.1948, |
|
"step": 331000 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 1.7441744706958224e-05, |
|
"loss": 0.2022, |
|
"step": 331500 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 1.7437886101689686e-05, |
|
"loss": 0.2084, |
|
"step": 332000 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 1.7434027496421145e-05, |
|
"loss": 0.2012, |
|
"step": 332500 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 1.7430168891152607e-05, |
|
"loss": 0.2099, |
|
"step": 333000 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 1.7426310285884066e-05, |
|
"loss": 0.2229, |
|
"step": 333500 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 1.7422451680615528e-05, |
|
"loss": 0.2107, |
|
"step": 334000 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 1.7418593075346986e-05, |
|
"loss": 0.2112, |
|
"step": 334500 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 1.7414734470078448e-05, |
|
"loss": 0.2041, |
|
"step": 335000 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 1.7410875864809907e-05, |
|
"loss": 0.198, |
|
"step": 335500 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 1.740701725954137e-05, |
|
"loss": 0.2063, |
|
"step": 336000 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 1.7403158654272827e-05, |
|
"loss": 0.2216, |
|
"step": 336500 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 1.739930004900429e-05, |
|
"loss": 0.2078, |
|
"step": 337000 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 1.7395441443735748e-05, |
|
"loss": 0.2013, |
|
"step": 337500 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 1.739158283846721e-05, |
|
"loss": 0.2011, |
|
"step": 338000 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 1.738772423319867e-05, |
|
"loss": 0.2074, |
|
"step": 338500 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 1.738386562793013e-05, |
|
"loss": 0.2103, |
|
"step": 339000 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 1.738000702266159e-05, |
|
"loss": 0.2089, |
|
"step": 339500 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 1.737614841739305e-05, |
|
"loss": 0.2029, |
|
"step": 340000 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 1.737228981212451e-05, |
|
"loss": 0.2057, |
|
"step": 340500 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 1.7368431206855972e-05, |
|
"loss": 0.1963, |
|
"step": 341000 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 1.736457260158743e-05, |
|
"loss": 0.2057, |
|
"step": 341500 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 1.7360713996318893e-05, |
|
"loss": 0.1949, |
|
"step": 342000 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 1.7356855391050355e-05, |
|
"loss": 0.1999, |
|
"step": 342500 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 1.7352996785781813e-05, |
|
"loss": 0.2147, |
|
"step": 343000 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 1.7349138180513275e-05, |
|
"loss": 0.2092, |
|
"step": 343500 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 1.7345279575244734e-05, |
|
"loss": 0.2003, |
|
"step": 344000 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 1.7341420969976196e-05, |
|
"loss": 0.2057, |
|
"step": 344500 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 1.7337562364707655e-05, |
|
"loss": 0.1958, |
|
"step": 345000 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 1.7333703759439117e-05, |
|
"loss": 0.201, |
|
"step": 345500 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"eval_bart_score": -6.65, |
|
"eval_bertscore_f1": 0.75, |
|
"eval_bertscore_precision": 0.79, |
|
"eval_bertscore_recall": 0.72, |
|
"eval_bleu": 0.0, |
|
"eval_loss": 0.2109740972518921, |
|
"eval_meteor": 0.09, |
|
"eval_rogue_score": { |
|
"rouge1": 0.16131107323752153, |
|
"rouge2": 0.025998526352501692, |
|
"rougeL": 0.1308022660230437, |
|
"rougeLsum": 0.13079982676404756 |
|
}, |
|
"eval_runtime": 4284.4815, |
|
"eval_samples_per_second": 5.041, |
|
"eval_simple_accuracy": 0.78, |
|
"eval_steps_per_second": 2.52, |
|
"step": 345548 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 1.7329845154170575e-05, |
|
"loss": 0.192, |
|
"step": 346000 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 1.7325986548902037e-05, |
|
"loss": 0.2071, |
|
"step": 346500 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 1.7322127943633496e-05, |
|
"loss": 0.2114, |
|
"step": 347000 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 1.7318269338364958e-05, |
|
"loss": 0.1951, |
|
"step": 347500 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 1.7314410733096417e-05, |
|
"loss": 0.2019, |
|
"step": 348000 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 1.731055212782788e-05, |
|
"loss": 0.1942, |
|
"step": 348500 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 1.7306693522559337e-05, |
|
"loss": 0.2016, |
|
"step": 349000 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 1.73028349172908e-05, |
|
"loss": 0.2076, |
|
"step": 349500 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 1.7298976312022258e-05, |
|
"loss": 0.206, |
|
"step": 350000 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 1.729511770675372e-05, |
|
"loss": 0.1963, |
|
"step": 350500 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 1.729125910148518e-05, |
|
"loss": 0.1892, |
|
"step": 351000 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 1.728740049621664e-05, |
|
"loss": 0.2025, |
|
"step": 351500 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 1.72835418909481e-05, |
|
"loss": 0.201, |
|
"step": 352000 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 1.727968328567956e-05, |
|
"loss": 0.2041, |
|
"step": 352500 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 1.727582468041102e-05, |
|
"loss": 0.2077, |
|
"step": 353000 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 1.7271966075142482e-05, |
|
"loss": 0.1999, |
|
"step": 353500 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 1.726810746987394e-05, |
|
"loss": 0.1976, |
|
"step": 354000 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 1.7264248864605402e-05, |
|
"loss": 0.1915, |
|
"step": 354500 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 1.726039025933686e-05, |
|
"loss": 0.1992, |
|
"step": 355000 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 1.7256531654068323e-05, |
|
"loss": 0.1924, |
|
"step": 355500 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 1.7252673048799782e-05, |
|
"loss": 0.1934, |
|
"step": 356000 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 1.7248814443531244e-05, |
|
"loss": 0.2034, |
|
"step": 356500 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 1.7244955838262702e-05, |
|
"loss": 0.1922, |
|
"step": 357000 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 1.7241097232994164e-05, |
|
"loss": 0.191, |
|
"step": 357500 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 1.7237238627725623e-05, |
|
"loss": 0.2009, |
|
"step": 358000 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 1.7233380022457085e-05, |
|
"loss": 0.1917, |
|
"step": 358500 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 1.7229521417188544e-05, |
|
"loss": 0.1917, |
|
"step": 359000 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 1.7225662811920006e-05, |
|
"loss": 0.1958, |
|
"step": 359500 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 1.7221804206651464e-05, |
|
"loss": 0.2036, |
|
"step": 360000 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 1.7217945601382926e-05, |
|
"loss": 0.2019, |
|
"step": 360500 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 1.7214086996114385e-05, |
|
"loss": 0.2069, |
|
"step": 361000 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 1.7210228390845847e-05, |
|
"loss": 0.2, |
|
"step": 361500 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 1.7206369785577306e-05, |
|
"loss": 0.192, |
|
"step": 362000 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 1.7202511180308768e-05, |
|
"loss": 0.2127, |
|
"step": 362500 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 1.7198652575040226e-05, |
|
"loss": 0.2039, |
|
"step": 363000 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 1.7194793969771688e-05, |
|
"loss": 0.2094, |
|
"step": 363500 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 1.7190935364503147e-05, |
|
"loss": 0.2071, |
|
"step": 364000 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 1.718707675923461e-05, |
|
"loss": 0.189, |
|
"step": 364500 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 1.7183218153966068e-05, |
|
"loss": 0.2053, |
|
"step": 365000 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 1.717935954869753e-05, |
|
"loss": 0.2061, |
|
"step": 365500 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 1.7175500943428988e-05, |
|
"loss": 0.2076, |
|
"step": 366000 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 1.717164233816045e-05, |
|
"loss": 0.2189, |
|
"step": 366500 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 1.716778373289191e-05, |
|
"loss": 0.2027, |
|
"step": 367000 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 1.716392512762337e-05, |
|
"loss": 0.2017, |
|
"step": 367500 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 1.716006652235483e-05, |
|
"loss": 0.1943, |
|
"step": 368000 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 1.715620791708629e-05, |
|
"loss": 0.1958, |
|
"step": 368500 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 1.715234931181775e-05, |
|
"loss": 0.2038, |
|
"step": 369000 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 1.7148490706549212e-05, |
|
"loss": 0.2037, |
|
"step": 369500 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 1.714463210128067e-05, |
|
"loss": 0.1953, |
|
"step": 370000 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 1.7140773496012133e-05, |
|
"loss": 0.1976, |
|
"step": 370500 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 1.713691489074359e-05, |
|
"loss": 0.1929, |
|
"step": 371000 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 1.7133056285475053e-05, |
|
"loss": 0.205, |
|
"step": 371500 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 1.7129197680206512e-05, |
|
"loss": 0.2011, |
|
"step": 372000 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 1.7125339074937974e-05, |
|
"loss": 0.2038, |
|
"step": 372500 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 1.7121480469669433e-05, |
|
"loss": 0.2029, |
|
"step": 373000 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 1.7117621864400895e-05, |
|
"loss": 0.2002, |
|
"step": 373500 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 1.7113763259132353e-05, |
|
"loss": 0.1967, |
|
"step": 374000 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 1.7109904653863815e-05, |
|
"loss": 0.2038, |
|
"step": 374500 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 1.7106046048595274e-05, |
|
"loss": 0.206, |
|
"step": 375000 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 1.7102187443326736e-05, |
|
"loss": 0.207, |
|
"step": 375500 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 1.7098328838058198e-05, |
|
"loss": 0.2014, |
|
"step": 376000 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 1.7094470232789657e-05, |
|
"loss": 0.1977, |
|
"step": 376500 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 1.709061162752112e-05, |
|
"loss": 0.1958, |
|
"step": 377000 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 1.7086753022252577e-05, |
|
"loss": 0.1975, |
|
"step": 377500 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 1.708289441698404e-05, |
|
"loss": 0.2001, |
|
"step": 378000 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 1.7079035811715498e-05, |
|
"loss": 0.201, |
|
"step": 378500 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 1.707517720644696e-05, |
|
"loss": 0.2023, |
|
"step": 379000 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 1.707131860117842e-05, |
|
"loss": 0.2018, |
|
"step": 379500 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 1.706745999590988e-05, |
|
"loss": 0.2061, |
|
"step": 380000 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 1.706360139064134e-05, |
|
"loss": 0.2104, |
|
"step": 380500 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 1.70597427853728e-05, |
|
"loss": 0.2083, |
|
"step": 381000 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 1.705588418010426e-05, |
|
"loss": 0.1992, |
|
"step": 381500 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 1.7052025574835722e-05, |
|
"loss": 0.204, |
|
"step": 382000 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 1.704816696956718e-05, |
|
"loss": 0.2064, |
|
"step": 382500 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 1.7044308364298643e-05, |
|
"loss": 0.1923, |
|
"step": 383000 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 1.70404497590301e-05, |
|
"loss": 0.1915, |
|
"step": 383500 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 1.7036591153761563e-05, |
|
"loss": 0.2041, |
|
"step": 384000 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 1.7032732548493022e-05, |
|
"loss": 0.2057, |
|
"step": 384500 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 1.7028873943224484e-05, |
|
"loss": 0.1899, |
|
"step": 385000 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 1.7025015337955942e-05, |
|
"loss": 0.1987, |
|
"step": 385500 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 1.7021156732687404e-05, |
|
"loss": 0.211, |
|
"step": 386000 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 1.7017298127418863e-05, |
|
"loss": 0.2023, |
|
"step": 386500 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 1.7013439522150325e-05, |
|
"loss": 0.2048, |
|
"step": 387000 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 1.7009580916881784e-05, |
|
"loss": 0.2035, |
|
"step": 387500 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 1.7005722311613246e-05, |
|
"loss": 0.2011, |
|
"step": 388000 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 1.7001863706344704e-05, |
|
"loss": 0.207, |
|
"step": 388500 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 1.6998005101076166e-05, |
|
"loss": 0.2022, |
|
"step": 389000 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 1.6994146495807625e-05, |
|
"loss": 0.1954, |
|
"step": 389500 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 1.6990287890539087e-05, |
|
"loss": 0.2071, |
|
"step": 390000 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 1.6986429285270546e-05, |
|
"loss": 0.1948, |
|
"step": 390500 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 1.6982570680002008e-05, |
|
"loss": 0.197, |
|
"step": 391000 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 1.6978712074733466e-05, |
|
"loss": 0.1984, |
|
"step": 391500 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 1.697485346946493e-05, |
|
"loss": 0.2043, |
|
"step": 392000 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 1.6970994864196387e-05, |
|
"loss": 0.1983, |
|
"step": 392500 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 1.696713625892785e-05, |
|
"loss": 0.1983, |
|
"step": 393000 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 1.6963277653659308e-05, |
|
"loss": 0.2012, |
|
"step": 393500 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 1.695941904839077e-05, |
|
"loss": 0.2066, |
|
"step": 394000 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 1.6955560443122228e-05, |
|
"loss": 0.2046, |
|
"step": 394500 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 1.695170183785369e-05, |
|
"loss": 0.1981, |
|
"step": 395000 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 1.694784323258515e-05, |
|
"loss": 0.2034, |
|
"step": 395500 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 1.694398462731661e-05, |
|
"loss": 0.2048, |
|
"step": 396000 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 1.6940126022048073e-05, |
|
"loss": 0.2106, |
|
"step": 396500 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 1.693626741677953e-05, |
|
"loss": 0.1976, |
|
"step": 397000 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 1.6932408811510994e-05, |
|
"loss": 0.2032, |
|
"step": 397500 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 1.6928550206242452e-05, |
|
"loss": 0.2059, |
|
"step": 398000 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 1.6924691600973914e-05, |
|
"loss": 0.2039, |
|
"step": 398500 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 1.6920832995705373e-05, |
|
"loss": 0.1946, |
|
"step": 399000 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 1.6916974390436835e-05, |
|
"loss": 0.2094, |
|
"step": 399500 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 1.6913115785168293e-05, |
|
"loss": 0.2111, |
|
"step": 400000 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 1.6909257179899755e-05, |
|
"loss": 0.2026, |
|
"step": 400500 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 1.6905398574631214e-05, |
|
"loss": 0.2056, |
|
"step": 401000 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 1.6901539969362676e-05, |
|
"loss": 0.2042, |
|
"step": 401500 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 1.6897681364094135e-05, |
|
"loss": 0.1958, |
|
"step": 402000 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 1.6893822758825597e-05, |
|
"loss": 0.1907, |
|
"step": 402500 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 1.6889964153557055e-05, |
|
"loss": 0.2064, |
|
"step": 403000 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 1.6886105548288517e-05, |
|
"loss": 0.2025, |
|
"step": 403500 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 1.6882246943019976e-05, |
|
"loss": 0.2003, |
|
"step": 404000 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 1.6878388337751438e-05, |
|
"loss": 0.1886, |
|
"step": 404500 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 1.6874529732482897e-05, |
|
"loss": 0.1954, |
|
"step": 405000 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 1.687067112721436e-05, |
|
"loss": 0.2081, |
|
"step": 405500 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 1.6866812521945817e-05, |
|
"loss": 0.2059, |
|
"step": 406000 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 1.686295391667728e-05, |
|
"loss": 0.1964, |
|
"step": 406500 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 1.6859095311408738e-05, |
|
"loss": 0.2028, |
|
"step": 407000 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 1.68552367061402e-05, |
|
"loss": 0.2032, |
|
"step": 407500 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 1.6851378100871662e-05, |
|
"loss": 0.2033, |
|
"step": 408000 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 1.684751949560312e-05, |
|
"loss": 0.1917, |
|
"step": 408500 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 1.6843660890334583e-05, |
|
"loss": 0.2036, |
|
"step": 409000 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 1.683980228506604e-05, |
|
"loss": 0.2028, |
|
"step": 409500 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 1.6835943679797503e-05, |
|
"loss": 0.2007, |
|
"step": 410000 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 1.6832085074528962e-05, |
|
"loss": 0.2072, |
|
"step": 410500 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 1.6828226469260424e-05, |
|
"loss": 0.2059, |
|
"step": 411000 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 1.6824367863991883e-05, |
|
"loss": 0.1921, |
|
"step": 411500 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 1.6820509258723345e-05, |
|
"loss": 0.2001, |
|
"step": 412000 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 1.6816650653454803e-05, |
|
"loss": 0.2051, |
|
"step": 412500 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 1.6812792048186265e-05, |
|
"loss": 0.2081, |
|
"step": 413000 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 1.6808933442917724e-05, |
|
"loss": 0.2087, |
|
"step": 413500 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 1.6805074837649186e-05, |
|
"loss": 0.2157, |
|
"step": 414000 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 1.6801216232380645e-05, |
|
"loss": 0.2083, |
|
"step": 414500 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 1.6797357627112107e-05, |
|
"loss": 0.2008, |
|
"step": 415000 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 1.6793499021843565e-05, |
|
"loss": 0.1974, |
|
"step": 415500 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 1.6789640416575027e-05, |
|
"loss": 0.2012, |
|
"step": 416000 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 1.6785781811306486e-05, |
|
"loss": 0.1853, |
|
"step": 416500 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 1.6781923206037948e-05, |
|
"loss": 0.2057, |
|
"step": 417000 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 1.6778064600769406e-05, |
|
"loss": 0.2052, |
|
"step": 417500 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 1.677420599550087e-05, |
|
"loss": 0.2027, |
|
"step": 418000 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 1.677034739023233e-05, |
|
"loss": 0.2042, |
|
"step": 418500 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 1.676648878496379e-05, |
|
"loss": 0.1939, |
|
"step": 419000 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 1.676263017969525e-05, |
|
"loss": 0.2066, |
|
"step": 419500 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 1.675877157442671e-05, |
|
"loss": 0.2091, |
|
"step": 420000 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 1.6754912969158172e-05, |
|
"loss": 0.2111, |
|
"step": 420500 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 1.675105436388963e-05, |
|
"loss": 0.2057, |
|
"step": 421000 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 1.6747195758621092e-05, |
|
"loss": 0.1918, |
|
"step": 421500 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 1.674333715335255e-05, |
|
"loss": 0.2003, |
|
"step": 422000 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 1.6739478548084013e-05, |
|
"loss": 0.2063, |
|
"step": 422500 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 1.673561994281547e-05, |
|
"loss": 0.2117, |
|
"step": 423000 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 1.6731761337546934e-05, |
|
"loss": 0.193, |
|
"step": 423500 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 1.6727902732278392e-05, |
|
"loss": 0.2022, |
|
"step": 424000 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 1.6724044127009854e-05, |
|
"loss": 0.213, |
|
"step": 424500 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 1.6720185521741313e-05, |
|
"loss": 0.2084, |
|
"step": 425000 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 1.6716326916472775e-05, |
|
"loss": 0.2009, |
|
"step": 425500 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 1.6712468311204234e-05, |
|
"loss": 0.198, |
|
"step": 426000 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 1.6708609705935696e-05, |
|
"loss": 0.2142, |
|
"step": 426500 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 1.6704751100667154e-05, |
|
"loss": 0.1949, |
|
"step": 427000 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 1.6700892495398616e-05, |
|
"loss": 0.2135, |
|
"step": 427500 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 1.6697033890130075e-05, |
|
"loss": 0.2038, |
|
"step": 428000 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 1.6693175284861537e-05, |
|
"loss": 0.1992, |
|
"step": 428500 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 1.6689316679592996e-05, |
|
"loss": 0.2016, |
|
"step": 429000 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 1.6685458074324458e-05, |
|
"loss": 0.2019, |
|
"step": 429500 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 1.6681599469055916e-05, |
|
"loss": 0.1995, |
|
"step": 430000 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 1.6677740863787378e-05, |
|
"loss": 0.1968, |
|
"step": 430500 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 1.6673882258518837e-05, |
|
"loss": 0.1999, |
|
"step": 431000 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 1.66700236532503e-05, |
|
"loss": 0.2088, |
|
"step": 431500 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"eval_bart_score": -6.65, |
|
"eval_bertscore_f1": 0.75, |
|
"eval_bertscore_precision": 0.79, |
|
"eval_bertscore_recall": 0.72, |
|
"eval_bleu": 0.0, |
|
"eval_loss": 0.21080316603183746, |
|
"eval_meteor": 0.09, |
|
"eval_rogue_score": { |
|
"rouge1": 0.16481557726494733, |
|
"rouge2": 0.02718331876189925, |
|
"rougeL": 0.13357267291161234, |
|
"rougeLsum": 0.1335407688877468 |
|
}, |
|
"eval_runtime": 4313.7823, |
|
"eval_samples_per_second": 5.007, |
|
"eval_simple_accuracy": 0.81, |
|
"eval_steps_per_second": 2.503, |
|
"step": 431935 |
|
} |
|
], |
|
"max_steps": 2591610, |
|
"num_train_epochs": 30, |
|
"total_flos": 5.267330341797888e+17, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|