{
  "best_metric": 87.64485843737634,
  "best_model_checkpoint": "/home/ccasimiro/ccasimiro/berta/src/qa/roberta-base-ca-v2-qa/catalanqa/checkpoint-1134",
  "epoch": 10.0,
  "global_step": 5670,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.09,
      "learning_rate": 4.955908289241622e-05,
      "loss": 5.04,
      "step": 50
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.9118165784832456e-05,
      "loss": 3.614,
      "step": 100
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.8677248677248676e-05,
      "loss": 3.1231,
      "step": 150
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.82363315696649e-05,
      "loss": 2.7369,
      "step": 200
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.779541446208113e-05,
      "loss": 2.3199,
      "step": 250
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.7354497354497356e-05,
      "loss": 2.0626,
      "step": 300
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.691358024691358e-05,
      "loss": 1.8434,
      "step": 350
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.647266313932981e-05,
      "loss": 1.5806,
      "step": 400
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.603174603174603e-05,
      "loss": 1.4498,
      "step": 450
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.559082892416226e-05,
      "loss": 1.2956,
      "step": 500
    },
    {
      "epoch": 0.97,
      "learning_rate": 4.5149911816578484e-05,
      "loss": 1.1497,
      "step": 550
    },
    {
      "epoch": 1.0,
      "eval_exact_match": 69.11247803163445,
      "eval_f1": 85.23057847114657,
      "step": 567
    },
    {
      "epoch": 1.06,
      "learning_rate": 4.470899470899471e-05,
      "loss": 0.9974,
      "step": 600
    },
    {
      "epoch": 1.15,
      "learning_rate": 4.426807760141094e-05,
      "loss": 0.9077,
      "step": 650
    },
    {
      "epoch": 1.23,
      "learning_rate": 4.3827160493827164e-05,
      "loss": 0.8397,
      "step": 700
    },
    {
      "epoch": 1.32,
      "learning_rate": 4.3386243386243384e-05,
      "loss": 0.7758,
      "step": 750
    },
    {
      "epoch": 1.41,
      "learning_rate": 4.294532627865962e-05,
      "loss": 0.7537,
      "step": 800
    },
    {
      "epoch": 1.5,
      "learning_rate": 4.250440917107584e-05,
      "loss": 0.7319,
      "step": 850
    },
    {
      "epoch": 1.59,
      "learning_rate": 4.2063492063492065e-05,
      "loss": 0.7263,
      "step": 900
    },
    {
      "epoch": 1.68,
      "learning_rate": 4.162257495590829e-05,
      "loss": 0.7032,
      "step": 950
    },
    {
      "epoch": 1.76,
      "learning_rate": 4.118165784832452e-05,
      "loss": 0.6714,
      "step": 1000
    },
    {
      "epoch": 1.85,
      "learning_rate": 4.074074074074074e-05,
      "loss": 0.6854,
      "step": 1050
    },
    {
      "epoch": 1.94,
      "learning_rate": 4.029982363315697e-05,
      "loss": 0.6446,
      "step": 1100
    },
    {
      "epoch": 2.0,
      "eval_exact_match": 73.85764499121265,
      "eval_f1": 87.64485843737634,
      "step": 1134
    },
    {
      "epoch": 2.03,
      "learning_rate": 3.985890652557319e-05,
      "loss": 0.588,
      "step": 1150
    },
    {
      "epoch": 2.12,
      "learning_rate": 3.941798941798942e-05,
      "loss": 0.4666,
      "step": 1200
    },
    {
      "epoch": 2.2,
      "learning_rate": 3.8977072310405645e-05,
      "loss": 0.4807,
      "step": 1250
    },
    {
      "epoch": 2.29,
      "learning_rate": 3.853615520282187e-05,
      "loss": 0.4549,
      "step": 1300
    },
    {
      "epoch": 2.38,
      "learning_rate": 3.809523809523809e-05,
      "loss": 0.4482,
      "step": 1350
    },
    {
      "epoch": 2.47,
      "learning_rate": 3.7654320987654326e-05,
      "loss": 0.4739,
      "step": 1400
    },
    {
      "epoch": 2.56,
      "learning_rate": 3.7213403880070546e-05,
      "loss": 0.4336,
      "step": 1450
    },
    {
      "epoch": 2.65,
      "learning_rate": 3.677248677248677e-05,
      "loss": 0.3979,
      "step": 1500
    },
    {
      "epoch": 2.73,
      "learning_rate": 3.6331569664903e-05,
      "loss": 0.5031,
      "step": 1550
    },
    {
      "epoch": 2.82,
      "learning_rate": 3.5890652557319226e-05,
      "loss": 0.4193,
      "step": 1600
    },
    {
      "epoch": 2.91,
      "learning_rate": 3.5449735449735446e-05,
      "loss": 0.4714,
      "step": 1650
    },
    {
      "epoch": 3.0,
      "learning_rate": 3.500881834215168e-05,
      "loss": 0.416,
      "step": 1700
    },
    {
      "epoch": 3.0,
      "eval_exact_match": 73.94551845342707,
      "eval_f1": 87.39678260774708,
      "step": 1701
    },
    {
      "epoch": 3.09,
      "learning_rate": 3.45679012345679e-05,
      "loss": 0.2783,
      "step": 1750
    },
    {
      "epoch": 3.17,
      "learning_rate": 3.412698412698413e-05,
      "loss": 0.271,
      "step": 1800
    },
    {
      "epoch": 3.26,
      "learning_rate": 3.3686067019400353e-05,
      "loss": 0.2807,
      "step": 1850
    },
    {
      "epoch": 3.35,
      "learning_rate": 3.324514991181658e-05,
      "loss": 0.3413,
      "step": 1900
    },
    {
      "epoch": 3.44,
      "learning_rate": 3.280423280423281e-05,
      "loss": 0.3236,
      "step": 1950
    },
    {
      "epoch": 3.53,
      "learning_rate": 3.2363315696649034e-05,
      "loss": 0.3034,
      "step": 2000
    },
    {
      "epoch": 3.62,
      "learning_rate": 3.1922398589065254e-05,
      "loss": 0.3067,
      "step": 2050
    },
    {
      "epoch": 3.7,
      "learning_rate": 3.148148148148148e-05,
      "loss": 0.2978,
      "step": 2100
    },
    {
      "epoch": 3.79,
      "learning_rate": 3.104056437389771e-05,
      "loss": 0.3084,
      "step": 2150
    },
    {
      "epoch": 3.88,
      "learning_rate": 3.0599647266313934e-05,
      "loss": 0.3331,
      "step": 2200
    },
    {
      "epoch": 3.97,
      "learning_rate": 3.0158730158730158e-05,
      "loss": 0.3132,
      "step": 2250
    },
    {
      "epoch": 4.0,
      "eval_exact_match": 73.59402460456943,
      "eval_f1": 87.53281609001948,
      "step": 2268
    },
    {
      "epoch": 4.06,
      "learning_rate": 2.9717813051146388e-05,
      "loss": 0.2315,
      "step": 2300
    },
    {
      "epoch": 4.14,
      "learning_rate": 2.927689594356261e-05,
      "loss": 0.2073,
      "step": 2350
    },
    {
      "epoch": 4.23,
      "learning_rate": 2.8835978835978838e-05,
      "loss": 0.1865,
      "step": 2400
    },
    {
      "epoch": 4.32,
      "learning_rate": 2.839506172839506e-05,
      "loss": 0.1868,
      "step": 2450
    },
    {
      "epoch": 4.41,
      "learning_rate": 2.795414462081129e-05,
      "loss": 0.2252,
      "step": 2500
    },
    {
      "epoch": 4.5,
      "learning_rate": 2.7513227513227512e-05,
      "loss": 0.2252,
      "step": 2550
    },
    {
      "epoch": 4.59,
      "learning_rate": 2.7072310405643742e-05,
      "loss": 0.198,
      "step": 2600
    },
    {
      "epoch": 4.67,
      "learning_rate": 2.6631393298059965e-05,
      "loss": 0.2137,
      "step": 2650
    },
    {
      "epoch": 4.76,
      "learning_rate": 2.6190476190476192e-05,
      "loss": 0.2006,
      "step": 2700
    },
    {
      "epoch": 4.85,
      "learning_rate": 2.5749559082892416e-05,
      "loss": 0.2478,
      "step": 2750
    },
    {
      "epoch": 4.94,
      "learning_rate": 2.5308641975308646e-05,
      "loss": 0.2094,
      "step": 2800
    },
    {
      "epoch": 5.0,
      "eval_exact_match": 72.84710017574693,
      "eval_f1": 87.02126420060829,
      "step": 2835
    },
    {
      "epoch": 5.03,
      "learning_rate": 2.4867724867724866e-05,
      "loss": 0.1825,
      "step": 2850
    },
    {
      "epoch": 5.11,
      "learning_rate": 2.4426807760141093e-05,
      "loss": 0.1354,
      "step": 2900
    },
    {
      "epoch": 5.2,
      "learning_rate": 2.398589065255732e-05,
      "loss": 0.1383,
      "step": 2950
    },
    {
      "epoch": 5.29,
      "learning_rate": 2.3544973544973546e-05,
      "loss": 0.1417,
      "step": 3000
    },
    {
      "epoch": 5.38,
      "learning_rate": 2.310405643738977e-05,
      "loss": 0.174,
      "step": 3050
    },
    {
      "epoch": 5.47,
      "learning_rate": 2.2663139329805996e-05,
      "loss": 0.1297,
      "step": 3100
    },
    {
      "epoch": 5.56,
      "learning_rate": 2.2222222222222223e-05,
      "loss": 0.1254,
      "step": 3150
    },
    {
      "epoch": 5.64,
      "learning_rate": 2.1781305114638447e-05,
      "loss": 0.1467,
      "step": 3200
    },
    {
      "epoch": 5.73,
      "learning_rate": 2.1340388007054674e-05,
      "loss": 0.1429,
      "step": 3250
    },
    {
      "epoch": 5.82,
      "learning_rate": 2.08994708994709e-05,
      "loss": 0.1401,
      "step": 3300
    },
    {
      "epoch": 5.91,
      "learning_rate": 2.0458553791887124e-05,
      "loss": 0.1309,
      "step": 3350
    },
    {
      "epoch": 6.0,
      "learning_rate": 2.001763668430335e-05,
      "loss": 0.1286,
      "step": 3400
    },
    {
      "epoch": 6.0,
      "eval_exact_match": 73.76977152899825,
      "eval_f1": 87.38988637413004,
      "step": 3402
    },
    {
      "epoch": 6.08,
      "learning_rate": 1.9576719576719577e-05,
      "loss": 0.102,
      "step": 3450
    },
    {
      "epoch": 6.17,
      "learning_rate": 1.91358024691358e-05,
      "loss": 0.1046,
      "step": 3500
    },
    {
      "epoch": 6.26,
      "learning_rate": 1.8694885361552028e-05,
      "loss": 0.0967,
      "step": 3550
    },
    {
      "epoch": 6.35,
      "learning_rate": 1.8253968253968254e-05,
      "loss": 0.1127,
      "step": 3600
    },
    {
      "epoch": 6.44,
      "learning_rate": 1.781305114638448e-05,
      "loss": 0.093,
      "step": 3650
    },
    {
      "epoch": 6.53,
      "learning_rate": 1.7372134038800705e-05,
      "loss": 0.1085,
      "step": 3700
    },
    {
      "epoch": 6.61,
      "learning_rate": 1.693121693121693e-05,
      "loss": 0.0913,
      "step": 3750
    },
    {
      "epoch": 6.7,
      "learning_rate": 1.6490299823633158e-05,
      "loss": 0.1008,
      "step": 3800
    },
    {
      "epoch": 6.79,
      "learning_rate": 1.604938271604938e-05,
      "loss": 0.1191,
      "step": 3850
    },
    {
      "epoch": 6.88,
      "learning_rate": 1.560846560846561e-05,
      "loss": 0.095,
      "step": 3900
    },
    {
      "epoch": 6.97,
      "learning_rate": 1.5167548500881834e-05,
      "loss": 0.1145,
      "step": 3950
    },
    {
      "epoch": 7.0,
      "eval_exact_match": 73.94551845342707,
      "eval_f1": 87.51076671024968,
      "step": 3969
    },
    {
      "epoch": 7.05,
      "learning_rate": 1.472663139329806e-05,
      "loss": 0.0907,
      "step": 4000
    },
    {
      "epoch": 7.14,
      "learning_rate": 1.4285714285714285e-05,
      "loss": 0.0702,
      "step": 4050
    },
    {
      "epoch": 7.23,
      "learning_rate": 1.384479717813051e-05,
      "loss": 0.0683,
      "step": 4100
    },
    {
      "epoch": 7.32,
      "learning_rate": 1.3403880070546737e-05,
      "loss": 0.0877,
      "step": 4150
    },
    {
      "epoch": 7.41,
      "learning_rate": 1.2962962962962962e-05,
      "loss": 0.085,
      "step": 4200
    },
    {
      "epoch": 7.5,
      "learning_rate": 1.252204585537919e-05,
      "loss": 0.0673,
      "step": 4250
    },
    {
      "epoch": 7.58,
      "learning_rate": 1.2081128747795414e-05,
      "loss": 0.0664,
      "step": 4300
    },
    {
      "epoch": 7.67,
      "learning_rate": 1.164021164021164e-05,
      "loss": 0.0984,
      "step": 4350
    },
    {
      "epoch": 7.76,
      "learning_rate": 1.1199294532627866e-05,
      "loss": 0.0635,
      "step": 4400
    },
    {
      "epoch": 7.85,
      "learning_rate": 1.0758377425044091e-05,
      "loss": 0.0866,
      "step": 4450
    },
    {
      "epoch": 7.94,
      "learning_rate": 1.0317460317460318e-05,
      "loss": 0.0744,
      "step": 4500
    },
    {
      "epoch": 8.0,
      "eval_exact_match": 73.85764499121265,
      "eval_f1": 87.23571814342841,
      "step": 4536
    },
    {
      "epoch": 8.02,
      "learning_rate": 9.876543209876543e-06,
      "loss": 0.0788,
      "step": 4550
    },
    {
      "epoch": 8.11,
      "learning_rate": 9.435626102292768e-06,
      "loss": 0.0556,
      "step": 4600
    },
    {
      "epoch": 8.2,
      "learning_rate": 8.994708994708995e-06,
      "loss": 0.0415,
      "step": 4650
    },
    {
      "epoch": 8.29,
      "learning_rate": 8.55379188712522e-06,
      "loss": 0.052,
      "step": 4700
    },
    {
      "epoch": 8.38,
      "learning_rate": 8.112874779541445e-06,
      "loss": 0.0636,
      "step": 4750
    },
    {
      "epoch": 8.47,
      "learning_rate": 7.671957671957672e-06,
      "loss": 0.0481,
      "step": 4800
    },
    {
      "epoch": 8.55,
      "learning_rate": 7.231040564373897e-06,
      "loss": 0.0545,
      "step": 4850
    },
    {
      "epoch": 8.64,
      "learning_rate": 6.790123456790123e-06,
      "loss": 0.0597,
      "step": 4900
    },
    {
      "epoch": 8.73,
      "learning_rate": 6.349206349206349e-06,
      "loss": 0.0437,
      "step": 4950
    },
    {
      "epoch": 8.82,
      "learning_rate": 5.908289241622575e-06,
      "loss": 0.0722,
      "step": 5000
    },
    {
      "epoch": 8.91,
      "learning_rate": 5.4673721340388e-06,
      "loss": 0.062,
      "step": 5050
    },
    {
      "epoch": 8.99,
      "learning_rate": 5.026455026455026e-06,
      "loss": 0.0719,
      "step": 5100
    },
    {
      "epoch": 9.0,
      "eval_exact_match": 74.07732864674868,
      "eval_f1": 87.42888206161075,
      "step": 5103
    },
    {
      "epoch": 9.08,
      "learning_rate": 4.585537918871252e-06,
      "loss": 0.0346,
      "step": 5150
    },
    {
      "epoch": 9.17,
      "learning_rate": 4.144620811287478e-06,
      "loss": 0.0373,
      "step": 5200
    },
    {
      "epoch": 9.26,
      "learning_rate": 3.7037037037037037e-06,
      "loss": 0.0333,
      "step": 5250
    },
    {
      "epoch": 9.35,
      "learning_rate": 3.2627865961199293e-06,
      "loss": 0.0384,
      "step": 5300
    },
    {
      "epoch": 9.44,
      "learning_rate": 2.8218694885361552e-06,
      "loss": 0.0413,
      "step": 5350
    },
    {
      "epoch": 9.52,
      "learning_rate": 2.3809523809523808e-06,
      "loss": 0.0511,
      "step": 5400
    },
    {
      "epoch": 9.61,
      "learning_rate": 1.9400352733686067e-06,
      "loss": 0.0453,
      "step": 5450
    },
    {
      "epoch": 9.7,
      "learning_rate": 1.4991181657848325e-06,
      "loss": 0.0422,
      "step": 5500
    },
    {
      "epoch": 9.79,
      "learning_rate": 1.0582010582010582e-06,
      "loss": 0.0572,
      "step": 5550
    },
    {
      "epoch": 9.88,
      "learning_rate": 6.17283950617284e-07,
      "loss": 0.0494,
      "step": 5600
    },
    {
      "epoch": 9.96,
      "learning_rate": 1.763668430335097e-07,
      "loss": 0.0444,
      "step": 5650
    },
    {
      "epoch": 10.0,
      "eval_exact_match": 74.34094903339192,
      "eval_f1": 87.50452307749782,
      "step": 5670
    }
  ],
  "max_steps": 5670,
  "num_train_epochs": 10,
  "total_flos": 6912296207806464.0,
  "trial_name": null,
  "trial_params": null
}