{
  "best_metric": 89136693248.0,
  "best_model_checkpoint": "W:/res/Transformers/AST-VoxCelebSpoof-Synthetic-Voice-Detection/checkpoint-88581",
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 88581,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "learning_rate": 4.97177724342692e-05,
      "loss": 981229121797357.6,
      "step": 500
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.94355448685384e-05,
      "loss": 293114647320461.3,
      "step": 1000
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.91533173028076e-05,
      "loss": 93657499965587.45,
      "step": 1500
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.8871089737076805e-05,
      "loss": 0.0,
      "step": 2000
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.8588862171346e-05,
      "loss": 5643194574307.328,
      "step": 2500
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.8306634605615204e-05,
      "loss": 73667803047002.11,
      "step": 3000
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.80244070398844e-05,
      "loss": 64841546469474.305,
      "step": 3500
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.77421794741536e-05,
      "loss": 9083863347232.768,
      "step": 4000
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.74599519084228e-05,
      "loss": 13152772555800.576,
      "step": 4500
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.717772434269201e-05,
      "loss": 20340400325656.574,
      "step": 5000
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.68954967769612e-05,
      "loss": 26080007788953.6,
      "step": 5500
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.6613269211230405e-05,
      "loss": 7317327729131.52,
      "step": 6000
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.6331041645499604e-05,
      "loss": 12972364602015.744,
      "step": 6500
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.60488140797688e-05,
      "loss": 11636490186522.623,
      "step": 7000
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.5766586514038e-05,
      "loss": 27702524026814.465,
      "step": 7500
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.54843589483072e-05,
      "loss": 25504280307826.688,
      "step": 8000
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.52021313825764e-05,
      "loss": 11533152199639.04,
      "step": 8500
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.49199038168456e-05,
      "loss": 6039433761521.664,
      "step": 9000
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.4637676251114805e-05,
      "loss": 0.0,
      "step": 9500
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.4355448685384e-05,
      "loss": 13410034654380.031,
      "step": 10000
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.40732211196532e-05,
      "loss": 8384607774834.688,
      "step": 10500
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.37909935539224e-05,
      "loss": 9342839137763.328,
      "step": 11000
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.35087659881916e-05,
      "loss": 13016159309791.232,
      "step": 11500
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.32265384224608e-05,
      "loss": 5719609021825.024,
      "step": 12000
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.294431085673e-05,
      "loss": 9965717843607.553,
      "step": 12500
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.26620832909992e-05,
      "loss": 1556572920610.816,
      "step": 13000
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.2379855725268404e-05,
      "loss": 8649103672082.432,
      "step": 13500
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.20976281595376e-05,
      "loss": 11390139452358.656,
      "step": 14000
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.18154005938068e-05,
      "loss": 4287775373262.848,
      "step": 14500
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.1533173028076e-05,
      "loss": 11550081886978.049,
      "step": 15000
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.12509454623452e-05,
      "loss": 4851886578466.816,
      "step": 15500
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.09687178966144e-05,
      "loss": 3983609279021.056,
      "step": 16000
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.06864903308836e-05,
      "loss": 1347694299709.44,
      "step": 16500
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.04042627651528e-05,
      "loss": 5110744257396.736,
      "step": 17000
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.0122035199422e-05,
      "loss": 5899718475382.784,
      "step": 17500
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.98398076336912e-05,
      "loss": 0.0,
      "step": 18000
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.9557580067960395e-05,
      "loss": 5675016523874.304,
      "step": 18500
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.92753525022296e-05,
      "loss": 4547420104949.76,
      "step": 19000
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.89931249364988e-05,
      "loss": 4149328444653.568,
      "step": 19500
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.8710897370768e-05,
      "loss": 3490896267968.512,
      "step": 20000
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.84286698050372e-05,
      "loss": 4580182115483.648,
      "step": 20500
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.8146442239306404e-05,
      "loss": 4821170046107.648,
      "step": 21000
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.7864214673575596e-05,
      "loss": 1243646301044.736,
      "step": 21500
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.75819871078448e-05,
      "loss": 0.0,
      "step": 22000
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.7299759542114e-05,
      "loss": 6636355832512.512,
      "step": 22500
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.70175319763832e-05,
      "loss": 2408073540927.488,
      "step": 23000
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.67353044106524e-05,
      "loss": 2302174948229.12,
      "step": 23500
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.64530768449216e-05,
      "loss": 1779972423286.784,
      "step": 24000
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.61708492791908e-05,
      "loss": 4300719062515.712,
      "step": 24500
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.5888621713459997e-05,
      "loss": 1038094467858.432,
      "step": 25000
    },
    {
      "epoch": 0.86,
      "learning_rate": 3.56063941477292e-05,
      "loss": 0.0,
      "step": 25500
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.5324166581998395e-05,
      "loss": 927895002284.032,
      "step": 26000
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.50419390162676e-05,
      "loss": 0.0,
      "step": 26500
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.47597114505368e-05,
      "loss": 5167577412141.056,
      "step": 27000
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.4477483884806e-05,
      "loss": 1088813065568.256,
      "step": 27500
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.41952563190752e-05,
      "loss": 2567835050049.536,
      "step": 28000
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.3913028753344404e-05,
      "loss": 2749761006338.048,
      "step": 28500
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.3630801187613596e-05,
      "loss": 930680087248.896,
      "step": 29000
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.33485736218828e-05,
      "loss": 2218896740319.232,
      "step": 29500
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.9997713908327724,
      "eval_f1": 0.9998122809944936,
      "eval_loss": 611463921664.0,
      "eval_precision": 0.9999374191664233,
      "eval_recall": 0.9996871741397289,
      "eval_runtime": 5137.6511,
      "eval_samples_per_second": 15.325,
      "eval_steps_per_second": 1.916,
      "step": 29527
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.3066346056152e-05,
      "loss": 0.0,
      "step": 30000
    },
    {
      "epoch": 1.03,
      "learning_rate": 3.27841184904212e-05,
      "loss": 3861603485220.864,
      "step": 30500
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.25018909246904e-05,
      "loss": 3710917241995.264,
      "step": 31000
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.22196633589596e-05,
      "loss": 455580871294.976,
      "step": 31500
    },
    {
      "epoch": 1.08,
      "learning_rate": 3.19374357932288e-05,
      "loss": 4421536224116.736,
      "step": 32000
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.1655208227497996e-05,
      "loss": 0.0,
      "step": 32500
    },
    {
      "epoch": 1.12,
      "learning_rate": 3.1372980661767195e-05,
      "loss": 751868921774.08,
      "step": 33000
    },
    {
      "epoch": 1.13,
      "learning_rate": 3.1090753096036394e-05,
      "loss": 5901689865371.648,
      "step": 33500
    },
    {
      "epoch": 1.15,
      "learning_rate": 3.08085255303056e-05,
      "loss": 1815885194985.472,
      "step": 34000
    },
    {
      "epoch": 1.17,
      "learning_rate": 3.05262979645748e-05,
      "loss": 1309930531323.904,
      "step": 34500
    },
    {
      "epoch": 1.19,
      "learning_rate": 3.0244070398843998e-05,
      "loss": 433426691784.704,
      "step": 35000
    },
    {
      "epoch": 1.2,
      "learning_rate": 2.9961842833113197e-05,
      "loss": 1553386189094.912,
      "step": 35500
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.96796152673824e-05,
      "loss": 2184752152969.216,
      "step": 36000
    },
    {
      "epoch": 1.24,
      "learning_rate": 2.9397387701651595e-05,
      "loss": 3375935831146.496,
      "step": 36500
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.9115160135920798e-05,
      "loss": 1693437019553.792,
      "step": 37000
    },
    {
      "epoch": 1.27,
      "learning_rate": 2.8832932570189997e-05,
      "loss": 0.0,
      "step": 37500
    },
    {
      "epoch": 1.29,
      "learning_rate": 2.85507050044592e-05,
      "loss": 0.0,
      "step": 38000
    },
    {
      "epoch": 1.3,
      "learning_rate": 2.8268477438728395e-05,
      "loss": 0.0,
      "step": 38500
    },
    {
      "epoch": 1.32,
      "learning_rate": 2.7986249872997594e-05,
      "loss": 1764714954620.928,
      "step": 39000
    },
    {
      "epoch": 1.34,
      "learning_rate": 2.7704022307266797e-05,
      "loss": 0.0,
      "step": 39500
    },
    {
      "epoch": 1.35,
      "learning_rate": 2.7421794741535996e-05,
      "loss": 0.0,
      "step": 40000
    },
    {
      "epoch": 1.37,
      "learning_rate": 2.7139567175805198e-05,
      "loss": 0.0,
      "step": 40500
    },
    {
      "epoch": 1.39,
      "learning_rate": 2.6857339610074394e-05,
      "loss": 0.0,
      "step": 41000
    },
    {
      "epoch": 1.41,
      "learning_rate": 2.6575112044343596e-05,
      "loss": 1287525700206.592,
      "step": 41500
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.6292884478612795e-05,
      "loss": 947075990683.648,
      "step": 42000
    },
    {
      "epoch": 1.44,
      "learning_rate": 2.6010656912881998e-05,
      "loss": 0.0,
      "step": 42500
    },
    {
      "epoch": 1.46,
      "learning_rate": 2.5728429347151194e-05,
      "loss": 0.0,
      "step": 43000
    },
    {
      "epoch": 1.47,
      "learning_rate": 2.54462017814204e-05,
      "loss": 1175255120871.424,
      "step": 43500
    },
    {
      "epoch": 1.49,
      "learning_rate": 2.5163974215689595e-05,
      "loss": 1263805132701.696,
      "step": 44000
    },
    {
      "epoch": 1.51,
      "learning_rate": 2.4881746649958794e-05,
      "loss": 2792309099855.872,
      "step": 44500
    },
    {
      "epoch": 1.52,
      "learning_rate": 2.4599519084227997e-05,
      "loss": 113429733965.824,
      "step": 45000
    },
    {
      "epoch": 1.54,
      "learning_rate": 2.4317291518497196e-05,
      "loss": 1864144655482.88,
      "step": 45500
    },
    {
      "epoch": 1.56,
      "learning_rate": 2.4035063952766395e-05,
      "loss": 677714231033.856,
      "step": 46000
    },
    {
      "epoch": 1.57,
      "learning_rate": 2.3752836387035597e-05,
      "loss": 0.0,
      "step": 46500
    },
    {
      "epoch": 1.59,
      "learning_rate": 2.3470608821304796e-05,
      "loss": 1446208904101.888,
      "step": 47000
    },
    {
      "epoch": 1.61,
      "learning_rate": 2.3188381255573995e-05,
      "loss": 0.0,
      "step": 47500
    },
    {
      "epoch": 1.63,
      "learning_rate": 2.2906153689843194e-05,
      "loss": 2399757678936.064,
      "step": 48000
    },
    {
      "epoch": 1.64,
      "learning_rate": 2.2623926124112397e-05,
      "loss": 0.0,
      "step": 48500
    },
    {
      "epoch": 1.66,
      "learning_rate": 2.2341698558381596e-05,
      "loss": 0.0,
      "step": 49000
    },
    {
      "epoch": 1.68,
      "learning_rate": 2.2059470992650795e-05,
      "loss": 1886529622376.448,
      "step": 49500
    },
    {
      "epoch": 1.69,
      "learning_rate": 2.1777243426919997e-05,
      "loss": 0.0,
      "step": 50000
    },
    {
      "epoch": 1.71,
      "learning_rate": 2.1495015861189193e-05,
      "loss": 317279199821.824,
      "step": 50500
    },
    {
      "epoch": 1.73,
      "learning_rate": 2.1212788295458396e-05,
      "loss": 0.0,
      "step": 51000
    },
    {
      "epoch": 1.74,
      "learning_rate": 2.0930560729727595e-05,
      "loss": 1996925045833.728,
      "step": 51500
    },
    {
      "epoch": 1.76,
      "learning_rate": 2.0648333163996794e-05,
      "loss": 0.0,
      "step": 52000
    },
    {
      "epoch": 1.78,
      "learning_rate": 2.0366105598265993e-05,
      "loss": 0.0,
      "step": 52500
    },
    {
      "epoch": 1.79,
      "learning_rate": 2.0083878032535195e-05,
      "loss": 341458523521.024,
      "step": 53000
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.9801650466804394e-05,
      "loss": 0.0,
      "step": 53500
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.9519422901073593e-05,
      "loss": 1089140623933.44,
      "step": 54000
    },
    {
      "epoch": 1.85,
      "learning_rate": 1.9237195335342796e-05,
      "loss": 1722647092133.888,
      "step": 54500
    },
    {
      "epoch": 1.86,
      "learning_rate": 1.8954967769611995e-05,
      "loss": 1563876512497.664,
      "step": 55000
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.8672740203881194e-05,
      "loss": 288611937288.192,
      "step": 55500
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.8390512638150396e-05,
      "loss": 0.0,
      "step": 56000
    },
    {
      "epoch": 1.91,
      "learning_rate": 1.8108285072419596e-05,
      "loss": 0.0,
      "step": 56500
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.7826057506688795e-05,
      "loss": 0.0,
      "step": 57000
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.7543829940957997e-05,
      "loss": 0.0,
      "step": 57500
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.7261602375227193e-05,
      "loss": 0.0,
      "step": 58000
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.6979374809496392e-05,
      "loss": 0.0,
      "step": 58500
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.6697147243765594e-05,
      "loss": 522149441830.912,
      "step": 59000
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.9996951877770298,
      "eval_f1": 0.9997497027720419,
      "eval_loss": 284563668992.0,
      "eval_precision": 0.999895694259012,
      "eval_recall": 0.9996037539103233,
      "eval_runtime": 5225.6012,
      "eval_samples_per_second": 15.068,
      "eval_steps_per_second": 1.884,
      "step": 59054
    },
    {
      "epoch": 2.02,
      "learning_rate": 1.6414919678034793e-05,
      "loss": 0.0,
      "step": 59500
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.6132692112303992e-05,
      "loss": 0.0,
      "step": 60000
    },
    {
      "epoch": 2.05,
      "learning_rate": 1.5850464546573195e-05,
      "loss": 191304554250.24,
      "step": 60500
    },
    {
      "epoch": 2.07,
      "learning_rate": 1.5568236980842394e-05,
      "loss": 0.0,
      "step": 61000
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.5286009415111593e-05,
      "loss": 491350298460.16,
      "step": 61500
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.5003781849380794e-05,
      "loss": 0.0,
      "step": 62000
    },
    {
      "epoch": 2.12,
      "learning_rate": 1.4721554283649993e-05,
      "loss": 0.0,
      "step": 62500
    },
    {
      "epoch": 2.13,
      "learning_rate": 1.4439326717919194e-05,
      "loss": 1899711548096.512,
      "step": 63000
    },
    {
      "epoch": 2.15,
      "learning_rate": 1.4157099152188394e-05,
      "loss": 681978059816.96,
      "step": 63500
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.3874871586457593e-05,
      "loss": 0.0,
      "step": 64000
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.3592644020726794e-05,
      "loss": 0.0,
      "step": 64500
    },
    {
      "epoch": 2.2,
      "learning_rate": 1.3310416454995995e-05,
      "loss": 0.0,
      "step": 65000
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.3028188889265194e-05,
      "loss": 0.0,
      "step": 65500
    },
    {
      "epoch": 2.24,
      "learning_rate": 1.2745961323534391e-05,
      "loss": 301877312880.64,
      "step": 66000
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.2463733757803594e-05,
      "loss": 593167598485.504,
      "step": 66500
    },
    {
      "epoch": 2.27,
      "learning_rate": 1.2181506192072793e-05,
      "loss": 251006361272.32,
      "step": 67000
    },
    {
      "epoch": 2.29,
      "learning_rate": 1.1899278626341992e-05,
      "loss": 19797129560.064,
      "step": 67500
    },
    {
      "epoch": 2.3,
      "learning_rate": 1.1617051060611193e-05,
      "loss": 181410543435.776,
      "step": 68000
    },
    {
      "epoch": 2.32,
      "learning_rate": 1.1334823494880392e-05,
      "loss": 215261328179.2,
      "step": 68500
    },
    {
      "epoch": 2.34,
      "learning_rate": 1.1052595929149593e-05,
      "loss": 30928501997.568,
      "step": 69000
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.0770368363418792e-05,
      "loss": 257392692428.8,
      "step": 69500
    },
    {
      "epoch": 2.37,
      "learning_rate": 1.0488140797687992e-05,
      "loss": 0.0,
      "step": 70000
    },
    {
      "epoch": 2.39,
      "learning_rate": 1.0205913231957193e-05,
      "loss": 257848730714.112,
      "step": 70500
    },
    {
      "epoch": 2.4,
      "learning_rate": 9.923685666226392e-06,
      "loss": 307711254200.32,
      "step": 71000
    },
    {
      "epoch": 2.42,
      "learning_rate": 9.641458100495591e-06,
      "loss": 0.0,
      "step": 71500
    },
    {
      "epoch": 2.44,
      "learning_rate": 9.359230534764792e-06,
      "loss": 158387253805.056,
      "step": 72000
    },
    {
      "epoch": 2.46,
      "learning_rate": 9.077002969033991e-06,
      "loss": 0.0,
      "step": 72500
    },
    {
      "epoch": 2.47,
      "learning_rate": 8.794775403303192e-06,
      "loss": 0.0,
      "step": 73000
    },
    {
      "epoch": 2.49,
      "learning_rate": 8.512547837572393e-06,
      "loss": 0.0,
      "step": 73500
    },
    {
      "epoch": 2.51,
      "learning_rate": 8.230320271841592e-06,
      "loss": 1090791367770.112,
      "step": 74000
    },
    {
      "epoch": 2.52,
      "learning_rate": 7.948092706110793e-06,
      "loss": 0.0,
      "step": 74500
    },
    {
      "epoch": 2.54,
      "learning_rate": 7.665865140379992e-06,
      "loss": 569641780903.936,
      "step": 75000
    },
    {
      "epoch": 2.56,
      "learning_rate": 7.383637574649191e-06,
      "loss": 0.0,
      "step": 75500
    },
    {
      "epoch": 2.57,
      "learning_rate": 7.1014100089183914e-06,
      "loss": 0.0,
      "step": 76000
    },
    {
      "epoch": 2.59,
      "learning_rate": 6.819182443187591e-06,
      "loss": 0.0,
      "step": 76500
    },
    {
      "epoch": 2.61,
      "learning_rate": 6.536954877456791e-06,
      "loss": 426132293812.224,
      "step": 77000
    },
    {
      "epoch": 2.62,
      "learning_rate": 6.254727311725991e-06,
      "loss": 0.0,
      "step": 77500
    },
    {
      "epoch": 2.64,
      "learning_rate": 5.972499745995191e-06,
      "loss": 0.0,
      "step": 78000
    },
    {
      "epoch": 2.66,
      "learning_rate": 5.690272180264391e-06,
      "loss": 39255350968.32,
      "step": 78500
    },
    {
      "epoch": 2.68,
      "learning_rate": 5.408044614533591e-06,
      "loss": 5119055233.024,
      "step": 79000
    },
    {
      "epoch": 2.69,
      "learning_rate": 5.125817048802791e-06,
      "loss": 57651782221.824,
      "step": 79500
    },
    {
      "epoch": 2.71,
      "learning_rate": 4.843589483071991e-06,
      "loss": 211899325087.744,
      "step": 80000
    },
    {
      "epoch": 2.73,
      "learning_rate": 4.561361917341191e-06,
      "loss": 0.0,
      "step": 80500
    },
    {
      "epoch": 2.74,
      "learning_rate": 4.279134351610391e-06,
      "loss": 0.0,
      "step": 81000
    },
    {
      "epoch": 2.76,
      "learning_rate": 3.9969067858795905e-06,
      "loss": 92870765707.264,
      "step": 81500
    },
    {
      "epoch": 2.78,
      "learning_rate": 3.7146792201487904e-06,
      "loss": 0.0,
      "step": 82000
    },
    {
      "epoch": 2.79,
      "learning_rate": 3.4324516544179904e-06,
      "loss": 0.0,
      "step": 82500
    },
    {
      "epoch": 2.81,
      "learning_rate": 3.1502240886871903e-06,
      "loss": 2615742038.016,
      "step": 83000
    },
    {
      "epoch": 2.83,
      "learning_rate": 2.86799652295639e-06,
      "loss": 0.0,
      "step": 83500
    },
    {
      "epoch": 2.84,
      "learning_rate": 2.5857689572255905e-06,
      "loss": 0.0,
      "step": 84000
    },
    {
      "epoch": 2.86,
      "learning_rate": 2.30354139149479e-06,
      "loss": 9586501222.4,
      "step": 84500
    },
    {
      "epoch": 2.88,
      "learning_rate": 2.0213138257639904e-06,
      "loss": 0.0,
      "step": 85000
    },
    {
      "epoch": 2.9,
      "learning_rate": 1.73908626003319e-06,
      "loss": 0.0,
      "step": 85500
    },
    {
      "epoch": 2.91,
      "learning_rate": 1.45685869430239e-06,
      "loss": 86379425955.84,
      "step": 86000
    },
    {
      "epoch": 2.93,
      "learning_rate": 1.17463112857159e-06,
      "loss": 0.0,
      "step": 86500
    },
    {
      "epoch": 2.95,
      "learning_rate": 8.924035628407898e-07,
      "loss": 0.0,
      "step": 87000
    },
    {
      "epoch": 2.96,
      "learning_rate": 6.101759971099898e-07,
      "loss": 0.0,
      "step": 87500
    },
    {
      "epoch": 2.98,
      "learning_rate": 3.279484313791897e-07,
      "loss": 0.0,
      "step": 88000
    },
    {
      "epoch": 3.0,
      "learning_rate": 4.572086564838961e-08,
      "loss": 0.0,
      "step": 88500
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.9998856954163862,
      "eval_f1": 0.9999061434336903,
      "eval_loss": 89136693248.0,
      "eval_precision": 1.0,
      "eval_recall": 0.9998123044838373,
      "eval_runtime": 5135.9845,
      "eval_samples_per_second": 15.33,
      "eval_steps_per_second": 1.916,
      "step": 88581
    },
    {
      "epoch": 3.0,
      "step": 88581,
      "total_flos": 4.803336053113592e+19,
      "train_loss": 10893808030881.182,
      "train_runtime": 445813.7666,
      "train_samples_per_second": 1.59,
      "train_steps_per_second": 0.199
    }
  ],
  "logging_steps": 500,
  "max_steps": 88581,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 4.803336053113592e+19,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}