|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.986666666666667,
  "eval_steps": 500,
  "global_step": 168,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.17777777777777778,
      "grad_norm": 7.6314873695373535,
      "learning_rate": 2.9411764705882355e-06,
      "logits/chosen": -0.6844646334648132,
      "logits/rejected": -0.35283079743385315,
      "logps/chosen": -2.9458744525909424,
      "logps/rejected": -3.7147631645202637,
      "loss": 2.9904,
      "odds_ratio_loss": 25.87075424194336,
      "rewards/accuracies": 0.8125,
      "rewards/chosen": -0.2945874333381653,
      "rewards/margins": 0.07688888907432556,
      "rewards/rejected": -0.37147635221481323,
      "sft_loss": 0.40333542227745056,
      "step": 10
    },
    {
      "epoch": 0.35555555555555557,
      "grad_norm": 9.168193817138672,
      "learning_rate": 4.995131923687488e-06,
      "logits/chosen": -0.3836471438407898,
      "logits/rejected": 0.008832636289298534,
      "logps/chosen": -2.8614296913146973,
      "logps/rejected": -3.626382350921631,
      "loss": 2.9085,
      "odds_ratio_loss": 25.276187896728516,
      "rewards/accuracies": 0.793749988079071,
      "rewards/chosen": -0.2861429452896118,
      "rewards/margins": 0.07649530470371246,
      "rewards/rejected": -0.3626382648944855,
      "sft_loss": 0.3808932900428772,
      "step": 20
    },
    {
      "epoch": 0.5333333333333333,
      "grad_norm": 8.867982864379883,
      "learning_rate": 4.90911473983908e-06,
      "logits/chosen": -0.5513006448745728,
      "logits/rejected": -0.1812809407711029,
      "logps/chosen": -2.488701343536377,
      "logps/rejected": -3.2581515312194824,
      "loss": 2.5338,
      "odds_ratio_loss": 22.105247497558594,
      "rewards/accuracies": 0.8500000238418579,
      "rewards/chosen": -0.2488701343536377,
      "rewards/margins": 0.07694502174854279,
      "rewards/rejected": -0.3258151412010193,
      "sft_loss": 0.32325050234794617,
      "step": 30
    },
    {
      "epoch": 0.7111111111111111,
      "grad_norm": 6.930299282073975,
      "learning_rate": 4.71919261421297e-06,
      "logits/chosen": -0.542073667049408,
      "logits/rejected": -0.3160572946071625,
      "logps/chosen": -1.9436540603637695,
      "logps/rejected": -2.893744707107544,
      "loss": 1.9838,
      "odds_ratio_loss": 17.531436920166016,
      "rewards/accuracies": 0.8687499761581421,
      "rewards/chosen": -0.19436539709568024,
      "rewards/margins": 0.09500905126333237,
      "rewards/rejected": -0.289374440908432,
      "sft_loss": 0.2306901514530182,
      "step": 40
    },
    {
      "epoch": 0.8888888888888888,
      "grad_norm": 5.411482334136963,
      "learning_rate": 4.43355687413747e-06,
      "logits/chosen": -0.3556787371635437,
      "logits/rejected": -0.13702432811260223,
      "logps/chosen": -1.401972770690918,
      "logps/rejected": -2.2697913646698,
      "loss": 1.4458,
      "odds_ratio_loss": 12.927480697631836,
      "rewards/accuracies": 0.84375,
      "rewards/chosen": -0.1401972770690918,
      "rewards/margins": 0.08678187429904938,
      "rewards/rejected": -0.2269791066646576,
      "sft_loss": 0.15308110415935516,
      "step": 50
    },
    {
      "epoch": 1.0666666666666667,
      "grad_norm": 3.6417393684387207,
      "learning_rate": 4.064526968101844e-06,
      "logits/chosen": -0.08338828384876251,
      "logits/rejected": 0.10747692734003067,
      "logps/chosen": -1.0365681648254395,
      "logps/rejected": -2.0373435020446777,
      "loss": 1.0768,
      "odds_ratio_loss": 9.728913307189941,
      "rewards/accuracies": 0.8374999761581421,
      "rewards/chosen": -0.10365680605173111,
      "rewards/margins": 0.1000775545835495,
      "rewards/rejected": -0.2037343531847,
      "sft_loss": 0.10390937328338623,
      "step": 60
    },
    {
      "epoch": 1.2444444444444445,
      "grad_norm": 4.931552886962891,
      "learning_rate": 3.6280191288478437e-06,
      "logits/chosen": -0.3539902865886688,
      "logits/rejected": -0.07786711305379868,
      "logps/chosen": -0.7550846338272095,
      "logps/rejected": -1.6581230163574219,
      "loss": 0.7984,
      "odds_ratio_loss": 7.2239203453063965,
      "rewards/accuracies": 0.875,
      "rewards/chosen": -0.07550846040248871,
      "rewards/margins": 0.09030384570360184,
      "rewards/rejected": -0.16581231355667114,
      "sft_loss": 0.07596994936466217,
      "step": 70
    },
    {
      "epoch": 1.4222222222222223,
      "grad_norm": 4.805271148681641,
      "learning_rate": 3.142859907420615e-06,
      "logits/chosen": 0.25180932879447937,
      "logits/rejected": 0.5221790671348572,
      "logps/chosen": -0.5656775236129761,
      "logps/rejected": -1.4922306537628174,
      "loss": 0.6071,
      "odds_ratio_loss": 5.452211380004883,
      "rewards/accuracies": 0.862500011920929,
      "rewards/chosen": -0.05656775087118149,
      "rewards/margins": 0.09265531599521637,
      "rewards/rejected": -0.14922307431697845,
      "sft_loss": 0.06183191016316414,
      "step": 80
    },
    {
      "epoch": 1.6,
      "grad_norm": 3.405627965927124,
      "learning_rate": 2.629974185404951e-06,
      "logits/chosen": -0.19811873137950897,
      "logits/rejected": 0.17863574624061584,
      "logps/chosen": -0.47559866309165955,
      "logps/rejected": -1.4184300899505615,
      "loss": 0.5238,
      "odds_ratio_loss": 4.697209358215332,
      "rewards/accuracies": 0.831250011920929,
      "rewards/chosen": -0.04755987599492073,
      "rewards/margins": 0.09428314864635468,
      "rewards/rejected": -0.1418430209159851,
      "sft_loss": 0.0540793314576149,
      "step": 90
    },
    {
      "epoch": 1.7777777777777777,
      "grad_norm": 3.66274356842041,
      "learning_rate": 2.1114826863194882e-06,
      "logits/chosen": -0.39210397005081177,
      "logits/rejected": 0.007559856865555048,
      "logps/chosen": -0.3131728768348694,
      "logps/rejected": -1.3085200786590576,
      "loss": 0.3503,
      "odds_ratio_loss": 3.154588222503662,
      "rewards/accuracies": 0.875,
      "rewards/chosen": -0.03131728619337082,
      "rewards/margins": 0.09953471273183823,
      "rewards/rejected": -0.13085201382637024,
      "sft_loss": 0.03485164791345596,
      "step": 100
    },
    {
      "epoch": 1.9555555555555557,
      "grad_norm": 2.7570531368255615,
      "learning_rate": 1.6097479104361328e-06,
      "logits/chosen": -0.7070721387863159,
      "logits/rejected": -0.22419829666614532,
      "logps/chosen": -0.27365046739578247,
      "logps/rejected": -1.0460803508758545,
      "loss": 0.322,
      "odds_ratio_loss": 2.940398693084717,
      "rewards/accuracies": 0.8125,
      "rewards/chosen": -0.027365049347281456,
      "rewards/margins": 0.07724298536777496,
      "rewards/rejected": -0.10460802167654037,
      "sft_loss": 0.027918804436922073,
      "step": 110
    },
    {
      "epoch": 2.1333333333333333,
      "grad_norm": 2.4038069248199463,
      "learning_rate": 1.1464096417858821e-06,
      "logits/chosen": -0.5680712461471558,
      "logits/rejected": -0.28739961981773376,
      "logps/chosen": -0.2213420867919922,
      "logps/rejected": -1.1936922073364258,
      "loss": 0.2601,
      "odds_ratio_loss": 2.3274455070495605,
      "rewards/accuracies": 0.862500011920929,
      "rewards/chosen": -0.0221342071890831,
      "rewards/margins": 0.09723500907421112,
      "rewards/rejected": -0.11936922371387482,
      "sft_loss": 0.02737104333937168,
      "step": 120
    },
    {
      "epoch": 2.311111111111111,
      "grad_norm": 1.1880685091018677,
      "learning_rate": 7.414516258630245e-07,
      "logits/chosen": -0.5090128779411316,
      "logits/rejected": -0.18801167607307434,
      "logps/chosen": -0.20155474543571472,
      "logps/rejected": -1.0635544061660767,
      "loss": 0.2454,
      "odds_ratio_loss": 2.2017226219177246,
      "rewards/accuracies": 0.831250011920929,
      "rewards/chosen": -0.02015547640621662,
      "rewards/margins": 0.08619997650384903,
      "rewards/rejected": -0.1063554510474205,
      "sft_loss": 0.02521040476858616,
      "step": 130
    },
    {
      "epoch": 2.488888888888889,
      "grad_norm": 6.385291576385498,
      "learning_rate": 4.123396721497977e-07,
      "logits/chosen": -0.4758281111717224,
      "logits/rejected": 0.05975949764251709,
      "logps/chosen": -0.21704864501953125,
      "logps/rejected": -1.0010992288589478,
      "loss": 0.2694,
      "odds_ratio_loss": 2.4016900062561035,
      "rewards/accuracies": 0.8374999761581421,
      "rewards/chosen": -0.021704863756895065,
      "rewards/margins": 0.07840504497289658,
      "rewards/rejected": -0.10010991245508194,
      "sft_loss": 0.029203277081251144,
      "step": 140
    },
    {
      "epoch": 2.6666666666666665,
      "grad_norm": 3.057594060897827,
      "learning_rate": 1.7326835503629542e-07,
      "logits/chosen": -0.66801917552948,
      "logits/rejected": -0.19290518760681152,
      "logps/chosen": -0.17285582423210144,
      "logps/rejected": -1.0114670991897583,
      "loss": 0.2015,
      "odds_ratio_loss": 1.7902212142944336,
      "rewards/accuracies": 0.887499988079071,
      "rewards/chosen": -0.017285581678152084,
      "rewards/margins": 0.08386112749576569,
      "rewards/rejected": -0.10114671289920807,
      "sft_loss": 0.02243014983832836,
      "step": 150
    },
    {
      "epoch": 2.8444444444444446,
      "grad_norm": 2.3622488975524902,
      "learning_rate": 3.4548802869627806e-08,
      "logits/chosen": -0.27373093366622925,
      "logits/rejected": 0.14009419083595276,
      "logps/chosen": -0.1832386553287506,
      "logps/rejected": -0.950977623462677,
      "loss": 0.2228,
      "odds_ratio_loss": 2.0095624923706055,
      "rewards/accuracies": 0.862500011920929,
      "rewards/chosen": -0.018323864787817,
      "rewards/margins": 0.07677389681339264,
      "rewards/rejected": -0.09509776532649994,
      "sft_loss": 0.021797562018036842,
      "step": 160
    },
    {
      "epoch": 2.986666666666667,
      "step": 168,
      "total_flos": 3.471912421559501e+16,
      "train_loss": 1.00575195536727,
      "train_runtime": 639.1511,
      "train_samples_per_second": 4.224,
      "train_steps_per_second": 0.263
    }
  ],
  "logging_steps": 10,
  "max_steps": 168,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 3.471912421559501e+16,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}
|
|