zephyr-7b-dpo-full-beta-0.083 / all_results.json
{
"epoch": 3.0,
"eval_logits/chosen": -2.612835168838501,
"eval_logits/rejected": -2.5376927852630615,
"eval_logps/chosen": -343.6803283691406,
"eval_logps/rejected": -345.713134765625,
"eval_loss": 0.6980592012405396,
"eval_rewards/accuracies": 0.79296875,
"eval_rewards/chosen": -5.03585958480835,
"eval_rewards/margins": 3.8046231269836426,
"eval_rewards/rejected": -8.840482711791992,
"eval_runtime": 317.955,
"eval_samples": 2000,
"eval_samples_per_second": 6.29,
"eval_steps_per_second": 0.101,
"train_loss": 0.20528750838680607,
"train_runtime": 83798.7665,
"train_samples": 61966,
"train_samples_per_second": 2.218,
"train_steps_per_second": 0.069
}