{
  "best_metric": 0.8816270566727605,
  "best_model_checkpoint": "distilbert-hate_speech18\\run-3\\checkpoint-480",
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 480,
  "is_hyper_param_search": true,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.0,
      "learning_rate": 2.741556032723218e-06,
      "loss": 0.3969,
      "step": 240
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.8606032906764168,
      "eval_loss": 0.38725385069847107,
      "eval_runtime": 13.634,
      "eval_samples_per_second": 160.481,
      "eval_steps_per_second": 5.061,
      "step": 240
    },
    {
      "epoch": 2.0,
      "learning_rate": 2.0561670245424133e-06,
      "loss": 0.3833,
      "step": 480
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.8816270566727605,
      "eval_loss": 0.3825770616531372,
      "eval_runtime": 13.6065,
      "eval_samples_per_second": 160.806,
      "eval_steps_per_second": 5.071,
      "step": 480
    }
  ],
  "logging_steps": 500,
  "max_steps": 1200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "total_flos": 328019606410944.0,
  "trial_name": null,
  "trial_params": {
    "alpha": 0.3534065692399305,
    "learning_rate": 3.4269450409040222e-06,
    "num_train_epochs": 5,
    "per_device_eval_batch_size": 32,
    "per_device_train_batch_size": 32,
    "temperature": 11
  }
}
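
The state above records trial run-3 of a hyperparameter search ("is_hyper_param_search": true): two of five planned epochs have been logged, with eval_accuracy improving from 0.8606 at epoch 1 to 0.8816 at epoch 2, which is the current best checkpoint. Below is a minimal sketch for inspecting this file with the standard-library json module; the local file path is an assumption for illustration, and the script only reads fields that appear in the state above.

# Minimal sketch: inspect a Trainer state file from a hyperparameter-search trial.
# The path below is hypothetical; adjust it to wherever the checkpoint is stored.
import json

with open("distilbert-hate_speech18/run-3/checkpoint-480/trainer_state.json") as f:
    state = json.load(f)

print("best checkpoint:", state["best_model_checkpoint"])
print("best metric (eval_accuracy):", state["best_metric"])

# Per-epoch evaluation accuracy from the log history
for entry in state["log_history"]:
    if "eval_accuracy" in entry:
        print(f"epoch {entry['epoch']}: eval_accuracy={entry['eval_accuracy']:.4f}")

# Hyperparameters tried in this search trial (run-3)
print("trial params:", state["trial_params"])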