{
  "base_model_name": "Llama-2-13B-fp16-padded",
  "base_model_class": "LlamaForCausalLM",
  "base_loaded_in_4bit": true,
  "base_loaded_in_8bit": false,
  "projections": "q, v",
  "loss": 1.07,
  "learning_rate": 1.7080653142008908e-05,
  "epoch": 0.74,
  "current_steps": 9678,
  "train_runtime": 20262.2951,
  "train_samples_per_second": 12.817,
  "train_steps_per_second": 0.1,
  "total_flos": 4.119249176808653e+17,
  "train_loss": 1.1322209582423532
}