{
"base_model_name": "TinyLlama/TinyLlama-1.1B-Chat-v1.0",
"base_model_class": "LlamaForCausalLM",
"base_loaded_in_4bit": false,
"base_loaded_in_8bit": true,
"projections": "q, k, v, o",
"loss": 1.4473,
"learning_rate": 2.364532019704433e-06,
"epoch": 0.25,
"current_steps": 2149,
"current_steps_adjusted": 2149,
"epoch_adjusted": 0.25,
"train_runtime": 6758.723,
"train_samples_per_second": 1.907,
"train_steps_per_second": 0.159,
"total_flos": 6.25660356722688e+16,
"train_loss": 1.498897494826206
}