---
license: apache-2.0
datasets:
- tatsu-lab/alpaca
language:
- en
---
Training hyperparameters:

| Hyperparameter | Value |
|---|---|
| model_name_or_path | yahma/llama-13b-hf |
| learning_rate | 1e-4 |
| max_steps | 10000 |
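A minimal sketch of how a run with these hyperparameters could be reproduced with the `transformers` Trainer. The card does not state the batch size, sequence length, or whether adapters (e.g. LoRA) were used, so those values below are assumptions; `output_dir` and the tokenization choices are likewise hypothetical.

```python
from datasets import load_dataset
from transformers import (
    AutoModelForCausalLM,
    AutoTokenizer,
    DataCollatorForLanguageModeling,
    Trainer,
    TrainingArguments,
)

model_name_or_path = "yahma/llama-13b-hf"
tokenizer = AutoTokenizer.from_pretrained(model_name_or_path)
tokenizer.pad_token = tokenizer.eos_token  # LLaMA ships without a pad token

model = AutoModelForCausalLM.from_pretrained(model_name_or_path)

# tatsu-lab/alpaca provides a pre-formatted `text` field per example.
dataset = load_dataset("tatsu-lab/alpaca", split="train")

def tokenize(example):
    return tokenizer(example["text"], truncation=True, max_length=512)

tokenized = dataset.map(tokenize, remove_columns=dataset.column_names)

args = TrainingArguments(
    output_dir="alpaca-llama-13b",   # hypothetical output path
    learning_rate=1e-4,              # from the card
    max_steps=10000,                 # from the card
    per_device_train_batch_size=16,  # assumed; consistent with the throughput figures below
    logging_steps=50,
)

trainer = Trainer(
    model=model,
    args=args,
    train_dataset=tokenized,
    data_collator=DataCollatorForLanguageModeling(tokenizer, mlm=False),
)
trainer.train()
```

Note that a learning rate of 1e-4 is high for full fine-tuning of a 13B model and is more typical of adapter-based training; the card does not specify which was used.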
Training results:

| Metric | Value |
|---|---|
| epoch | 3.08 |
| train_loss | 0.7169 |
| train_runtime | 1 day, 7:23:32.67 |
| train_samples_per_second | 1.416 |
| train_steps_per_second | 0.088 |
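The reported metrics are mutually consistent, which also lets us back out the effective batch size. This sanity check is a derivation, not data from the card, and it assumes the standard 52,002-example split of tatsu-lab/alpaca:

```python
# Cross-check the throughput numbers reported above.
steps = 10000
samples_per_sec = 1.416
steps_per_sec = 0.088

effective_batch = samples_per_sec / steps_per_sec  # ~16.1 -> batch size 16
samples_seen = steps * 16                          # 160,000 training samples
epochs = samples_seen / 52002                      # Alpaca has 52,002 rows

print(effective_batch)  # ~16.1
print(epochs)           # ~3.08, matching the reported epoch value
```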