---
license: apache-2.0
datasets:
- tatsu-lab/alpaca
language:
- en
---
    model_name_or_path       = yahma/llama-13b-hf
    
    learning_rate            = 1e-4
    
    max_steps                = 10000
    
    epoch                    =              3.08
    
    train_loss               =            0.7169
    
    train_runtime            = 1 day, 7:23:32.67
    
    train_samples_per_second =             1.416
    
    train_steps_per_second   =             0.088