{
  "base_model_name": "FlagAlpha_Llama2-Chinese-13b-Chat",
  "base_model_class": "LlamaForCausalLM",
  "base_loaded_in_4bit": false,
  "base_loaded_in_8bit": true,
  "projections": "q, v",
  "train_runtime": 46448.7655,
  "train_samples_per_second": 1.887,
  "train_steps_per_second": 0.015,
  "total_flos": 1.731547399056261e+18,
  "train_loss": 0.8565397503083212,
  "epoch": 3.0,
  "current_steps": 5465,
  "loss": 0.7528,
  "learning_rate": 3.576751117734724e-06
}