{
   "output_dir":"/content/drive/MyDrive/Programming/hf-trainer/opt-peter-2pt7B-ps_DS-msgs_Ep-4_Bs-16",
   "overwrite_output_dir":true,
   "do_train":false,
   "do_eval":false,
   "do_predict":false,
   "evaluation_strategy":"no",
   "prediction_loss_only":false,
   "per_device_train_batch_size":16,
   "per_device_eval_batch_size":16,
   "per_gpu_train_batch_size":"None",
   "per_gpu_eval_batch_size":"None",
   "gradient_accumulation_steps":4,
   "eval_accumulation_steps":2,
   "eval_delay":0,
   "learning_rate":1e-05,
   "weight_decay":0.1,
   "adam_beta1":0.9,
   "adam_beta2":0.999,
   "adam_epsilon":1e-08,
   "max_grad_norm":1,
   "num_train_epochs":4,
   "max_steps":-1,
   "lr_scheduler_type":"cosine",
   "warmup_ratio":0.05,
   "warmup_steps":0,
   "log_level":-1,
   "log_level_replica":-1,
   "log_on_each_node":true,
   "logging_dir":"/content/drive/MyDrive/Programming/hf-trainer/opt-peter-2pt7B-ps_DS-msgs_Ep-4_Bs-16/logs",
   "logging_strategy":"steps",
   "logging_first_step":false,
   "logging_steps":5,
   "logging_nan_inf_filter":true,
   "save_strategy":"epoch",
   "save_steps":500,
   "save_total_limit":1,
   "save_on_each_node":false,
   "no_cuda":false,
   "seed":42,
   "data_seed":"None",
   "bf16":true,
   "fp16":false,
   "fp16_opt_level":"O1",
   "half_precision_backend":"amp",
   "bf16_full_eval":true,
   "fp16_full_eval":false,
   "tf32":"None",
   "local_rank":0,
   "xpu_backend":"None",
   "tpu_num_cores":"None",
   "tpu_metrics_debug":false,
   "debug":"[]",
   "dataloader_drop_last":false,
   "eval_steps":"None",
   "dataloader_num_workers":0,
   "past_index":-1,
   "run_name":"/content/drive/MyDrive/Programming/hf-trainer/opt-peter-2pt7B-ps_DS-msgs_Ep-4_Bs-16",
   "disable_tqdm":false,
   "remove_unused_columns":true,
   "label_names":"None",
   "load_best_model_at_end":false,
   "metric_for_best_model":"None",
   "greater_is_better":"None",
   "ignore_data_skip":false,
   "sharded_ddp":"[]",
   "fsdp":"[]",
   "fsdp_min_num_params":0,
   "deepspeed":"ds_config_zero2_bf16.json",
   "label_smoothing_factor":0.0,
   "optim":"adamw_hf",
   "adafactor":false,
   "group_by_length":false,
   "length_column_name":"length",
   "report_to":"['tensorboard']",
   "ddp_find_unused_parameters":"None",
   "ddp_bucket_cap_mb":"None",
   "dataloader_pin_memory":true,
   "skip_memory_metrics":true,
   "use_legacy_prediction_loop":false,
   "push_to_hub":true,
   "resume_from_checkpoint":"None",
   "hub_model_id":"opt-peter-2pt7B-ps_DS-msgs_Ep-4_Bs-16",
   "hub_strategy":"end",
   "hub_token":"<HUB_TOKEN>",
   "hub_private_repo":false,
   "gradient_checkpointing":true,
   "include_inputs_for_metrics":false,
   "fp16_backend":"auto",
   "push_to_hub_model_id":"None",
   "push_to_hub_organization":"None",
   "push_to_hub_token":"<PUSH_TO_HUB_TOKEN>",
   "_n_gpu":1,
   "mp_parameters":"",
   "auto_find_batch_size":false,
   "full_determinism":false,
   "train_batch_size":16,
   "eval_batch_size":16,
   "configs_src":"opt-peter-2pt7B-ps_DS-msgs_Ep-4_Bs-16",
   "data_tag":"text-file-input"
}