Training in progress, epoch 0
- adapter_config.json +42 -0
- adapter_model.safetensors +3 -0
- chat_template.jinja +4 -0
- runs/Oct20_03-46-36_8b029ae94c29/events.out.tfevents.1760932037.8b029ae94c29.13540.0 +3 -0
- special_tokens_map.json +30 -0
- tokenizer.json +0 -0
- tokenizer.model +3 -0
- tokenizer_config.json +44 -0
- trainer_log.jsonl +26 -0
- training_args.bin +3 -0
adapter_config.json
ADDED
@@ -0,0 +1,42 @@
+{
+  "alpha_pattern": {},
+  "auto_mapping": null,
+  "base_model_name_or_path": "TinyLlama/TinyLlama-1.1B-Chat-v1.0",
+  "bias": "none",
+  "corda_config": null,
+  "eva_config": null,
+  "exclude_modules": null,
+  "fan_in_fan_out": false,
+  "inference_mode": true,
+  "init_lora_weights": true,
+  "layer_replication": null,
+  "layers_pattern": null,
+  "layers_to_transform": null,
+  "loftq_config": {},
+  "lora_alpha": 16,
+  "lora_bias": false,
+  "lora_dropout": 0.0,
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "modules_to_save": null,
+  "peft_type": "LORA",
+  "qalora_group_size": 16,
+  "r": 8,
+  "rank_pattern": {},
+  "revision": null,
+  "target_modules": [
+    "v_proj",
+    "gate_proj",
+    "k_proj",
+    "up_proj",
+    "down_proj",
+    "q_proj",
+    "o_proj"
+  ],
+  "target_parameters": null,
+  "task_type": "CAUSAL_LM",
+  "trainable_token_indices": null,
+  "use_dora": false,
+  "use_qalora": false,
+  "use_rslora": false
+}
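The fields above are the serialized form of a PEFT LoraConfig: rank-8 adapters with alpha 16 on every attention and MLP projection of TinyLlama-1.1B-Chat. A minimal sketch of how such an adapter is typically set up, assuming the peft and transformers libraries (the training loop itself is out of scope here):

# Sketch: recreating the LoRA setup described by adapter_config.json.
# Hyperparameters are copied from the config above; everything not shown
# (dataset, Trainer, etc.) is omitted.
from transformers import AutoModelForCausalLM
from peft import LoraConfig, get_peft_model

base = AutoModelForCausalLM.from_pretrained("TinyLlama/TinyLlama-1.1B-Chat-v1.0")

lora_config = LoraConfig(
    r=8,                    # "r": 8
    lora_alpha=16,          # "lora_alpha": 16
    lora_dropout=0.0,       # "lora_dropout": 0.0
    bias="none",            # "bias": "none"
    task_type="CAUSAL_LM",  # "task_type": "CAUSAL_LM"
    target_modules=[        # all attention and MLP projections
        "q_proj", "k_proj", "v_proj", "o_proj",
        "gate_proj", "up_proj", "down_proj",
    ],
)

model = get_peft_model(base, lora_config)
model.print_trainable_parameters()  # trainable vs. total parameter counts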
adapter_model.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3aa69b2b3433f682d36a2a3b5037e414fd8e89220be652d187fbbb4d6210e8a6
+size 25271744
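The ~25 MB file size is consistent with rank-8 adapters on all seven projections, stored in float32. A back-of-envelope check, assuming TinyLlama-1.1B's published dimensions (22 layers, hidden size 2048, intermediate size 5632, grouped-query k/v projections mapping 2048 -> 256):

# Sanity check on the 25,271,744-byte size above. Dimensions are assumptions
# taken from the TinyLlama-1.1B architecture, not read from this commit.
hidden, inter, kv, layers, r = 2048, 5632, 256, 22, 8

shapes = {  # (in_features, out_features) per adapted module
    "q_proj": (hidden, hidden), "k_proj": (hidden, kv), "v_proj": (hidden, kv),
    "o_proj": (hidden, hidden), "gate_proj": (hidden, inter),
    "up_proj": (hidden, inter), "down_proj": (inter, hidden),
}

# Each LoRA pair (A, B) adds r * (in + out) parameters per adapted layer.
params = layers * sum(r * (i + o) for i, o in shapes.values())
print(params)      # 6,307,840 trainable parameters
print(params * 4)  # 25,231,360 bytes in float32; the remainder up to
                   # 25,271,744 is the safetensors header/metadata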
chat_template.jinja
ADDED
@@ -0,0 +1,4 @@
+{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% else %}{% set loop_messages = messages %}{% endif %}{% if system_message is defined %}{{ 'System: ' + system_message + '</s>' + '
+' }}{% endif %}{% for message in loop_messages %}{% set content = message['content'] %}{% if message['role'] == 'user' %}{{ 'Human: ' + content + '</s>' + '
+Assistant:' }}{% elif message['role'] == 'assistant' %}{{ content + '</s>' + '
+' }}{% endif %}{% endfor %}
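The template renders a System/Human/Assistant transcript with </s> separators. A minimal usage sketch, assuming a tokenizer loaded from this repo picks up the template ("your-username/your-repo" is a placeholder, not the actual repo id):

# Sketch: rendering a conversation with the chat template above.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("your-username/your-repo")

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "What is LoRA?"},
]

# tokenize=False returns the rendered prompt string instead of token ids.
prompt = tokenizer.apply_chat_template(messages, tokenize=False)
print(prompt)
# System: You are a helpful assistant.</s>
# Human: What is LoRA?</s>
# Assistant: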
runs/Oct20_03-46-36_8b029ae94c29/events.out.tfevents.1760932037.8b029ae94c29.13540.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:258e38083ebcf5e46dfbf6027fc07421315eeff89c1722fc8656236e3693f574
+size 11064
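This is the TensorBoard event file the Trainer writes alongside trainer_log.jsonl. A sketch of reading its scalars, assuming the tensorboard package and the run directory layout above (the exact tag names are an assumption; they depend on the logging setup):

# Sketch: listing and reading scalars from the TensorBoard run above.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Oct20_03-46-36_8b029ae94c29")
acc.Reload()

print(acc.Tags()["scalars"])        # e.g. ["train/loss", "train/learning_rate", ...]
for event in acc.Scalars("train/loss"):
    print(event.step, event.value)  # step-by-step training loss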
special_tokens_map.json
ADDED
@@ -0,0 +1,30 @@
+{
+  "bos_token": {
+    "content": "<s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
tokenizer.json
ADDED
The diff for this file is too large to render. See raw diff.
tokenizer.model
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
tokenizer_config.json
ADDED
@@ -0,0 +1,44 @@
+{
+  "add_bos_token": true,
+  "add_eos_token": false,
+  "add_prefix_space": null,
+  "added_tokens_decoder": {
+    "0": {
+      "content": "<unk>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "bos_token": "<s>",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "</s>",
+  "extra_special_tokens": {},
+  "legacy": false,
+  "model_max_length": 2048,
+  "pad_token": "</s>",
+  "padding_side": "right",
+  "sp_model_kwargs": {},
+  "split_special_tokens": false,
+  "tokenizer_class": "LlamaTokenizer",
+  "unk_token": "<unk>",
+  "use_default_system_prompt": false
+}
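Two settings worth noticing: the pad token reuses </s> (LLaMA-style tokenizers ship without a dedicated pad token, so fine-tuning code commonly sets pad_token = eos_token before batching), and padding is on the right, the usual choice for causal-LM training. A quick verification sketch, again with a placeholder repo id:

# Sketch: loading the tokenizer committed here and checking the settings
# from tokenizer_config.json.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("your-username/your-repo")

assert tokenizer.pad_token == tokenizer.eos_token == "</s>"  # pad reuses EOS
assert tokenizer.padding_side == "right"
assert tokenizer.model_max_length == 2048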
trainer_log.jsonl
ADDED
@@ -0,0 +1,26 @@
+{"current_steps": 10, "total_steps": 250, "loss": 0.7296, "lr": 0.00019936113105200085, "epoch": 0.018885741265344664, "percentage": 4.0, "elapsed_time": "0:01:06", "remaining_time": "0:26:25"}
+{"current_steps": 20, "total_steps": 250, "loss": 0.1418, "lr": 0.0001971631732914674, "epoch": 0.03777148253068933, "percentage": 8.0, "elapsed_time": "0:02:09", "remaining_time": "0:24:47"}
+{"current_steps": 30, "total_steps": 250, "loss": 0.1158, "lr": 0.00019343289424566122, "epoch": 0.056657223796033995, "percentage": 12.0, "elapsed_time": "0:03:12", "remaining_time": "0:23:34"}
+{"current_steps": 40, "total_steps": 250, "loss": 0.1129, "lr": 0.00018822912264349534, "epoch": 0.07554296506137866, "percentage": 16.0, "elapsed_time": "0:04:16", "remaining_time": "0:22:25"}
+{"current_steps": 50, "total_steps": 250, "loss": 0.109, "lr": 0.00018163392507171842, "epoch": 0.09442870632672333, "percentage": 20.0, "elapsed_time": "0:05:18", "remaining_time": "0:21:15"}
+{"current_steps": 60, "total_steps": 250, "loss": 0.1082, "lr": 0.0001737513117358174, "epoch": 0.11331444759206799, "percentage": 24.0, "elapsed_time": "0:06:22", "remaining_time": "0:20:09"}
+{"current_steps": 70, "total_steps": 250, "loss": 0.1073, "lr": 0.00016470559615694446, "epoch": 0.13220018885741266, "percentage": 28.0, "elapsed_time": "0:07:25", "remaining_time": "0:19:04"}
+{"current_steps": 80, "total_steps": 250, "loss": 0.1036, "lr": 0.00015463943467342693, "epoch": 0.1510859301227573, "percentage": 32.0, "elapsed_time": "0:08:28", "remaining_time": "0:18:01"}
+{"current_steps": 90, "total_steps": 250, "loss": 0.0999, "lr": 0.0001437115766650933, "epoch": 0.16997167138810199, "percentage": 36.0, "elapsed_time": "0:09:32", "remaining_time": "0:16:57"}
+{"current_steps": 100, "total_steps": 250, "loss": 0.1009, "lr": 0.00013209436098072095, "epoch": 0.18885741265344666, "percentage": 40.0, "elapsed_time": "0:10:35", "remaining_time": "0:15:53"}
+{"current_steps": 110, "total_steps": 250, "loss": 0.0996, "lr": 0.00011997099805144069, "epoch": 0.2077431539187913, "percentage": 44.0, "elapsed_time": "0:11:39", "remaining_time": "0:14:50"}
+{"current_steps": 120, "total_steps": 250, "loss": 0.1015, "lr": 0.00010753268055279329, "epoch": 0.22662889518413598, "percentage": 48.0, "elapsed_time": "0:12:42", "remaining_time": "0:13:46"}
+{"current_steps": 130, "total_steps": 250, "loss": 0.1023, "lr": 9.497556818202306e-05, "epoch": 0.24551463644948066, "percentage": 52.0, "elapsed_time": "0:13:46", "remaining_time": "0:12:42"}
+{"current_steps": 140, "total_steps": 250, "loss": 0.1005, "lr": 8.249769410247239e-05, "epoch": 0.26440037771482533, "percentage": 56.0, "elapsed_time": "0:14:49", "remaining_time": "0:11:39"}
+{"current_steps": 150, "total_steps": 250, "loss": 0.0958, "lr": 7.029584184229653e-05, "epoch": 0.28328611898017, "percentage": 60.0, "elapsed_time": "0:15:52", "remaining_time": "0:10:35"}
+{"current_steps": 160, "total_steps": 250, "loss": 0.0972, "lr": 5.856244190067159e-05, "epoch": 0.3021718602455146, "percentage": 64.0, "elapsed_time": "0:16:57", "remaining_time": "0:09:32"}
+{"current_steps": 170, "total_steps": 250, "loss": 0.0977, "lr": 4.748253700387042e-05, "epoch": 0.3210576015108593, "percentage": 68.0, "elapsed_time": "0:18:00", "remaining_time": "0:08:28"}
+{"current_steps": 180, "total_steps": 250, "loss": 0.0975, "lr": 3.7230863870929964e-05, "epoch": 0.33994334277620397, "percentage": 72.0, "elapsed_time": "0:19:04", "remaining_time": "0:07:24"}
+{"current_steps": 190, "total_steps": 250, "loss": 0.0975, "lr": 2.7969097511209308e-05, "epoch": 0.3588290840415486, "percentage": 76.0, "elapsed_time": "0:20:06", "remaining_time": "0:06:21"}
+{"current_steps": 200, "total_steps": 250, "loss": 0.0968, "lr": 1.9843301512912327e-05, "epoch": 0.3777148253068933, "percentage": 80.0, "elapsed_time": "0:21:09", "remaining_time": "0:05:17"}
+{"current_steps": 210, "total_steps": 250, "loss": 0.0974, "lr": 1.2981624533047432e-05, "epoch": 0.39660056657223797, "percentage": 84.0, "elapsed_time": "0:22:13", "remaining_time": "0:04:13"}
+{"current_steps": 220, "total_steps": 250, "loss": 0.0979, "lr": 7.492279316554207e-06, "epoch": 0.4154863078375826, "percentage": 88.0, "elapsed_time": "0:23:15", "remaining_time": "0:03:10"}
+{"current_steps": 230, "total_steps": 250, "loss": 0.0998, "lr": 3.461836116672612e-06, "epoch": 0.4343720491029273, "percentage": 92.0, "elapsed_time": "0:24:18", "remaining_time": "0:02:06"}
+{"current_steps": 240, "total_steps": 250, "loss": 0.096, "lr": 9.538574303348813e-07, "epoch": 0.45325779036827196, "percentage": 96.0, "elapsed_time": "0:25:21", "remaining_time": "0:01:03"}
+{"current_steps": 250, "total_steps": 250, "loss": 0.0987, "lr": 7.895579618388827e-09, "epoch": 0.4721435316336166, "percentage": 100.0, "elapsed_time": "0:26:24", "remaining_time": "0:00:00"}
+{"current_steps": 250, "total_steps": 250, "epoch": 0.4721435316336166, "percentage": 100.0, "elapsed_time": "0:26:25", "remaining_time": "0:00:00"}
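The log shows loss falling from 0.73 to ~0.10 within the first 30 steps and plateauing, a learning rate decaying from ~2e-4 toward zero on what looks like a cosine schedule, and 250 steps covering 0.47 of an epoch. A small stdlib-only sketch that summarizes the file (assuming it sits in the working directory):

# Sketch: summarizing trainer_log.jsonl.
import json

with open("trainer_log.jsonl") as f:
    rows = [json.loads(line) for line in f]

steps = [r for r in rows if "loss" in r]  # the final summary row has no loss
print(f"first logged loss: {steps[0]['loss']} at step {steps[0]['current_steps']}")
print(f"last logged loss:  {steps[-1]['loss']} at step {steps[-1]['current_steps']}")
print(f"epochs covered:    {rows[-1]['epoch']:.3f}")
# first logged loss: 0.7296 at step 10
# last logged loss:  0.0987 at step 250
# epochs covered:    0.472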
training_args.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2dab7247fa84cbaaa8ec716eacb9ad44774956b37e7e8f4f5405f5c5a721d378
+size 6225
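training_args.bin is the Trainer's pickled TrainingArguments object. A sketch of inspecting it, assuming the hyperparameters used here; note it is an arbitrary pickle, so only load files you trust:

# Sketch: reading the TrainingArguments saved as training_args.bin.
# weights_only=False is required on recent torch versions because this is
# a pickled object, not a plain tensor file.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)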