NobodyExistsOnTheInternet committed on
Commit
c5e2ce0
1 Parent(s): fda7e92

Training in progress, step 1000

Files changed (30)
  1. adapter_config.json +35 -0
  2. adapter_model.safetensors +3 -0
  3. added_tokens.json +4 -0
  4. config.json +41 -0
  5. runs/Feb21_16-11-33_05c27c174db0/events.out.tfevents.1708533268.05c27c174db0.8796.0 +3 -0
  6. runs/Feb21_16-36-00_05c27c174db0/events.out.tfevents.1708533366.05c27c174db0.8969.0 +3 -0
  7. runs/Feb21_16-38-18_05c27c174db0/events.out.tfevents.1708533504.05c27c174db0.9101.0 +3 -0
  8. runs/Feb21_16-47-06_05c27c174db0/events.out.tfevents.1708535394.05c27c174db0.9379.0 +3 -0
  9. runs/Feb21_17-11-58_05c27c174db0/events.out.tfevents.1708535559.05c27c174db0.9569.0 +3 -0
  10. runs/Feb21_17-15-53_05c27c174db0/events.out.tfevents.1708535755.05c27c174db0.9725.0 +3 -0
  11. runs/Feb21_17-19-37_05c27c174db0/events.out.tfevents.1708535979.05c27c174db0.9854.0 +3 -0
  12. runs/Feb21_17-28-58_05c27c174db0/events.out.tfevents.1708536541.05c27c174db0.9996.0 +3 -0
  13. runs/Feb21_17-31-52_05c27c174db0/events.out.tfevents.1708536714.05c27c174db0.10108.0 +3 -0
  14. runs/Feb22_01-40-44_05c27c174db0/events.out.tfevents.1708566046.05c27c174db0.10778.0 +3 -0
  15. runs/Feb25_03-41-18_05c27c174db0/events.out.tfevents.1708833608.05c27c174db0.11688.0 +3 -0
  16. runs/Feb25_04-02-43_05c27c174db0/events.out.tfevents.1708833765.05c27c174db0.11905.0 +3 -0
  17. runs/Feb25_04-04-03_05c27c174db0/events.out.tfevents.1708833844.05c27c174db0.12089.0 +3 -0
  18. runs/Feb25_04-05-27_05c27c174db0/events.out.tfevents.1708833930.05c27c174db0.12300.0 +3 -0
  19. runs/Feb25_04-08-40_05c27c174db0/events.out.tfevents.1708834124.05c27c174db0.12680.0 +3 -0
  20. runs/Feb25_04-10-03_05c27c174db0/events.out.tfevents.1708834206.05c27c174db0.12863.0 +3 -0
  21. runs/Feb25_04-11-29_05c27c174db0/events.out.tfevents.1708834292.05c27c174db0.13046.0 +3 -0
  22. runs/Feb25_04-14-13_05c27c174db0/events.out.tfevents.1708834457.05c27c174db0.13230.0 +3 -0
  23. runs/Feb25_04-15-38_05c27c174db0/events.out.tfevents.1708834542.05c27c174db0.13414.0 +3 -0
  24. runs/Feb25_04-17-30_05c27c174db0/events.out.tfevents.1708834651.05c27c174db0.13654.0 +3 -0
  25. runs/Feb25_04-51-25_05c27c174db0/events.out.tfevents.1708836688.05c27c174db0.69.0 +3 -0
  26. special_tokens_map.json +30 -0
  27. tokenizer.json +0 -0
  28. tokenizer.model +3 -0
  29. tokenizer_config.json +62 -0
  30. training_args.bin +3 -0
adapter_config.json ADDED
@@ -0,0 +1,35 @@
+ {
+ "alpha_pattern": {},
+ "auto_mapping": null,
+ "base_model_name_or_path": "NousResearch/Nous-Hermes-2-Mistral-7B-DPO",
+ "bias": "none",
+ "fan_in_fan_out": null,
+ "inference_mode": true,
+ "init_lora_weights": true,
+ "layers_pattern": null,
+ "layers_to_transform": null,
+ "loftq_config": {},
+ "lora_alpha": 64,
+ "lora_dropout": 0.05,
+ "megatron_config": null,
+ "megatron_core": "megatron.core",
+ "modules_to_save": [
+ "embed_tokens",
+ "lm_head"
+ ],
+ "peft_type": "LORA",
+ "r": 32,
+ "rank_pattern": {},
+ "revision": null,
+ "target_modules": [
+ "k_proj",
+ "q_proj",
+ "down_proj",
+ "gate_proj",
+ "up_proj",
+ "v_proj",
+ "o_proj"
+ ],
+ "task_type": "CAUSAL_LM",
+ "use_rslora": false
+ }
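
adapter_config.json describes a LoRA adapter with rank 32, lora_alpha 64, and dropout 0.05 over all attention and MLP projection layers of NousResearch/Nous-Hermes-2-Mistral-7B-DPO, while saving embed_tokens and lm_head in full. A minimal loading sketch with PEFT; ADAPTER_REPO is a placeholder for this repository's id, which the diff itself does not state:

# Sketch: attach the LoRA adapter described above to its base model with PEFT.
# ADAPTER_REPO is a placeholder; replace it with this repository's id or a local path.
from peft import PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer

base = AutoModelForCausalLM.from_pretrained(
    "NousResearch/Nous-Hermes-2-Mistral-7B-DPO", device_map="auto"
)
tokenizer = AutoTokenizer.from_pretrained("ADAPTER_REPO")
model = PeftModel.from_pretrained(base, "ADAPTER_REPO")
model.eval()
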
adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:abaffa30fdd2d5b34af5455bcbb18672ed7efe5e9723e73005c60c24fa4c615d
+ size 1384247640
added_tokens.json ADDED
@@ -0,0 +1,4 @@
+ {
+ "<|im_end|>": 32000,
+ "<|im_start|>": 32001
+ }
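
added_tokens.json lists the two ChatML markers that sit past the 32,000-token SentencePiece vocabulary, matching the vocab_size of 32002 recorded in config.json below. A quick sketch for checking that they resolve to the recorded ids, loading the tokenizer of the base model named in this commit:

# Sketch: confirm the ChatML markers resolve to the ids recorded above.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("NousResearch/Nous-Hermes-2-Mistral-7B-DPO")
print(tok.convert_tokens_to_ids("<|im_end|>"))    # expected: 32000
print(tok.convert_tokens_to_ids("<|im_start|>"))  # expected: 32001
print(len(tok))                                   # expected: 32002
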
config.json ADDED
@@ -0,0 +1,41 @@
+ {
+ "_name_or_path": "NousResearch/Nous-Hermes-2-Mistral-7B-DPO",
+ "architectures": [
+ "MistralForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "eos_token_id": 32000,
+ "hidden_act": "silu",
+ "hidden_size": 4096,
+ "initializer_range": 0.02,
+ "intermediate_size": 14336,
+ "max_position_embeddings": 32768,
+ "model_type": "mistral",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 8,
+ "quantization_config": {
+ "_load_in_4bit": true,
+ "_load_in_8bit": false,
+ "bnb_4bit_compute_dtype": "float16",
+ "bnb_4bit_quant_type": "nf4",
+ "bnb_4bit_use_double_quant": true,
+ "llm_int8_enable_fp32_cpu_offload": false,
+ "llm_int8_has_fp16_weight": false,
+ "llm_int8_skip_modules": null,
+ "llm_int8_threshold": 6.0,
+ "load_in_4bit": true,
+ "load_in_8bit": false,
+ "quant_method": "bitsandbytes"
+ },
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "sliding_window": 4096,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.0",
+ "use_cache": false,
+ "vocab_size": 32002
+ }
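
config.json shows the base model was loaded through bitsandbytes 4-bit NF4 quantization with double quantization and float16 compute, the usual QLoRA-style setup. A minimal sketch reproducing that load, assuming a current transformers and bitsandbytes install:

# Sketch: reproduce the quantization_config above when loading the base model.
import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_use_double_quant=True,
    bnb_4bit_compute_dtype=torch.float16,
)
model = AutoModelForCausalLM.from_pretrained(
    "NousResearch/Nous-Hermes-2-Mistral-7B-DPO",
    quantization_config=bnb_config,
    device_map="auto",
)
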
runs/Feb21_16-11-33_05c27c174db0/events.out.tfevents.1708533268.05c27c174db0.8796.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d8d1f492f5f5ec935a4d092087b18ffd707c20b703ad072fecfe12d1cee2b4e1
+ size 5129
runs/Feb21_16-36-00_05c27c174db0/events.out.tfevents.1708533366.05c27c174db0.8969.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cf5340bba8a88f3ee3f1236e697aade8ad980c18f076a819fd0811911049a2d3
+ size 5129
runs/Feb21_16-38-18_05c27c174db0/events.out.tfevents.1708533504.05c27c174db0.9101.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a07e2972a96e41c25d04b9b97b7ca9c70e45dce0f21d87190e1aed9a2b394e18
+ size 5129
runs/Feb21_16-47-06_05c27c174db0/events.out.tfevents.1708535394.05c27c174db0.9379.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:be48e6c62a881a28b3a92d0a297a91f2b9aa443500c069c6ceab322b9ce7a63d
+ size 5129
runs/Feb21_17-11-58_05c27c174db0/events.out.tfevents.1708535559.05c27c174db0.9569.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:72332dcf8aff0b3b772a6368115edbd9ed3fa912514f937d0d5f7dd086bbec16
+ size 5129
runs/Feb21_17-15-53_05c27c174db0/events.out.tfevents.1708535755.05c27c174db0.9725.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:26ab629d798621b60a57c03dc8a084c65daf8f9c8be1b034840c735178c5d4b8
+ size 5129
runs/Feb21_17-19-37_05c27c174db0/events.out.tfevents.1708535979.05c27c174db0.9854.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8e97dd7c08da630e033920f9b77696fcfdbaa7ba68336a83fb74f0179e60ba3a
+ size 5129
runs/Feb21_17-28-58_05c27c174db0/events.out.tfevents.1708536541.05c27c174db0.9996.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1d0458998d0e8fea9f981b999f03ebd3d9a66a80633c745ce86c5544d4a08d91
+ size 5129
runs/Feb21_17-31-52_05c27c174db0/events.out.tfevents.1708536714.05c27c174db0.10108.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5ee6f11ac13177d3ebd8891c5013773cf5571f84dd80e67bd375e6852b0282ac
+ size 5129
runs/Feb22_01-40-44_05c27c174db0/events.out.tfevents.1708566046.05c27c174db0.10778.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f9a69472ebb5658314f382585dd25d162a619d701a72a92f77ab41819c4feb5e
+ size 5129
runs/Feb25_03-41-18_05c27c174db0/events.out.tfevents.1708833608.05c27c174db0.11688.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4e4dbced576695634ca8522a8df1dd3382f48fca6625551ba4da2d027b7615ff
+ size 5149
runs/Feb25_04-02-43_05c27c174db0/events.out.tfevents.1708833765.05c27c174db0.11905.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5cde308e8cf2ba6fe5e68ca45e37fb30d08a92ea1981bc5bea069083dc07a9d1
+ size 5149
runs/Feb25_04-04-03_05c27c174db0/events.out.tfevents.1708833844.05c27c174db0.12089.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2403050a4355aea46f5fd0ee15fc4cac8f2b929b537a5e17ffe33adfa9c6b5f1
+ size 5825
runs/Feb25_04-05-27_05c27c174db0/events.out.tfevents.1708833930.05c27c174db0.12300.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8090829e8a463a21c0779a7650bceac595623e6e119d2f8716c66286ca4735ed
+ size 5148
runs/Feb25_04-08-40_05c27c174db0/events.out.tfevents.1708834124.05c27c174db0.12680.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a40e31488da6c2c214796735cfe21cbd81bc10347493ee5698d7ad3d3fec2656
+ size 5148
runs/Feb25_04-10-03_05c27c174db0/events.out.tfevents.1708834206.05c27c174db0.12863.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:22a71093b4b741929cb8f992ed3f0461aeca4c5a08a94031787d218c78232f0c
+ size 5148
runs/Feb25_04-11-29_05c27c174db0/events.out.tfevents.1708834292.05c27c174db0.13046.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1f9c85ba8512395e54b6cc6bafec694a5bfa4ebe41b33676033d7456449d50ec
+ size 5148
runs/Feb25_04-14-13_05c27c174db0/events.out.tfevents.1708834457.05c27c174db0.13230.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:537f1250af2c357a388edf627f3d96ae500368c842059c216671c69c38dcebe6
+ size 6501
runs/Feb25_04-15-38_05c27c174db0/events.out.tfevents.1708834542.05c27c174db0.13414.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d072e22bbc0978523889cb253faa957fdf3dd12a2317ba95d84c1898f5d7b7db
+ size 6501
runs/Feb25_04-17-30_05c27c174db0/events.out.tfevents.1708834651.05c27c174db0.13654.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3c281d98e377e817dd48e8b1355e8106c9716f100e5f34c33ab80b010f14acb9
+ size 5150
runs/Feb25_04-51-25_05c27c174db0/events.out.tfevents.1708836688.05c27c174db0.69.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fe6b7ea95bc3477e4eaa17560211ba088627101253447d15c1b54b298fd27b8c
+ size 691625
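
Each runs/ entry above is a TensorBoard event file committed as a Git LFS pointer, one per Trainer launch on host 05c27c174db0; the last and largest (~676 KB) corresponds to the run that reached step 1000. A sketch of inspecting the logged scalars locally, assuming the tensorboard package is installed and the LFS objects have been fetched (the tag names shown are typical Trainer defaults, not read from these files):

# Sketch: list and read scalar tags from one of the event-file directories above.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Feb25_04-51-25_05c27c174db0")
acc.Reload()
print(acc.Tags()["scalars"])          # e.g. ["train/loss", "train/learning_rate"]
for event in acc.Scalars("train/loss"):
    print(event.step, event.value)
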
special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
+ {
+ "bos_token": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "unk_token": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dadfd56d766715c61d2ef780a525ab43b8e6da4de6865bda3d95fdef5e134055
+ size 493443
tokenizer_config.json ADDED
@@ -0,0 +1,62 @@
+ {
+ "add_bos_token": true,
+ "add_eos_token": false,
+ "add_prefix_space": true,
+ "added_tokens_decoder": {
+ "0": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "32000": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "32001": {
+ "content": "<|im_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "additional_special_tokens": [],
+ "bos_token": "<s>",
+ "chat_template": "{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% else %}{% set loop_messages = messages %}{% set system_message = 'Generate a preferable answer.' %}{% endif %}{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% for message in loop_messages %}{% if loop.index0 == 0 %}{{'<|im_start|>system\n' + system_message + '<|im_end|>\n'}}{% endif %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "<|im_end|>",
+ "legacy": true,
+ "model_max_length": 1000000000000000019884624838656,
+ "pad_token": "</s>",
+ "sp_model_kwargs": {},
+ "spaces_between_special_tokens": false,
+ "tokenizer_class": "LlamaTokenizer",
+ "trust_remote_code": false,
+ "unk_token": "<unk>",
+ "use_default_system_prompt": true,
+ "use_fast": true
+ }
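
tokenizer_config.json keeps the Llama tokenizer class but sets <|im_end|> as the EOS token, reuses </s> for padding, and ships a ChatML chat_template that falls back to the default system prompt 'Generate a preferable answer.' when no system message is given. A short sketch of applying it; ADAPTER_REPO is again a placeholder for this repository's id, which the diff does not state:

# Sketch: render a conversation with the chat_template defined above.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("ADAPTER_REPO")
messages = [{"role": "user", "content": "Summarize Git LFS in one sentence."}]
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
# Expected shape: a ChatML transcript opening with the default system block
# "<|im_start|>system\nGenerate a preferable answer.<|im_end|>" (since no system
# message was supplied) and ending with "<|im_start|>assistant\n".
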
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7f4504e1370becb80d1d6feb0f2ec638b6dc2e19b049b813e8eb5337a7649e82
+ size 5048