morgan committed
Commit 966589a
1 Parent(s): 12b4cfb

Training in progress, step 978

adapter_config.json CHANGED
@@ -8,21 +8,18 @@
   "init_lora_weights": true,
   "layers_pattern": null,
   "layers_to_transform": null,
-  "lora_alpha": 16,
+  "lora_alpha": 32,
   "lora_dropout": 0.05,
-  "modules_to_save": [
-    "lm_head",
-    "embed_tokens"
-  ],
+  "modules_to_save": null,
   "peft_type": "LORA",
-  "r": 32,
+  "r": 16,
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "o_proj",
-    "k_proj",
     "v_proj",
-    "q_proj"
+    "o_proj",
+    "q_proj",
+    "k_proj"
   ],
   "task_type": "CAUSAL_LM"
 }
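For reference, the updated adapter_config.json maps onto a PEFT LoraConfig roughly like the sketch below; only the fields visible in this diff are set, and everything else is left at PEFT defaults.

```python
from peft import LoraConfig

# Sketch of a LoraConfig matching the updated adapter_config.json.
# Fields not shown in the diff are left at their PEFT defaults.
lora_config = LoraConfig(
    r=16,                    # was 32
    lora_alpha=32,           # was 16
    lora_dropout=0.05,
    target_modules=["v_proj", "o_proj", "q_proj", "k_proj"],
    modules_to_save=None,    # lm_head / embed_tokens are no longer saved in full
    task_type="CAUSAL_LM",
)
```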
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:4cd5967a8ac2c81f0aac85e9096e902bef2ae6c66578b268f4d287543897db66
-size 633407416
+oid sha256:41bc7bb18baf2c368e75550480c8a4346fcaec00edde9fe88c93f33883dbcafe
+size 54560368
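The adapter file drops from ~633 MB to ~54 MB because the full lm_head/embed_tokens copies (modules_to_save) are gone and r was halved. A rough sanity check of the new size, assuming a Mistral-7B-style geometry (32 layers, hidden size 4096, 1024-wide grouped-query k/v projections) that the diff itself does not spell out:

```python
# Rough LoRA parameter count for r=16 over q/k/v/o in every layer.
# The geometry below is an assumption; only hidden_size=4096 appears in the diff.
hidden = 4096
kv_dim = 1024   # assumed grouped-query k/v projection width
layers = 32     # assumed layer count
r = 16

per_layer = (
    r * (hidden + hidden)    # q_proj: A (r x in) + B (out x r)
    + r * (hidden + kv_dim)  # k_proj
    + r * (hidden + kv_dim)  # v_proj
    + r * (hidden + hidden)  # o_proj
)
total = per_layer * layers
print(total, total * 4)  # ~13.6M params, ~54 MB in float32 -- close to the new file size
```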
config.json CHANGED
@@ -5,7 +5,7 @@
   ],
   "attention_dropout": 0.0,
   "bos_token_id": 1,
-  "eos_token_id": 32000,
+  "eos_token_id": 2,
   "hidden_act": "silu",
   "hidden_size": 4096,
   "initializer_range": 0.02,
@@ -38,5 +38,5 @@
   "torch_dtype": "bfloat16",
   "transformers_version": "4.37.0.dev0",
   "use_cache": false,
-  "vocab_size": 32002
+  "vocab_size": 32000
 }
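With eos_token_id back at 2 and vocab_size back at the stock 32000, the checkpoint matches the base model's original embedding shape, so the adapter should load without resizing token embeddings first. A minimal sketch; the base-model name and adapter path are placeholders, since neither is named in this diff:

```python
from transformers import AutoModelForCausalLM
from peft import PeftModel

# Assumed base model; the diff does not identify it.
base = AutoModelForCausalLM.from_pretrained("mistralai/Mistral-7B-v0.1")
# No base.resize_token_embeddings(...) needed: vocab_size is back to 32000.
model = PeftModel.from_pretrained(base, "path/to/this-adapter-checkpoint")  # placeholder path
```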
special_tokens_map.json CHANGED
@@ -7,7 +7,7 @@
     "single_word": false
   },
   "eos_token": {
-    "content": "<|im_end|>",
+    "content": "</s>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
tokenizer_config.json CHANGED
@@ -25,28 +25,12 @@
       "rstrip": false,
       "single_word": false,
       "special": true
-    },
-    "32000": {
-      "content": "<|im_end|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "32001": {
-      "content": "<|im_start|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
     }
   },
   "additional_special_tokens": [],
   "bos_token": "<s>",
   "clean_up_tokenization_spaces": false,
-  "eos_token": "<|im_end|>",
+  "eos_token": "</s>",
   "legacy": true,
   "model_max_length": 1000000000000000019884624838656,
   "pad_token": "</s>",
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7996fa7271f62490bcb7c0a548cad3b765ec648c70677f878ca28b39b86629fb
+oid sha256:b4b1fb46910fa7d2aa33f2def3676285a0b87052f84ab51d2c485ae9b6c899b7
 size 5368