jikaixuan committed
Commit 537bd5b
Parent: b043d6d

Training in progress, step 220

adapter_config.json CHANGED
@@ -8,20 +8,17 @@
   "init_lora_weights": true,
   "layers_pattern": null,
   "layers_to_transform": null,
-  "loftq_config": {},
   "lora_alpha": 16,
   "lora_dropout": 0.1,
-  "megatron_config": null,
-  "megatron_core": "megatron.core",
   "modules_to_save": null,
   "peft_type": "LORA",
   "r": 64,
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
+    "v_proj",
     "k_proj",
     "q_proj",
-    "v_proj",
     "o_proj"
   ],
   "task_type": "CAUSAL_LM"
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:84a9c6afaad46949167d2bf8c0ff031030a8d02f1006b050e47642be289c99b1
- size 109086672
+ oid sha256:410b46257b79e03043ed1771333ce36cd2c3605e3600ee3dd24709ef346e658c
+ size 218138576
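
To try this checkpoint, the adapter weights can be attached to their base model with peft. The sketch below is illustrative only: both identifiers are placeholders, since neither the base model nor the adapter repo id appears in this commit.

# Sketch only: load the LoRA adapter (adapter_model.safetensors) on top of its base model.
# "base-model-id" and "path/to/this/adapter/repo" are placeholders, not real ids.
from transformers import AutoModelForCausalLM
from peft import PeftModel

base = AutoModelForCausalLM.from_pretrained("base-model-id")
model = PeftModel.from_pretrained(base, "path/to/this/adapter/repo")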
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:0cc76df2851d4146f789fd37f9db61180778b9f7f4bb177bcab529cc595e76d3
+ oid sha256:9103d537bc7a42b0df74010143412e6ee65758ade8a9f94ad18972e25f5db07a
  size 4792