izh97 committed on
Commit
566aa1a
1 Parent(s): 2cb4feb

Training in progress, epoch 0

adapter_config.json CHANGED
@@ -9,23 +9,23 @@
   "layers_pattern": null,
   "layers_to_transform": null,
   "loftq_config": {},
-  "lora_alpha": 256,
+  "lora_alpha": 512,
   "lora_dropout": 0.1,
   "megatron_config": null,
   "megatron_core": "megatron.core",
   "modules_to_save": null,
   "peft_type": "LORA",
-  "r": 128,
+  "r": 256,
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "q_proj",
     "gate_proj",
-    "up_proj",
     "down_proj",
+    "q_proj",
+    "up_proj",
+    "k_proj",
     "o_proj",
-    "v_proj",
-    "k_proj"
+    "v_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_rslora": false
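
For reference, a minimal sketch of a peft LoraConfig that would produce the updated adapter_config.json (an assumed reconstruction, not part of this commit; only the fields visible in the diff are set):

# Hypothetical reconstruction from adapter_config.json above; the base
# model and the rest of the training setup are not shown in this commit.
from peft import LoraConfig

config = LoraConfig(
    r=256,                 # LoRA rank, raised from 128
    lora_alpha=512,        # raised from 256, keeping alpha/r at 2.0
    lora_dropout=0.1,
    target_modules=[       # all attention and MLP projections
        "gate_proj", "down_proj", "q_proj",
        "up_proj", "k_proj", "o_proj", "v_proj",
    ],
    task_type="CAUSAL_LM",
    use_rslora=False,
)

Note that lora_alpha is doubled alongside r, so the effective LoRA scaling alpha/r stays at 2.0; the target_modules set is unchanged, only reordered.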
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:8f029ae04116b905496dd1672892ce59b426d49ca596b0ea6ac285618c7087ad
-size 1342238560
+oid sha256:9b57398d943471b6b88420d1242ebe679d705c5c62c4da3aa88ccd9abc1f151a
+size 2684416208
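
The adapter file roughly doubles in size (1342238560 → 2684416208 bytes), consistent with the rank change: LoRA adapter parameter count scales linearly with r, which was doubled from 128 to 256.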
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0941f2c64ffb1ebe77734f19b07cd49bf820304201b6c8896d7bd1ec46e5369b
+oid sha256:55cc212c5f02ca9658df1dca4ea923c70d739425d304eaead4935cc91a2454ad
 size 4664