anhtranhong committed
Commit
13c3ccc
1 Parent(s): 68b78e2

Upload model

Files changed (2)
  1. adapter_config.json +5 -2
  2. adapter_model.bin +2 -2
adapter_config.json CHANGED
@@ -8,14 +8,17 @@
   "layers_pattern": null,
   "layers_to_transform": null,
   "lora_alpha": 16,
-  "lora_dropout": 0.1,
+  "lora_dropout": 0.05,
   "modules_to_save": null,
   "peft_type": "LORA",
   "r": 64,
   "revision": null,
   "target_modules": [
     "q_proj",
-    "v_proj"
+    "k_proj",
+    "v_proj",
+    "o_proj",
+    "gate_proj"
   ],
   "task_type": "CAUSAL_LM"
 }
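For reference, the updated adapter_config.json corresponds to a peft LoraConfig roughly like the sketch below. This is an assumption about how the file could be produced, not the author's training script; the base model id and output directory are hypothetical placeholders.

# Minimal sketch: recreating the updated adapter settings with the peft library.
# "base-model-id" and "output_dir" are hypothetical placeholders.
from transformers import AutoModelForCausalLM
from peft import LoraConfig, get_peft_model

base_model = AutoModelForCausalLM.from_pretrained("base-model-id")  # placeholder

lora_config = LoraConfig(
    r=64,
    lora_alpha=16,
    lora_dropout=0.05,  # changed from 0.1 in this commit
    target_modules=["q_proj", "k_proj", "v_proj", "o_proj", "gate_proj"],
    task_type="CAUSAL_LM",
)

model = get_peft_model(base_model, lora_config)
model.save_pretrained("output_dir")  # writes adapter_config.json and adapter_model.bin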
adapter_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ef357b981b9bac9bdd526b0f747ca017c5ddb22066f7da1ef6a75e8d0302400d
-size 134264202
+oid sha256:5ba957deaa3b9fcf4694d75a243a49dc01f3d4c7438a4dfdc7d6d49c31cb7f5c
+size 392283466
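The adapter_model.bin entry is a Git LFS pointer, so only its hash and size change here. A minimal sketch of loading the uploaded adapter on top of its base model with peft follows; both identifiers are hypothetical placeholders, since the actual base model and repository id are not shown in this diff.

# Minimal sketch: attaching the uploaded LoRA adapter to a base model.
# "base-model-id" and "adapter-repo-or-local-path" are hypothetical placeholders.
from transformers import AutoModelForCausalLM
from peft import PeftModel

base_model = AutoModelForCausalLM.from_pretrained("base-model-id")  # placeholder
model = PeftModel.from_pretrained(base_model, "adapter-repo-or-local-path")  # placeholder
model.eval()  # ready for inference with the LoRA weights applied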