lemonilia committed
Commit 5f1fe90
1 Parent(s): 353449e

8-bit LoRA

Files changed (2):
  1. adapter_config.json +6 -6
  2. adapter_model.bin +1 -1
adapter_config.json CHANGED
@@ -1,6 +1,6 @@
 {
   "auto_mapping": null,
-  "base_model_name_or_path": "/home/anon/AI-Models/LLM/Llama2-7B/",
+  "base_model_name_or_path": "/home/pyg/base-models/Llama-2-7b-hf",
   "bias": "none",
   "fan_in_fan_out": null,
   "inference_mode": true,
@@ -8,19 +8,19 @@
   "layers_pattern": null,
   "layers_to_transform": null,
   "lora_alpha": 16,
-  "lora_dropout": 0.1,
+  "lora_dropout": 0.05,
   "modules_to_save": null,
   "peft_type": "LORA",
   "r": 256,
   "revision": null,
   "target_modules": [
-    "q_proj",
+    "gate_proj",
     "k_proj",
-    "o_proj",
     "up_proj",
-    "gate_proj",
+    "o_proj",
+    "v_proj",
     "down_proj",
-    "v_proj"
+    "q_proj"
   ],
   "task_type": "CAUSAL_LM"
 }
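For reference, the updated adapter_config.json maps onto roughly the following peft.LoraConfig. This is a sketch reconstructed from the committed JSON, not the actual training script, and it assumes a recent peft release where these keyword arguments exist.

# Sketch: a LoraConfig mirroring the updated adapter_config.json.
from peft import LoraConfig

lora_config = LoraConfig(
    r=256,                 # rank, unchanged in this commit
    lora_alpha=16,
    lora_dropout=0.05,     # lowered from 0.1 in this commit
    bias="none",
    task_type="CAUSAL_LM",
    target_modules=[       # all attention and MLP projections, committed order
        "gate_proj", "k_proj", "up_proj",
        "o_proj", "v_proj", "down_proj", "q_proj",
    ],
)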
adapter_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6a26c344bce1d08519ca2442da2d47e081607fc9b936fe2741d3c2776a066e49
+oid sha256:388950a3f3f0fc8d1e5cbb1449c31f067b592710f98fc63d6397662c0bc3d59c
 size 2558687629
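Matching the "8-bit LoRA" commit message, here is a minimal sketch of attaching this adapter to its base model quantized to 8 bits via bitsandbytes. The base-model path is the one from the committed config and is assumed to be a local Llama-2-7b-hf checkout; "./adapter" is a hypothetical directory holding the two committed files.

# Sketch: load the base model in 8-bit and attach the committed adapter.
import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig
from peft import PeftModel

base = AutoModelForCausalLM.from_pretrained(
    "/home/pyg/base-models/Llama-2-7b-hf",  # path from adapter_config.json
    quantization_config=BitsAndBytesConfig(load_in_8bit=True),
    device_map="auto",
    torch_dtype=torch.float16,
)
model = PeftModel.from_pretrained(base, "./adapter")  # hypothetical local clone of this repo
model.eval()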