roif123 committed
Commit c12f868
1 Parent(s): 4c4f21e

Update adapter_config.json

Files changed (1):
  1. adapter_config.json +5 -13
adapter_config.json CHANGED
@@ -1,32 +1,24 @@
 {
-  "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "/kaggle/input/mistral/pytorch/7b-instruct-v0.1-hf/1",
+  "base_model_name_or_path": "mistralai/Mistral-7B-v0.1",
   "bias": "none",
   "fan_in_fan_out": false,
   "inference_mode": true,
   "init_lora_weights": true,
-  "layer_replication": null,
   "layers_pattern": null,
   "layers_to_transform": null,
-  "loftq_config": {},
   "lora_alpha": 16,
   "lora_dropout": 0.1,
-  "megatron_config": null,
-  "megatron_core": "megatron.core",
   "modules_to_save": null,
   "peft_type": "LORA",
   "r": 64,
-  "rank_pattern": {},
   "revision": null,
   "target_modules": [
+    "q_proj",
+    "k_proj",
     "v_proj",
     "o_proj",
-    "k_proj",
-    "gate_proj",
-    "q_proj"
+    "gate_proj"
   ],
-  "task_type": "CAUSAL_LM",
-  "use_dora": false,
-  "use_rslora": false
+  "task_type": "CAUSAL_LM"
 }
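
For reference, these fields map directly onto PEFT's LoraConfig, and PeftModel.from_pretrained reads adapter_config.json when attaching the adapter. The sketch below is illustrative only: the adapter repo/directory is not named in this commit, so "./adapter" is a hypothetical placeholder, and it assumes the LoRA weights are stored alongside this config.

```python
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base_id = "mistralai/Mistral-7B-v0.1"  # value of base_model_name_or_path above

# Load the base model named in the updated config.
base_model = AutoModelForCausalLM.from_pretrained(base_id)
tokenizer = AutoTokenizer.from_pretrained(base_id)

# PeftModel.from_pretrained reads adapter_config.json (r=64, lora_alpha=16,
# lora_dropout=0.1, target_modules q/k/v/o/gate_proj, task_type CAUSAL_LM)
# and attaches the LoRA weights to the matching linear layers.
# "./adapter" is a placeholder: a directory holding this config and the weights.
model = PeftModel.from_pretrained(base_model, "./adapter")
model.eval()
```

Changing base_model_name_or_path from the local Kaggle input path to a Hub id means the base model can be resolved wherever the adapter is loaded, rather than only inside the original Kaggle environment.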