taesiri committed on
Commit
78fb1c0
1 Parent(s): b335377

Update adapter_config.json

Browse files
Files changed (1) hide show
  1. adapter_config.json +1 -3
adapter_config.json CHANGED
@@ -1,16 +1,14 @@
1
  {
2
  "base_model_name_or_path": "decapoda-research/llama-7b-hf",
3
  "bias": "none",
4
- "enable_lora": null,
5
  "fan_in_fan_out": false,
6
  "inference_mode": true,
7
  "init_lora_weights": true,
8
  "lora_alpha": 16,
9
  "lora_dropout": 0.05,
10
- "merge_weights": false,
11
  "modules_to_save": null,
12
  "peft_type": "LORA",
13
- "r": 16,
14
  "target_modules": [
15
  "q_proj",
16
  "k_proj",
 
1
  {
2
  "base_model_name_or_path": "decapoda-research/llama-7b-hf",
3
  "bias": "none",
 
4
  "fan_in_fan_out": false,
5
  "inference_mode": true,
6
  "init_lora_weights": true,
7
  "lora_alpha": 16,
8
  "lora_dropout": 0.05,
 
9
  "modules_to_save": null,
10
  "peft_type": "LORA",
11
+ "r": 8,
12
  "target_modules": [
13
  "q_proj",
14
  "k_proj",