vaibhavad committed
Commit
a388102
1 Parent(s): 0d262f4

Update adapter_config.json

Files changed (1)
  1. adapter_config.json +2 -4
adapter_config.json CHANGED
@@ -1,15 +1,14 @@
 {
   "alpha_pattern": {},
   "auto_mapping": {
-    "base_model_class": "LlamaModel",
-    "parent_library": "transformers.models.llama.modeling_llama"
+    "base_model_class": "LlamaEncoderModel",
+    "parent_library": "llama_encoder_model.modeling_llama_encoder"
   },
   "base_model_name_or_path": "meta-llama/Meta-Llama-3-8B-Instruct",
   "bias": "none",
   "fan_in_fan_out": false,
   "inference_mode": true,
   "init_lora_weights": true,
-  "layer_replication": null,
   "layers_pattern": null,
   "layers_to_transform": null,
   "loftq_config": {},
@@ -32,6 +31,5 @@
     "gate_proj"
   ],
   "task_type": null,
-  "use_dora": false,
   "use_rslora": false
 }
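
The updated auto_mapping points PEFT at the custom LlamaEncoderModel class instead of the stock LlamaModel. As an illustration only (not part of the commit), below is a minimal Python sketch of how an auto_mapping like this can be resolved when loading the adapter. It assumes the llama_encoder_model package named in the config is importable, that LlamaEncoderModel exposes the usual from_pretrained method, and that the adapter path used is a placeholder.

# Illustrative sketch only: resolve auto_mapping by hand and attach the adapter.
# Assumes "llama_encoder_model" (as named in adapter_config.json) is on the
# Python path and that LlamaEncoderModel provides from_pretrained(); the
# adapter path below is a placeholder.
import importlib
import json

from peft import PeftModel

with open("adapter_config.json") as f:
    cfg = json.load(f)

# auto_mapping records which class backs the adapter:
#   parent_library   = "llama_encoder_model.modeling_llama_encoder"
#   base_model_class = "LlamaEncoderModel"
module = importlib.import_module(cfg["auto_mapping"]["parent_library"])
base_cls = getattr(module, cfg["auto_mapping"]["base_model_class"])

# Load the base weights the adapter was trained against ...
base_model = base_cls.from_pretrained(cfg["base_model_name_or_path"])

# ... then wrap them with the LoRA weights from this repository (placeholder id).
model = PeftModel.from_pretrained(base_model, "path/to/this-adapter")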