Text Generation · Transformers · PyTorch · English · llama · causal-lm · Inference Endpoints · text-generation-inference

Not able to add LoraConfig target modules

#15
by Andyrasika - opened

I tried:

```python
from peft import LoraConfig, get_peft_model

lora_config = LoraConfig(
    r=16,
    lora_alpha=32,
    target_modules=[
        "query_key_value",
        "dense",
        "dense_h_to_4h",
        "dense_4h_to_h",
    ],
    lora_dropout=0.05,
    bias="none",
    task_type="CAUSAL_LM",
)

model = get_peft_model(model, lora_config)
print_trainable_parameters(model)
```

and got this error:
```

ValueError Traceback (most recent call last)
Cell In[24], line 15
1 lora_config = LoraConfig(
2 r=16,
3 lora_alpha=32,
(...)
12 task_type="CAUSAL_LM"
13 )
---> 15 model = get_peft_model(model, lora_config)
16 print_trainable_parameters(model)

File /opt/conda/lib/python3.10/site-packages/peft/mapping.py:98, in get_peft_model(model, peft_config, adapter_name)
96 if isinstance(peft_config, PromptLearningConfig):
97 peft_config = _prepare_prompt_learning_config(peft_config, model_config)
---> 98 return MODEL_TYPE_TO_PEFT_MODEL_MAPPING[peft_config.task_type](model, peft_config, adapter_name=adapter_name)

File /opt/conda/lib/python3.10/site-packages/peft/peft_model.py:893, in PeftModelForCausalLM.init(self, model, peft_config, adapter_name)
892 def init(self, model, peft_config: PeftConfig, adapter_name="default"):
--> 893 super().init(model, peft_config, adapter_name)
894 self.base_model_prepare_inputs_for_generation = self.base_model.prepare_inputs_for_generation

File /opt/conda/lib/python3.10/site-packages/peft/peft_model.py:112, in PeftModel.init(self, model, peft_config, adapter_name)
110 if not isinstance(peft_config, PromptLearningConfig):
111 self.peft_config[adapter_name] = peft_config
--> 112 self.base_model = PEFT_TYPE_TO_MODEL_MAPPING[peft_config.peft_type](
113 self.base_model, self.peft_config, adapter_name
114 )
115 self.set_additional_trainable_modules(peft_config, adapter_name)
116 else:

File /opt/conda/lib/python3.10/site-packages/peft/tuners/lora.py:180, in LoraModel.init(self, model, config, adapter_name)
178 self.forward = self.model.forward
179 self.peft_config = config
--> 180 self.add_adapter(adapter_name, self.peft_config[adapter_name])
182 # transformers models have a .config attribute, whose presence is assumed later on
183 if not hasattr(self, "config"):

File /opt/conda/lib/python3.10/site-packages/peft/tuners/lora.py:194, in LoraModel.add_adapter(self, adapter_name, config)
192 config = self._prepare_lora_config(config, model_config)
193 self.peft_config[adapter_name] = config
--> 194 self._find_and_replace(adapter_name)
195 if len(self.peft_config) > 1 and self.peft_config[adapter_name].bias != "none":
196 raise ValueError(
197 "LoraModel supports only 1 adapter with bias. When using multiple adapters, set bias to 'none' for all adapters."
198 )

File /opt/conda/lib/python3.10/site-packages/peft/tuners/lora.py:356, in LoraModel._find_and_replace(self, adapter_name)
353 self._replace_module(parent, target_name, new_module, target)
355 if not is_target_modules_in_base_model:
--> 356 raise ValueError(
357 f"Target modules {lora_config.target_modules} not found in the base model. "
358 f"Please check the target modules and try again."
359 )

ValueError: Target modules ['query_key_value', 'dense', 'dense_h_to_4h', 'dense_4h_to_h'] not found in the base model. Please check the target modules and try again.
```

Any workaround for this issue?
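For context on the error: none of the names in `target_modules` matches any submodule of the loaded base model. The four names in the config above are typical of Falcon/GPT-NeoX-style architectures, while this repository is tagged as a Llama model. Below is a minimal diagnostic sketch, not from the original post, assuming `model` is the base model loaded earlier; it prints the module names that actually exist and then targets Llama-style projection names as an example:

```python
from peft import LoraConfig, get_peft_model

# Inspect the module names that actually exist in the loaded model;
# only names from this set can appear in target_modules.
leaf_names = {name.split(".")[-1] for name, _ in model.named_modules() if name}
print(sorted(leaf_names))

# Llama-style models typically expose q_proj / k_proj / v_proj / o_proj
# (adjust to whatever the print above shows for your checkpoint).
lora_config = LoraConfig(
    r=16,
    lora_alpha=32,
    target_modules=["q_proj", "k_proj", "v_proj", "o_proj"],
    lora_dropout=0.05,
    bias="none",
    task_type="CAUSAL_LM",
)

model = get_peft_model(model, lora_config)
model.print_trainable_parameters()  # built-in PEFT helper
```

The key point is that `target_modules` must name layers that exist in this particular architecture, so listing `model.named_modules()` first tells you which names are valid.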
