from transformers import GPTJConfig


class GPTJLoraConfig(GPTJConfig):
    # Distinct model_type so this config is distinguishable from plain GPT-J.
    model_type = "gptj-lora"

    def __init__(self, add_adapters=False, **kwargs):
        # Whether LoRA adapter layers should be added to the model.
        self.add_adapters = add_adapters
        super().__init__(**kwargs)
        self.model_type = "gptj-lora"
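A minimal usage sketch, assuming the class above is importable: registering the custom model_type with AutoConfig lets a saved config round-trip back into GPTJLoraConfig. The registration call and the save directory name are illustrative, not part of the original snippet.

from transformers import AutoConfig

# Register the custom config under its model_type (illustrative; assumes
# GPTJLoraConfig is defined/imported as above).
AutoConfig.register("gptj-lora", GPTJLoraConfig)

# Instantiate with adapters enabled and round-trip through save/load.
config = GPTJLoraConfig(add_adapters=True)
config.save_pretrained("gptj-lora-config")  # hypothetical output directory

reloaded = AutoConfig.from_pretrained("gptj-lora-config")
assert isinstance(reloaded, GPTJLoraConfig)
assert reloaded.add_adapters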