from transformers import GPTJConfig


class GPTJLoraConfig(GPTJConfig):
    """GPT-J config with an extra flag indicating whether LoRA adapters should be added."""

    def __init__(self, add_adapters=False, **kwargs):
        # Store the flag before calling the parent constructor so it is kept
        # as a regular config attribute (and serialized with the config).
        self.add_adapters = add_adapters
        super().__init__(**kwargs)
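# A minimal usage sketch, assuming the config is used as a drop-in replacement
# for GPTJConfig when building a LoRA-augmented GPT-J model:
#
#   config = GPTJLoraConfig(add_adapters=True)
#   assert config.add_adapters
#
# Because add_adapters is a plain instance attribute, it is included when the
# config is serialized (e.g. via config.to_json_string()) like any other field.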