from transformers import T5Config


class SIPFinetuningModelConfig(T5Config):
    model_type = "sip_finetune"

    def __init__(self,
                 num_examples: int = 32,
                 prefix_length: int = 50,
                 random_selection: bool = True,
                 # don't change these unless you change what the prefix of the model is initialized with:
                 prefix_max_init_length: int = 70,
                 num_precomputed_examples: int = 400,
                 **kwargs):
        # These are all about the initialization of the prefix.
        self.num_examples = num_examples
        self.prefix_length = prefix_length
        self.random_selection = random_selection
        self.prefix_max_init_length = prefix_max_init_length
        self.num_precomputed_examples = num_precomputed_examples
        super().__init__(**kwargs)
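
# Usage sketch (illustrative only): the keyword arguments below are the SIP-specific
# parameters defined above; any remaining kwargs (e.g. d_model) are forwarded to
# T5Config. Whether this config is registered with AutoConfig under "sip_finetune"
# depends on the rest of the repository, so that step is not shown here.
#
#   config = SIPFinetuningModelConfig(num_examples=32, prefix_length=50,
#                                     random_selection=True, d_model=512)
#   config.save_pretrained("sip_finetune_config")          # writes config.json
#   reloaded = SIPFinetuningModelConfig.from_pretrained("sip_finetune_config")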