""" GPT1 model configuration """
from transformers import PretrainedConfig
class GPT1Config(PretrainedConfig):
model_type = "gpt1"
    def __init__(
        self,
        vocab_size=40000,
        hidden_size=768,
        intermediate_size=3072,
        num_hidden_layers=12,
        num_attention_heads=12,
        resid_pdrop=0.1,
        embd_pdrop=0.1,
        attention_dropout=0.1,
        hidden_act="gelu",
        max_position_embeddings=512,
        initializer_range=0.02,
        layer_norm_eps=1e-5,
        tie_word_embeddings=True,
        **kwargs,
    ):
        self.vocab_size = vocab_size
        self.hidden_size = hidden_size
        self.intermediate_size = intermediate_size
        self.num_hidden_layers = num_hidden_layers
        self.num_attention_heads = num_attention_heads
        self.resid_pdrop = resid_pdrop
        self.embd_pdrop = embd_pdrop
        self.attention_dropout = attention_dropout
        self.hidden_act = hidden_act
        self.max_position_embeddings = max_position_embeddings
        self.initializer_range = initializer_range
        self.layer_norm_eps = layer_norm_eps
        # Forward tie_word_embeddings (plus any remaining kwargs) to
        # PretrainedConfig so they are validated and serialized with the config.
        super().__init__(
            tie_word_embeddings=tie_word_embeddings,
            **kwargs,
        )
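
# --- Usage sketch (illustrative; not part of the original file) ---
# A minimal example of how a custom PretrainedConfig subclass is typically
# wired up: register the "gpt1" model_type with AutoConfig, then round-trip
# the config through save_pretrained / from_pretrained. The save directory
# "./gpt1-small" is a hypothetical path.
if __name__ == "__main__":
    from transformers import AutoConfig

    # Lets AutoConfig.from_pretrained resolve model_type "gpt1" to this class.
    AutoConfig.register("gpt1", GPT1Config)

    config = GPT1Config(num_hidden_layers=6)  # override one default
    config.save_pretrained("./gpt1-small")    # writes ./gpt1-small/config.json
    reloaded = AutoConfig.from_pretrained("./gpt1-small")
    assert isinstance(reloaded, GPT1Config)
    assert reloaded.num_hidden_layers == 6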