{
  "model": "gpt2",
  "segment_vocab_size": 0,
  "vocab_size": 21128,
  "hidden_size": 768,
  "attention_probs_dropout_prob": 0.1,
  "hidden_dropout_prob": 0.1,
  "hidden_act": "gelu",
  "initializer_range": 0.014142135623731,
  "intermediate_size": 3072,
  "max_position_embeddings": 1024,
  "num_attention_heads": 12,
  "num_hidden_layers": 12
}
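
For reference, a minimal Python sketch of how a config like this might be consumed: reading it with the standard `json` module and checking the shape relationships the values imply. The path `config.json` and the sanity checks are illustrative assumptions, not part of the original snippet.

```python
import json
import math

# Load the GPT-2 config shown above (the path "config.json" is assumed here).
with open("config.json") as f:
    cfg = json.load(f)

# Per-head dimension implied by the config: 768 / 12 = 64.
assert cfg["hidden_size"] % cfg["num_attention_heads"] == 0
head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]

# The feed-forward layer uses the usual 4x expansion: 3072 = 4 * 768.
assert cfg["intermediate_size"] == 4 * cfg["hidden_size"]

# segment_vocab_size = 0 indicates no token-type (segment) embeddings,
# as expected for a GPT-2-style decoder-only model.
assert cfg["segment_vocab_size"] == 0

# Note: initializer_range here equals 0.02 / sqrt(2) (~0.0141421356),
# rather than the plain 0.02 used in many BERT-style configs.
assert math.isclose(cfg["initializer_range"], 0.02 / math.sqrt(2), rel_tol=1e-9)

print(f"head_dim={head_dim}, layers={cfg['num_hidden_layers']}, "
      f"max_len={cfg['max_position_embeddings']}")
```

The checks mirror the constraints a loader would typically enforce: `hidden_size` must divide evenly across attention heads, and the remaining fields (dropout rates, `hidden_act`, `initializer_range`) are passed through to layer construction as-is.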