AbLang_light / config.py
from transformers import PretrainedConfig
from typing import List


class AbLangConfig(PretrainedConfig):
    """Configuration for the AbLang antibody language model (a BERT-style encoder)."""

    model_type = "bert"

    def __init__(
        self,
        max_position_embeddings: int = 160,
        hidden_size: int = 768,
        num_hidden_layers: int = 12,
        num_attention_heads: int = 12,
        attention_probs_dropout_prob: float = 0.1,
        intermediate_size: int = 3072,
        hidden_act: str = "gelu",
        hidden_dropout_prob: float = 0.1,
        initializer_range: float = 0.02,
        layer_norm_eps: float = 1e-12,
        chain: str = "heavy",
        **kwargs,
    ):
        self.ptid = 21  # padding token id used by the model code
        self.vocab_size = 24  # fixed vocabulary: 20 amino acids plus special tokens
        self.max_position_embeddings = max_position_embeddings
        self.hidden_size = hidden_size
        self.num_hidden_layers = num_hidden_layers
        self.num_attention_heads = num_attention_heads
        self.attention_probs_dropout_prob = attention_probs_dropout_prob
        self.intermediate_size = intermediate_size
        self.hidden_act = hidden_act
        self.hidden_dropout_prob = hidden_dropout_prob
        self.initializer_range = initializer_range
        self.layer_norm_eps = layer_norm_eps
        self.chain = chain  # antibody chain: "heavy" or "light"
        super().__init__(**kwargs)
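
# Minimal usage sketch (illustrative, not part of the uploaded file): construct the
# config directly and inspect a few fields. The chain value and overridden dropout
# below are arbitrary example arguments, not defaults taken from the repository.
if __name__ == "__main__":
    config = AbLangConfig(chain="light", hidden_dropout_prob=0.0)
    print(config.model_type)        # -> "bert"
    print(config.vocab_size)        # -> 24
    print(config.to_json_string())  # PretrainedConfig handles JSON serialization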