Upload AbLang
- config.json +1 -0
- config.py +1 -0
config.json CHANGED
@@ -16,6 +16,7 @@
   "intermediate_size": 3072,
   "layer_norm_eps": 1e-12,
   "max_position_embeddings": 160,
+  "model_type": "bert",
   "num_attention_heads": 12,
   "num_hidden_layers": 12,
   "ptid": 21,
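The new "model_type": "bert" key is what the Transformers Auto classes use to map a config.json to a known architecture. A minimal loading sketch, assuming the uploaded files sit in a hypothetical local directory ./AbLang:

from transformers import AutoConfig

# Hypothetical local path holding the updated config.json.
cfg = AutoConfig.from_pretrained("./AbLang")

# With "model_type": "bert" present, AutoConfig resolves the file to the
# BERT config class and exposes the values stored in it.
print(cfg.model_type)               # bert
print(cfg.max_position_embeddings)  # 160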
config.py CHANGED
@@ -2,6 +2,7 @@ from transformers import PretrainedConfig
 from typing import List
 
 class AbLangConfig(PretrainedConfig):
+    model_type = "bert"
     def __init__(
         self,
         max_position_embeddings: int=160,
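On the Python side, model_type is declared as a class attribute of AbLangConfig, so configs created from this class serialize the same key. A minimal usage sketch, assuming config.py is importable as shown and that __init__ forwards any remaining keyword arguments to PretrainedConfig:

from config import AbLangConfig  # the config.py changed in this commit

cfg = AbLangConfig(max_position_embeddings=160)
assert cfg.model_type == "bert"          # class attribute added by this change

# Hypothetical output directory; the written config.json now includes
# "model_type": "bert" alongside the other fields.
cfg.save_pretrained("./ablang-config")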