Commit
•
37eccca
1
Parent(s):
04053a6
Upload Bilma
Browse files
- config.json +2 -2
- configuration_bilma.py +1 -1
- tf_model.h5 +1 -1
config.json
CHANGED
@@ -1,6 +1,6 @@
|
|
1 |
{
|
2 |
"architectures": [
|
3 |
-
"
|
4 |
],
|
5 |
"auto_map": {
|
6 |
"AutoConfig": "configuration_bilma.BilmaConfig",
|
@@ -8,7 +8,7 @@
|
|
8 |
},
|
9 |
"drop_rate": 0.1,
|
10 |
"embedding_dim": 512,
|
11 |
-
"model_type": "
|
12 |
"num_attention_heads": 4,
|
13 |
"num_encoders": 2,
|
14 |
"seq_max_length": 280,
|
|
|
1 |
{
|
2 |
"architectures": [
|
3 |
+
"lma"
|
4 |
],
|
5 |
"auto_map": {
|
6 |
"AutoConfig": "configuration_bilma.BilmaConfig",
|
|
|
8 |
},
|
9 |
"drop_rate": 0.1,
|
10 |
"embedding_dim": 512,
|
11 |
+
"model_type": "bilma",
|
12 |
"num_attention_heads": 4,
|
13 |
"num_encoders": 2,
|
14 |
"seq_max_length": 280,
|
configuration_bilma.py
CHANGED
@@ -1,7 +1,7 @@
|
|
1 |
from transformers import PretrainedConfig
|
2 |
|
3 |
class BilmaConfig(PretrainedConfig):
|
4 |
-
model_type = "
|
5 |
|
6 |
def __init__(
|
7 |
self,
|
|
|
1 |
from transformers import PretrainedConfig
|
2 |
|
3 |
class BilmaConfig(PretrainedConfig):
|
4 |
+
model_type = "bilma"
|
5 |
|
6 |
def __init__(
|
7 |
self,
|
tf_model.h5
CHANGED
@@ -1,3 +1,3 @@
|
|
1 |
version https://git-lfs.github.com/spec/v1
|
2 |
-
oid sha256:
|
3 |
size 156561684
|
|
|
1 |
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:42a62ff751c90b4e9cdf2f33f2744732dcb79a061db2dda2a344477dec95a6ab
|
3 |
size 156561684
|