add model

- config.json +2 -2
- configuration_word2vec.py +10 -0
- modeling_word2vec.py +18 -0
config.json CHANGED
@@ -3,8 +3,8 @@
     "PretrainedWord2VecHFModel"
   ],
   "auto_map": {
-    "AutoConfig": "
-    "AutoModel": "
+    "AutoConfig": "configuration_word2vec.PretrainedWord2VecHFConfig",
+    "AutoModel": "modeling_word2vec.PretrainedWord2VecHFModel"
   },
   "hidden_size": 50,
   "model_type": "glove",
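With both auto_map entries pointing at the new modules, the custom classes can be resolved through the Transformers Auto API. A minimal loading sketch, assuming this commit lives in a Hub repository (the repo id below is a placeholder):

    from transformers import AutoConfig, AutoModel

    # trust_remote_code is required so that the classes referenced in auto_map
    # (configuration_word2vec.py and modeling_word2vec.py) are fetched and used.
    repo_id = "user/word2vec-glove-50"  # placeholder repo id
    config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
    model = AutoModel.from_pretrained(repo_id, trust_remote_code=True)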
configuration_word2vec.py ADDED
@@ -0,0 +1,10 @@
+from transformers import PretrainedConfig
+
+class PretrainedWord2VecHFConfig(PretrainedConfig):
+    model_type = "glove"
+
+    def __init__(self, num_words=400001, vector_size=50, **kwargs):
+        self.num_words = num_words
+        self.vector_size = vector_size
+        self.hidden_size = self.vector_size  # Required for sBERT
+        super().__init__(**kwargs)
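The config mirrors vector_size into hidden_size so downstream tooling (e.g. sentence-transformers) can read the embedding width. A quick sketch of how it behaves, using only the defaults from the diff; the output directory name is a placeholder:

    from configuration_word2vec import PretrainedWord2VecHFConfig

    cfg = PretrainedWord2VecHFConfig()        # num_words=400001, vector_size=50
    print(cfg.hidden_size)                    # 50, mirrored from vector_size
    cfg.save_pretrained("word2vec-glove-50")  # writes a config.json with model_type "glove"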
modeling_word2vec.py ADDED
@@ -0,0 +1,18 @@
+from transformers import PreTrainedModel
+from torch import nn
+import torch
+from .configuration_word2vec import PretrainedWord2VecHFConfig
+
+class PretrainedWord2VecHFModel(PreTrainedModel):
+    config_class = PretrainedWord2VecHFConfig
+
+    def __init__(self, config):
+        super().__init__(config)
+        self.embeddings = nn.Embedding(config.num_words, config.vector_size)
+
+    def set_embeddings(self, embeddings):
+        self.embeddings = nn.Embedding.from_pretrained(torch.tensor(embeddings))
+
+    def forward(self, input_ids, **kwargs):
+        x = self.embeddings(torch.tensor(input_ids))
+        return x
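To exercise the model end to end, a hedged sketch: the random matrix stands in for the real GloVe vectors (which this commit does not ship), and the package name word2vec_model is an assumption, needed only because modeling_word2vec.py uses a relative import when run outside the Hub's remote-code loader:

    import numpy as np
    import torch

    # Assumes the two new modules sit in a package directory, e.g. word2vec_model/,
    # so the relative import in modeling_word2vec.py resolves.
    from word2vec_model.configuration_word2vec import PretrainedWord2VecHFConfig
    from word2vec_model.modeling_word2vec import PretrainedWord2VecHFModel

    config = PretrainedWord2VecHFConfig(num_words=400001, vector_size=50)
    model = PretrainedWord2VecHFModel(config)

    # Placeholder weights; in practice these would be the parsed GloVe vectors.
    vectors = np.random.rand(config.num_words, config.vector_size).astype("float32")
    model.set_embeddings(vectors)

    input_ids = torch.tensor([[1, 2, 3]])
    with torch.no_grad():
        embeddings = model(input_ids)  # shape: (1, 3, 50)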