Commit 8777cb2
Parent(s): 257d747

Upload BERT_Arch

Files changed:
- config.json (+2 -2)
- init.py (+79 -0)
config.json CHANGED
@@ -3,8 +3,8 @@
     "BERT_Arch"
   ],
   "auto_map": {
-    "AutoConfig": "
-    "AutoModel": "
+    "AutoConfig": "__main__.PragFormerConfig",
+    "AutoModel": "__main__.BERT_Arch"
   },
   "bert": {
     "_commit_hash": "43cf2d48e8c75d255dccab2a19e40d4774fd8853",
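These auto_map entries are the wiring that lets the generic Auto classes resolve the custom code in this repo: a consumer who loads it with trust_remote_code=True gets PragFormerConfig and BERT_Arch (defined in init.py below) instead of the stock classes. The __main__ prefix indicates the classes were registered from a top-level script rather than an importable module. A minimal sketch of the consumer side, with a placeholder repo id:

    from transformers import AutoConfig, AutoModel

    repo_id = "user/PragFormer"  # placeholder; substitute the actual Hub repo

    # trust_remote_code=True is required because the classes ship with the
    # repo rather than with the transformers library itself
    config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
    model = AutoModel.from_pretrained(repo_id, trust_remote_code=True)
    print(type(model).__name__)  # expected: BERT_Arch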
init.py ADDED
@@ -0,0 +1,79 @@
import torch.nn as nn
from transformers import AutoConfig, AutoModel, BertPreTrainedModel, PretrainedConfig


class PragFormerConfig(PretrainedConfig):
    model_type = "pragformer"

    def __init__(self, bert=None, dropout=0.2, fc1=512, fc2=2, softmax_dim=1, **kwargs):
        self.bert = bert
        self.dropout = dropout
        self.fc1 = fc1
        self.fc2 = fc2
        self.softmax_dim = softmax_dim
        super().__init__(**kwargs)


class BERT_Arch(BertPreTrainedModel):
    config_class = PragFormerConfig

    def __init__(self, config):
        super().__init__(config)

        # underlying BERT encoder, loaded from the checkpoint recorded in the config
        self.bert = AutoModel.from_pretrained(config.bert['_name_or_path'])

        # dropout layer
        self.dropout = nn.Dropout(config.dropout)

        # relu activation function
        self.relu = nn.ReLU()

        # dense layer 1
        self.fc1 = nn.Linear(self.config.bert['hidden_size'], config.fc1)
        # self.fc1 = nn.Linear(768, 512)

        # dense layer 2 (output layer)
        self.fc2 = nn.Linear(config.fc1, config.fc2)

        # softmax activation function
        self.softmax = nn.LogSoftmax(dim=config.softmax_dim)

    # define the forward pass
    def forward(self, input_ids, attention_mask):
        # pass the inputs to BERT; with return_dict=False the second element
        # of the returned tuple is the pooled [CLS] representation
        _, cls_hs = self.bert(input_ids, attention_mask=attention_mask, return_dict=False)

        x = self.fc1(cls_hs)
        x = self.relu(x)
        x = self.dropout(x)

        # output layer
        x = self.fc2(x)

        # apply log-softmax activation
        x = self.softmax(x)
        return x
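For context, a minimal sketch of driving this forward pass end to end; the bert-base-uncased checkpoint, the input string, and the randomly initialized head are illustrative assumptions, not part of the commit:

    import torch
    from transformers import AutoTokenizer

    # illustrative config; the committed checkpoint stores the real values,
    # including the full dict under config.bert
    config = PragFormerConfig(bert={"_name_or_path": "bert-base-uncased", "hidden_size": 768})
    model = BERT_Arch(config)

    tok = AutoTokenizer.from_pretrained("bert-base-uncased")
    batch = tok("for (i = 0; i < n; ++i) a[i] = b[i] + c[i];", return_tensors="pt")

    model.eval()
    with torch.no_grad():
        log_probs = model(batch["input_ids"], batch["attention_mask"])
    probs = log_probs.exp()  # invert LogSoftmax to recover class probabilities

The remainder of init.py then registers the classes for the Auto API and pushes the checkpoint to the Hub: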
# register the custom classes so that save_pretrained()/push_to_hub() record
# them in auto_map and upload the defining code alongside the weights
PragFormerConfig.register_for_auto_class()
BERT_Arch.register_for_auto_class("AutoModel")

config = PragFormerConfig.from_pretrained('./Classifier/PragFormer')
model = BERT_Arch(config)
pretrained_model = BERT_Arch.from_pretrained("./Classifier/PragFormer")
model.load_state_dict(pretrained_model.state_dict())
model.push_to_hub("PragFormer")


# alternative: explicit in-process registration with the Auto factories
# AutoConfig.register("pragformer", PragFormerConfig)
# AutoModel.register(PragFormerConfig, BERT_Arch)
# model.push_to_hub("PragFormer")
# config = PragFormerConfig.from_pretrained('./PragFormer')
# model = BERT_Arch(config)
# pretrained_model = BERT_Arch.from_pretrained("./PragFormer")
# model.load_state_dict(pretrained_model.state_dict())
# model.push_to_hub("PragFormer")
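The commented-out block above is the other integration route: explicit in-process registration. Unlike register_for_auto_class, which writes the mapping into config.json so remote consumers can resolve the classes, AutoConfig.register/AutoModel.register only wire the classes into the Auto factories for the current Python session. A sketch of that path, assuming the same local checkpoint layout as the script:

    from transformers import AutoConfig, AutoModel

    AutoConfig.register("pragformer", PragFormerConfig)
    AutoModel.register(PragFormerConfig, BERT_Arch)

    # with the mapping registered, the generic Auto API should resolve the
    # custom classes from the checkpoint's model_type, no remote code needed
    config = AutoConfig.from_pretrained('./Classifier/PragFormer')
    model = AutoModel.from_pretrained('./Classifier/PragFormer')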