Pragformer committed on
Commit 0c2edec
1 Parent(s): 6c2fcf7

Upload BERT_Arch

Files changed (3):
  1. config.json +2 -2
  2. model.py +12 -100
  3. model_config.py +13 -0
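
The substantive change is the auto_map in config.json below: the Auto classes now resolve to model_config.PragFormerConfig and model.BERT_Arch, modules shipped with the repo, instead of classes defined in __main__. A minimal loading sketch under that assumption, not part of the commit itself; the repo id is a placeholder and trust_remote_code requires a transformers release with remote-code support:

from transformers import AutoConfig, AutoModel

# Placeholder repo id for illustration only.
repo_id = "Pragformer/PragFormer"

# trust_remote_code=True lets transformers import model_config.PragFormerConfig
# and model.BERT_Arch from the checkpoint repo, as declared in auto_map.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModel.from_pretrained(repo_id, trust_remote_code=True)
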
config.json CHANGED
@@ -3,8 +3,8 @@
     "BERT_Arch"
   ],
   "auto_map": {
-    "AutoConfig": "__main__.PragFormerConfig",
-    "AutoModel": "__main__.BERT_Arch"
+    "AutoConfig": "model_config.PragFormerConfig",
+    "AutoModel": "model.BERT_Arch"
   },
   "bert": {
     "_commit_hash": "43cf2d48e8c75d255dccab2a19e40d4774fd8853",
model.py CHANGED
@@ -1,11 +1,19 @@
+from transformers import AutoModel, AutoConfig
+import torch.nn as nn
 from transformers import BertPreTrainedModel, AutoModel, PretrainedConfig
-import sys
-sys.path.append("..")

-import torch.nn as nn
-from Classifier.pragformer_config import PragFormerConfig


+class PragFormerConfig(PretrainedConfig):
+    model_type = "pragformer"
+
+    def __init__(self, bert=None, dropout=0.2, fc1=512, fc2=2, softmax_dim=1, **kwargs):
+        self.bert = bert
+        self.dropout = dropout
+        self.fc1 = fc1
+        self.fc2 = fc2
+        self.softmax_dim = softmax_dim
+        super().__init__(**kwargs)


 class BERT_Arch(BertPreTrainedModel):
@@ -49,99 +57,3 @@ class BERT_Arch(BertPreTrainedModel):
         # apply softmax activation
         x = self.softmax(x)
         return x
-
-
-
-
-
-# class BERT_Arch_new(BertPreTrainedModel):
-#     def __init__(self, config):
-#         super().__init__(config)
-#         self.bert = AutoModel.from_pretrained('/home/talkad/Desktop/pragformer/PragFormer/DeepSCC-RoBERTa')
-
-#         # dropout layer
-#         self.dropout = nn.Dropout(0.2)
-
-#         # relu activation function
-#         self.relu = nn.ReLU()
-
-#         # dense layer 1
-#         self.fc1 = nn.Linear(self.config.hidden_size, 512)
-#         # self.fc1 = nn.Linear(768, 512)
-
-#         # dense layer 2 (Output layer)
-#         self.fc2 = nn.Linear(512, 2)
-
-#         # softmax activation function
-#         self.softmax = nn.LogSoftmax(dim = 1)
-
-#     # define the forward pass
-#     def forward(self, input_ids, attention_mask):
-#         # pass the inputs to the model
-#         _, cls_hs = self.bert(input_ids, attention_mask = attention_mask, return_dict=False)
-
-#         x = self.fc1(cls_hs)
-
-#         x = self.relu(x)
-
-#         x = self.dropout(x)
-
-#         # output layer
-#         x = self.fc2(x)
-
-#         # apply softmax activation
-#         x = self.softmax(x)
-#         return x
-
-
-
-
-
-
-
-
-# class BERT_Arch(nn.Module):
-#     def __init__(self, bert):
-#         super(BERT_Arch, self).__init__()
-#         self.bert = bert
-
-#         # dropout layer
-#         self.dropout = nn.Dropout(0.2)
-
-#         # relu activation function
-#         self.relu = nn.ReLU()
-
-#         # dense layer 1
-#         self.fc1 = nn.Linear(768, 512)
-
-
-#         # dense layer 2 (Output layer)
-#         self.fc2 = nn.Linear(512, 2)
-
-#         # softmax activation function
-#         self.softmax = nn.LogSoftmax(dim = 1)
-
-#     # define the forward pass
-#     def forward(self, input_ids, attention_mask):
-#         # pass the inputs to the model
-#         _, cls_hs = self.bert(input_ids, attention_mask = attention_mask, return_dict=False)
-
-#         x = self.fc1(cls_hs)
-
-#         x = self.relu(x)
-
-#         x = self.dropout(x)
-
-#         # output layer
-#         x = self.fc2(x)
-
-#         # apply softmax activation
-#         x = self.softmax(x)
-#         return x
-
-#     def save_pretrained_model(self, path="", push=False, repo_name=""):
-#         if not push:
-#             self.bert.save_pretrained(path, repo_url=repo_name)
-#         else:
-#             self.bert.push_to_hub(repo_name)
-
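
For purely local use, the custom classes can also be tied to the Auto factories by hand instead of relying on the auto_map lookup. A hedged sketch, not part of this commit, assuming a transformers release that exposes AutoConfig.register and AutoModel.register:

from transformers import AutoConfig, AutoModel

from model_config import PragFormerConfig
from model import BERT_Arch

# Register the config under its model_type, then tie the model class to it,
# so AutoConfig/AutoModel can build PragFormer objects without remote-code lookup.
AutoConfig.register("pragformer", PragFormerConfig)
AutoModel.register(PragFormerConfig, BERT_Arch)
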
model_config.py ADDED
@@ -0,0 +1,13 @@
+from transformers import PretrainedConfig
+
+
+class PragFormerConfig(PretrainedConfig):
+    model_type = "pragformer"
+
+    def __init__(self, bert=None, dropout=0.2, fc1=512, fc2=2, softmax_dim=1, **kwargs):
+        self.bert = bert
+        self.dropout = dropout
+        self.fc1 = fc1
+        self.fc2 = fc2
+        self.softmax_dim = softmax_dim
+        super().__init__(**kwargs)
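
A small usage sketch for the new config class; the nested bert value and the output directory are illustrative assumptions, not taken from the commit:

from model_config import PragFormerConfig

# Build a config with the defaults defined above; the bert field is expected to
# hold the wrapped encoder's config as a plain dict (placeholder shown here).
config = PragFormerConfig(bert={"_name_or_path": "bert-base-uncased"},
                          dropout=0.2, fc1=512, fc2=2, softmax_dim=1)

# PretrainedConfig.save_pretrained writes a config.json with model_type "pragformer".
config.save_pretrained("./pragformer_config")
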