Commit
•
257d747
1
Parent(s):
9097e70
Upload BERT_Arch
Browse files
Files changed:
- config.json (+4, -0)
- model.py (+4, -1)
- pragformer_config.py (+28, -0)
config.json
CHANGED
@@ -2,6 +2,10 @@
|
|
2 |
"architectures": [
|
3 |
"BERT_Arch"
|
4 |
],
|
|
|
|
|
|
|
|
|
5 |
"bert": {
|
6 |
"_commit_hash": "43cf2d48e8c75d255dccab2a19e40d4774fd8853",
|
7 |
"_name_or_path": "NTUYG/DeepSCC-RoBERTa",
|
|
|
2 |
"architectures": [
|
3 |
"BERT_Arch"
|
4 |
],
|
5 |
+
"auto_map": {
|
6 |
+
"AutoConfig": "pragformer_config.PragFormerConfig",
|
7 |
+
"AutoModel": "model.BERT_Arch"
|
8 |
+
},
|
9 |
"bert": {
|
10 |
"_commit_hash": "43cf2d48e8c75d255dccab2a19e40d4774fd8853",
|
11 |
"_name_or_path": "NTUYG/DeepSCC-RoBERTa",
|
model.py
CHANGED
@@ -1,6 +1,9 @@
|
|
1 |
from transformers import BertPreTrainedModel, AutoModel, PretrainedConfig
|
|
|
|
|
|
|
2 |
import torch.nn as nn
|
3 |
-
from pragformer_config import PragFormerConfig
|
4 |
|
5 |
|
6 |
|
|
|
1 |
from transformers import BertPreTrainedModel, AutoModel, PretrainedConfig
|
2 |
+
import sys
|
3 |
+
sys.path.append("..")
|
4 |
+
|
5 |
import torch.nn as nn
|
6 |
+
from Classifier.pragformer_config import PragFormerConfig
|
7 |
|
8 |
|
9 |
|
pragformer_config.py
ADDED
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from transformers import PretrainedConfig, AutoModel


class PragFormerConfig(PretrainedConfig):
    """Configuration for the PragFormer classifier (``BERT_Arch``).

    Holds the nested backbone config plus the classifier-head
    hyperparameters consumed by ``model.BERT_Arch``. Registered for
    ``AutoConfig`` loading through the ``auto_map`` entry in
    ``config.json`` (``"AutoConfig": "pragformer_config.PragFormerConfig"``).
    """

    # Identifier used by the transformers auto-class machinery.
    model_type = "pragformer"

    def __init__(self, bert=None, dropout=0.2, fc1=512, fc2=2, softmax_dim=1, **kwargs):
        """
        Args:
            bert: Serialized config of the wrapped BERT backbone — stored
                under the ``"bert"`` key of ``config.json`` (here
                ``NTUYG/DeepSCC-RoBERTa``). ``None`` when constructed empty.
            dropout: Dropout probability of the classification head.
            fc1: Width of the first fully connected layer.
            fc2: Width of the second (output) fully connected layer,
                i.e. the number of target classes.
            softmax_dim: Dimension along which the final softmax is taken.
            **kwargs: Forwarded verbatim to ``PretrainedConfig.__init__``.
        """
        self.bert = bert
        self.dropout = dropout
        self.fc1 = fc1
        self.fc2 = fc2
        self.softmax_dim = softmax_dim
        super().__init__(**kwargs)
|