hplisiecki
committed on
Commit • 02f14f0
Parent(s): 9eaf6a1
Update model_script.py
model_script.py CHANGED (+70 -58)
@@ -1,58 +1,70 @@
-import torch
-import torch.nn as nn
-[56 further removed lines from the previous version are not recoverable from this view]
+import torch
+import torch.nn as nn
+import os
+from transformers import AutoModel
+
+
+class Model(torch.nn.Module):
+
+    def __init__(self, model_dir, dropout=0.2, hidden_dim=768):
+        """
+        Initialize the model.
+        :param model_dir: directory of the pretrained transformer backbone
+        :param dropout: the dropout rate
+        :param hidden_dim: the hidden dimension of the model
+        """
+        super(Model, self).__init__()
+        self.metric_names = ['Happiness', 'Sadness', 'Anger', 'Disgust', 'Fear', 'Pride', 'Valence', 'Arousal']
+        self.bert = AutoModel.from_pretrained(model_dir)
+
+        # one residual hidden layer and one output head per metric
+        for name in self.metric_names:
+            setattr(self, name, nn.Linear(hidden_dim, 1))
+            setattr(self, 'l_1_' + name, nn.Linear(hidden_dim, hidden_dim))
+
+        self.layer_norm = nn.LayerNorm(hidden_dim)
+        self.relu = nn.ReLU()
+        self.dropout = nn.Dropout(dropout)
+        self.sigmoid = nn.Sigmoid()
+
+    def forward(self, input_id, mask):
+        """
+        Forward pass of the model.
+        :param input_id: input token ids
+        :param mask: attention mask
+        :return: a list of predicted ratings, one tensor per metric
+        """
+        # pooled sequence representation from the transformer backbone
+        _, x = self.bert(input_ids=input_id, attention_mask=mask, return_dict=False)
+        output = self.rate_embedding(x)
+        return output
+
+    def rate_embedding(self, x):
+        output_ratings = []
+        for name in self.metric_names:
+            first_layer = self.relu(self.dropout(self.layer_norm(getattr(self, 'l_1_' + name)(x) + x)))
+            second_layer = self.sigmoid(getattr(self, name)(first_layer))
+            output_ratings.append(second_layer)
+
+        return output_ratings
+
+    def save_pretrained(self, save_directory):
+        self.bert.save_pretrained(save_directory)
+        torch.save(self.state_dict(), f'{save_directory}/pytorch_model.bin')
+
+    @classmethod
+    def from_pretrained(cls, model_dir, dropout=0.2, hidden_dim=768):
+        if not os.path.isdir(model_dir):
+            raise ValueError(f"The provided model directory {model_dir} is not a valid directory.")
+
+        model_path = os.path.join(model_dir, 'pytorch_model.bin')
+        if not os.path.isfile(model_path):
+            raise FileNotFoundError(f"The model file pytorch_model.bin was not found in the directory {model_dir}.")
+
+        config_path = os.path.join(model_dir, 'config.json')
+        if not os.path.isfile(config_path):
+            raise FileNotFoundError(f"The configuration file config.json was not found in the directory {model_dir}.")
+
+        model = cls(model_dir, dropout, hidden_dim)
+        state_dict = torch.load(model_path, map_location=torch.device('cpu'))
+        model.load_state_dict(state_dict)
+        return model
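For context, a minimal usage sketch of the committed class (not part of the diff). It assumes the checkpoint directory also contains tokenizer files alongside pytorch_model.bin and config.json, and that model_script.py is importable; the directory path and the example sentence are placeholders.

# usage_sketch.py -- loading the committed Model and scoring one sentence
import torch
from transformers import AutoTokenizer
from model_script import Model

model_dir = "path/to/checkpoint"  # hypothetical directory with pytorch_model.bin, config.json, tokenizer files
tokenizer = AutoTokenizer.from_pretrained(model_dir)
model = Model.from_pretrained(model_dir)
model.eval()

enc = tokenizer("Example sentence to rate.", return_tensors="pt", truncation=True)
with torch.no_grad():
    ratings = model(enc["input_ids"], enc["attention_mask"])

# one sigmoid-scaled tensor per metric, in the order of model.metric_names
for name, value in zip(model.metric_names, ratings):
    print(name, value.item())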