KhaldiAbderrhmane committed on
Commit 1d451be
1 Parent(s): 0d35791

Upload 3 files

__init__.py ADDED
@@ -0,0 +1,2 @@
+ from .configuration_emotion_classifier import EmotionClassifierConfig
+ from .modeling_emotion_classifier import EmotionClassifierHuBERT
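The two re-exports above make both custom classes available at the package root. A minimal local-use sketch, assuming the repository directory is importable as a package named `emotion_classifier` (a hypothetical name chosen here):

```python
# Sketch only: assumes the repo files live in a package importable as
# `emotion_classifier` (hypothetical name) so the relative imports resolve.
from transformers import AutoConfig, AutoModel

from emotion_classifier import EmotionClassifierConfig, EmotionClassifierHuBERT

# Optionally register the custom classes so the Auto* factories can resolve
# the "emotion_classifier" model_type declared in the config.
AutoConfig.register("emotion_classifier", EmotionClassifierConfig)
AutoModel.register(EmotionClassifierConfig, EmotionClassifierHuBERT)

config = EmotionClassifierConfig(hidden_size=128, num_classes=6)
model = AutoModel.from_config(config)  # resolves to EmotionClassifierHuBERT
```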
configuration_emotion_classifier.py ADDED
@@ -0,0 +1,14 @@
+ from transformers import PretrainedConfig
+
+ class EmotionClassifierConfig(PretrainedConfig):
+     model_type = "emotion_classifier"
+
+     def __init__(
+         self,
+         hidden_size=128,
+         num_classes=6,
+         **kwargs,
+     ):
+         super().__init__(**kwargs)
+         self.hidden_size = hidden_size
+         self.num_classes = num_classes
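The config only carries the classifier head's two hyperparameters; everything else is inherited from `PretrainedConfig`, so it serializes to a standard `config.json`. A quick round-trip sketch (the save path is arbitrary), again assuming the hypothetical `emotion_classifier` package:

```python
from emotion_classifier import EmotionClassifierConfig

config = EmotionClassifierConfig(hidden_size=128, num_classes=6)
config.save_pretrained("./emotion-classifier")          # writes config.json
reloaded = EmotionClassifierConfig.from_pretrained("./emotion-classifier")

print(reloaded.model_type)    # "emotion_classifier"
print(reloaded.hidden_size)   # 128
print(reloaded.num_classes)   # 6
```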
modeling_emotion_classifier.py ADDED
@@ -0,0 +1,29 @@
+ from transformers import PreTrainedModel, HubertModel
+ import torch.nn as nn
+ import torch
+ from .configuration_emotion_classifier import EmotionClassifierConfig
+
+
+ class EmotionClassifierHuBERT(PreTrainedModel):
+     config_class = EmotionClassifierConfig
+
+     def __init__(self, config):
+         super().__init__(config)
+         self.hubert = HubertModel.from_pretrained("facebook/hubert-large-ls960-ft")
+         self.conv1 = nn.Conv1d(in_channels=1024, out_channels=512, kernel_size=3, padding=1)
+         self.conv2 = nn.Conv1d(in_channels=512, out_channels=256, kernel_size=3, padding=1)
+         self.transformer_encoder = nn.TransformerEncoderLayer(d_model=256, nhead=8)
+         self.bilstm = nn.LSTM(input_size=256, hidden_size=config.hidden_size, num_layers=2, batch_first=True, bidirectional=True)
+         self.fc = nn.Linear(config.hidden_size * 2, config.num_classes) # * 2 for bidirectional
+
+     def forward(self, x):
+         with torch.no_grad():
+             features = self.hubert(x).last_hidden_state
+         features = features.transpose(1, 2)
+         x = torch.relu(self.conv1(features))
+         x = torch.relu(self.conv2(x))
+         x = x.transpose(1, 2)
+         x = self.transformer_encoder(x)
+         x, _ = self.bilstm(x)
+         x = self.fc(x[:, -1, :])
+         return x
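`forward` expects raw waveforms of shape `(batch, samples)` as HuBERT does, runs the backbone without gradients, then applies the two convolutions, the transformer encoder layer, the bidirectional LSTM, and a linear head on the last time step to produce `(batch, num_classes)` logits. A smoke-test sketch under the same hypothetical package name (the one-second, 16 kHz dummy input is arbitrary):

```python
import torch

from emotion_classifier import EmotionClassifierConfig, EmotionClassifierHuBERT

config = EmotionClassifierConfig(hidden_size=128, num_classes=6)
model = EmotionClassifierHuBERT(config)   # __init__ downloads facebook/hubert-large-ls960-ft
model.eval()

# Dummy batch: two one-second clips of 16 kHz audio (random placeholder values).
waveforms = torch.randn(2, 16000)

with torch.no_grad():
    logits = model(waveforms)

print(logits.shape)   # torch.Size([2, 6])
```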