henry2024 and ClefChen committed on
Commit
3bcce06
1 Parent(s): 3eafb37

Upload model.py (#7)


- Upload model.py (afa399dbffdd37cb1ec72766133ae4a9faff8a63)


Co-authored-by: Feiyu Chen <ClefChen@users.noreply.huggingface.co>

Files changed (1):
  1. model.py (+34 -13)
model.py CHANGED
@@ -1,17 +1,38 @@
  import torch
  from torch import nn

- class RNN_model(nn.Module):
-     def __init__(self):
-         super().__init__()
-
-         self.rnn= nn.RNN(input_size=1080, hidden_size=240,num_layers=1, nonlinearity= 'relu', bias= True)
-         self.output= nn.Linear(in_features=240, out_features=24)
-
-     def forward(self, x):
-         y, hidden= self.rnn(x)
-         #print(y.shape)
-         #print(hidden.shape)
-         x= self.output(y)
-
-         return(x)
+ class ImprovedGRUModel(nn.Module):
+     def __init__(self,
+                  input_size=1080,
+                  hidden_size=240,
+                  output_size=24,
+                  num_layers=2,
+                  bidirectional=True,
+                  dropout_rate=0.1):
+         super(ImprovedGRUModel, self).__init__()
+         self.hidden_size = hidden_size
+         self.num_directions = 2 if bidirectional else 1
+         self.gru = nn.GRU(
+             input_size=input_size,
+             hidden_size=self.hidden_size,
+             num_layers=num_layers,
+             batch_first=True,
+             dropout=dropout_rate if num_layers > 1 else 0,
+             bidirectional=bidirectional
+         )
+         self.fc1 = nn.Linear(hidden_size * self.num_directions, hidden_size)
+         self.dropout = nn.Dropout(dropout_rate)
+         self.fc2 = nn.Linear(hidden_size, output_size)
+
+     def forward(self, x):
+         gru_out, _ = self.gru(x)
+
+         fc1_out = self.fc1(gru_out)
+
+         fc1_out = torch.relu(fc1_out)
+
+         fc1_out = self.dropout(fc1_out)
+
+         output = self.fc2(fc1_out)
+
+         return output