Initialize attention vector
Browse files — torchmoji/attlayer.py (+1, −0)
torchmoji/attlayer.py
CHANGED
@@ -29,6 +29,7 @@ class Attention(Module):
         self.return_attention = return_attention
         self.attention_size = attention_size
         self.attention_vector = Parameter(torch.FloatTensor(attention_size))
+        self.attention_vector.data.normal_(std=0.05)  # Initialize attention vector

     def __repr__(self):
         s = '{name}({attention_size}, return attention={return_attention})'