Add custom embedding
#36
by bcui19 · opened

Add custom embedding    +12 -0    ADDED
@@ -0,0 +1,12 @@
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from torch import Tensor
+
+
+class SharedEmbedding(nn.Embedding):
+
+    def forward(self, input: Tensor, unembed: bool = False) -> Tensor:
+        if unembed:
+            return F.linear(input, self.weight)
+        return super().forward(input)
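
For context on what the diff does: SharedEmbedding subclasses nn.Embedding so that a single weight matrix serves both as the input embedding and as the output (unembedding) projection, the standard weight-tying setup. On the default path it behaves exactly like nn.Embedding; with unembed=True it instead computes logits as input @ weight.T via F.linear. A minimal, self-contained usage sketch follows, with the class copied from the diff above; the sizes vocab_size and d_model are hypothetical, chosen only for illustration and not taken from the PR.

import torch
import torch.nn as nn
import torch.nn.functional as F
from torch import Tensor


class SharedEmbedding(nn.Embedding):
    """Embedding whose weight matrix is reused as the output projection."""

    def forward(self, input: Tensor, unembed: bool = False) -> Tensor:
        if unembed:
            # Hidden states (..., d_model) -> logits (..., vocab_size),
            # computed as input @ self.weight.T with the shared weights.
            return F.linear(input, self.weight)
        return super().forward(input)


# Hypothetical sizes, for illustration only (not from the PR).
vocab_size, d_model = 100, 16
emb = SharedEmbedding(vocab_size, d_model)

# Forward direction: token ids -> embeddings.
token_ids = torch.randint(0, vocab_size, (2, 5))
hidden = emb(token_ids)             # shape (2, 5, 16)

# Reverse direction: embeddings -> vocabulary logits, same weight matrix.
logits = emb(hidden, unembed=True)  # shape (2, 5, 100)

assert hidden.shape == (2, 5, d_model)
assert logits.shape == (2, 5, vocab_size)

Because both directions read the same Parameter, gradients from the logits also update the embedding table, and no separate d_model x vocab_size output projection needs to be allocated.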