Create custom_embedding.py

#37
by bcui19 - opened
Files changed (1) hide show
  1. custom_embedding.py +12 -0
custom_embedding.py ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import torch
2
+ import torch.nn as nn
3
+ import torch.nn.functional as F
4
+ from torch import Tensor
5
+
6
+
7
class SharedEmbedding(nn.Embedding):
    """An embedding table whose weight is shared with the output projection.

    Behaves exactly like ``nn.Embedding`` for the normal (embed) direction.
    With ``unembed=True`` the same weight matrix is reused as a linear map
    from hidden states back to vocabulary logits (weight tying).
    """

    def forward(self, input: Tensor, unembed: bool = False) -> Tensor:
        """Embed token ids, or project hidden states to logits.

        Args:
            input: Token ids (embed direction) or hidden states
                (unembed direction).
            unembed: When ``True``, apply the transposed embedding matrix
                to ``input`` instead of looking up rows.

        Returns:
            Embedded vectors, or vocabulary logits when ``unembed=True``.
        """
        if not unembed:
            # Ordinary lookup path — defer to nn.Embedding.
            return super().forward(input)
        # Tied-weight projection: logits = input @ weight.T (no bias).
        return F.linear(input, self.weight)