Hugo Flores committed
Commit fc839a6
1 Parent(s): 5582d2e
.gitignore CHANGED
@@ -171,3 +171,5 @@ archived/
 scratch/
 
 runs-archive
+lyrebird-audiotools
+lyrebird-audio-codec
vampnet/modules/activations.py ADDED
@@ -0,0 +1,56 @@
+import math  # used by NewGELU below
+import numpy as np
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from einops import rearrange
+
+
+
+class NewGELU(nn.Module):
+    """
+    Implementation of the GELU activation function currently in Google BERT repo
+    (identical to OpenAI GPT). Also see the Gaussian Error Linear Units
+    paper: https://arxiv.org/abs/1606.08415
+    """
+
+    def forward(self, x):
+        return (
+            0.5
+            * x
+            * (
+                1.0
+                + torch.tanh(
+                    math.sqrt(2.0 / math.pi) * (x + 0.044715 * torch.pow(x, 3.0))
+                )
+            )
+        )
+
+class GatedGELU(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.gelu = NewGELU()
+
+    def forward(self, x, dim: int = -1):
+        p1, p2 = x.chunk(2, dim=dim)
+        return p1 * self.gelu(p2)
+
+class Snake1d(nn.Module):
+    def __init__(self, channels):
+        super().__init__()
+        self.alpha = nn.Parameter(torch.ones(channels))
+
+    def forward(self, x):
+        return x + (self.alpha + 1e-9).reciprocal() * torch.sin(self.alpha * x).pow(2)
+
+def get_activation(name: str = "relu"):
+    if name == "relu":
+        return nn.ReLU
+    elif name == "gelu":
+        return NewGELU
+    elif name == "geglu":
+        return GatedGELU
+    elif name == "snake":
+        return Snake1d
+    else:
+        raise ValueError(f"Unrecognized activation {name}")
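
For context, a minimal usage sketch of the new activations module (not part of the commit; the tensor shapes and projection sizes below are illustrative). get_activation("geglu") returns the GatedGELU class, which splits its input in half along the last dimension and gates one half with the other, so the projection feeding it must produce twice the intended hidden width:

import torch
import torch.nn as nn

from vampnet.modules.activations import get_activation

act = get_activation("geglu")()             # GatedGELU instance

d_model, d_inner = 512, 2048
proj_in = nn.Linear(d_model, d_inner * 2)   # doubled width for the gated split
proj_out = nn.Linear(d_inner, d_model)

x = torch.randn(4, 100, d_model)            # (batch, time, d_model)
h = act(proj_in(x))                          # chunk + gate -> (4, 100, d_inner)
y = proj_out(h)                              # back to (4, 100, d_model)
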
vampnet/modules/{modules.py → layers.py} RENAMED
@@ -26,25 +26,6 @@ def recurse_children(module, fn):
         yield fn(child)
 
 
-# Scripting this brings model speed up 1.4x
-@torch.jit.script
-def snake(x, alpha):
-    shape = x.shape
-    x = x.reshape(shape[0], shape[1], -1)
-    x = x + (alpha + 1e-9).reciprocal() * torch.sin(alpha * x).pow(2)
-    x = x.reshape(shape)
-    return x
-
-
-class Snake1d(nn.Module):
-    def __init__(self, channels):
-        super().__init__()
-        self.alpha = nn.Parameter(torch.ones(1, channels, 1))
-
-    def forward(self, x):
-        return snake(x, self.alpha)
-
-
 def WNConv1d(*args, **kwargs):
     return weight_norm(nn.Conv1d(*args, **kwargs))
 
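For reference, the helper removed here computes the periodic "snake" activation, x + sin²(αx)/α, with a learnable per-channel slope α. A self-contained sketch of the same computation (outside the repo; shapes are illustrative):

import torch

def snake(x: torch.Tensor, alpha: torch.Tensor) -> torch.Tensor:
    # x + (1 / alpha) * sin(alpha * x) ** 2; the 1e-9 guards against division by zero
    return x + (alpha + 1e-9).reciprocal() * torch.sin(alpha * x).pow(2)

x = torch.randn(2, 8, 16)        # (batch, channels, time)
alpha = torch.ones(1, 8, 1)      # per-channel slope, as in the removed Snake1d
print(snake(x, alpha).shape)     # torch.Size([2, 8, 16])
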
vampnet/modules/transformer.py CHANGED
@@ -7,10 +7,11 @@ import torch.nn.functional as F
 from einops import rearrange
 
 from .base import VampBase
-from .modules import CodebookEmbedding
-from .modules import FiLM
-from .modules import SequentialWithFiLM
-from .modules import WNConv1d
+from .activations import get_activation
+from .layers import CodebookEmbedding
+from .layers import FiLM
+from .layers import SequentialWithFiLM
+from .layers import WNConv1d
 
 
 class RMSNorm(nn.Module):
@@ -37,58 +38,6 @@ class RMSNorm(nn.Module):
         return self.weight * x
 
 
-def get_activation(name: str = "relu"):
-    if name == "relu":
-        return nn.ReLU
-    elif name == "gelu":
-        return NewGELU
-    elif name == "geglu":
-        return GatedGELU
-    elif name == "snake":
-        return Snake1d
-    else:
-        raise ValueError(f"Unrecognized activation {name}")
-
-
-class NewGELU(nn.Module):
-    """
-    Implementation of the GELU activation function currently in Google BERT repo
-    (identical to OpenAI GPT). Also see the Gaussian Error Linear Units
-    paper: https://arxiv.org/abs/1606.08415
-    """
-
-    def forward(self, x):
-        return (
-            0.5
-            * x
-            * (
-                1.0
-                + torch.tanh(
-                    math.sqrt(2.0 / math.pi) * (x + 0.044715 * torch.pow(x, 3.0))
-                )
-            )
-        )
-
-
-class GatedGELU(nn.Module):
-    def __init__(self):
-        super().__init__()
-        self.gelu = NewGELU()
-
-    def forward(self, x, dim: int = -1):
-        p1, p2 = x.chunk(2, dim=dim)
-        return p1 * self.gelu(p2)
-
-
-class Snake1d(nn.Module):
-    def __init__(self, channels):
-        super().__init__()
-        self.alpha = nn.Parameter(torch.ones(channels))
-
-    def forward(self, x):
-        return x + (self.alpha + 1e-9).reciprocal() * torch.sin(self.alpha * x).pow(2)
-
-
 class FeedForward(nn.Module):
     def __init__(
         self, d_model: int = 512, dropout: float = 0.1, activation: str = "geglu"
@@ -572,7 +521,7 @@ class VampNet(VampBase):
 
 if __name__ == "__main__":
     # import argbind
-    from .modules import num_params
+    from .layers import num_params
 
     VampNet = argbind.bind(VampNet)
 
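
As a quick end-to-end check of the relocated code (again not part of the commit, and assuming the vampnet package is importable), each activation returned by get_activation can be instantiated and run on a dummy tensor; note that "geglu" halves the last dimension and "snake" requires a channel count:

import torch
from vampnet.modules.activations import get_activation

x = torch.randn(2, 10, 64)
for name in ["relu", "gelu", "geglu", "snake"]:
    cls = get_activation(name)
    act = cls(64) if name == "snake" else cls()   # Snake1d takes `channels`
    print(name, tuple(act(x).shape))
# relu/gelu/snake keep (2, 10, 64); geglu gives (2, 10, 32)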