remove dependency on x-transformers
- models/arch_util.py   +1 -2
- requirements.txt      +1 -2
models/arch_util.py
CHANGED
@@ -5,8 +5,7 @@ import torch
 import torch.nn as nn
 import torch.nn.functional as F
 import torchaudio
-from x_transformers import ContinuousTransformerWrapper
-from x_transformers.x_transformers import RelativePositionBias
+from models.xtransformers import ContinuousTransformerWrapper, RelativePositionBias
 
 
 def zero_module(module):
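The import now points at models.xtransformers, which appears to be an in-repo copy of the pieces of x-transformers that arch_util.py actually uses. A quick sanity check for the change is sketched below; it assumes you run it from the repository root with the repo on sys.path, and that both classes are defined directly in the vendored module rather than re-exported.

# Sanity check: the two symbols previously supplied by the x-transformers
# pip package should now resolve from the vendored models.xtransformers module.
from models.xtransformers import ContinuousTransformerWrapper, RelativePositionBias

print(ContinuousTransformerWrapper.__module__)  # expected: models.xtransformers
print(RelativePositionBias.__module__)          # expected: models.xtransformers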
requirements.txt
CHANGED
@@ -6,5 +6,4 @@ tokenizers
 inflect
 progressbar
 einops
-unidecode
-x-transformers
+unidecode
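With x-transformers dropped from requirements.txt, a fresh install of the project no longer pulls the package at all. A minimal sketch for checking an existing environment, assuming Python 3.4+ so that importlib.util.find_spec is available:

# Report whether the x_transformers distribution is still importable; after
# this commit the project code no longer needs it either way.
import importlib.util

if importlib.util.find_spec("x_transformers") is None:
    print("x_transformers is not installed; only the vendored copy is used.")
else:
    print("x_transformers is still installed, but nothing here imports it now.")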