jbetker committed
Commit 053b8d1
Parent: 0b496a0

remove xt dep

Files changed (1)
  1. models/xtransformers.py +0 -2
models/xtransformers.py CHANGED
@@ -13,8 +13,6 @@ from einops.layers.torch import Rearrange
 from entmax import entmax15
 from torch.utils.checkpoint import checkpoint
 
-from x_transformers.autoregressive_wrapper import AutoregressiveWrapper
-
 DEFAULT_DIM_HEAD = 64
 
 Intermediates = namedtuple('Intermediates', [
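Only the import line and its trailing blank line are deleted (+0 -2), so AutoregressiveWrapper is presumably unused elsewhere in the file and the x_transformers runtime dependency can be dropped. A minimal sanity check, as a sketch (it assumes the module is importable as models.xtransformers from the repo root and that the remaining dependencies such as torch, einops, and entmax are installed):

import importlib
import sys

# Setting a sys.modules entry to None makes any later import of that name raise
# ImportError, simulating an environment where x_transformers is not installed.
sys.modules["x_transformers"] = None

# If a stray x_transformers import remained in the file, this would fail loudly.
importlib.import_module("models.xtransformers")
print("models.xtransformers imports without x_transformers")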
 