import torch
import torch.nn as nn
from vector_quantize_pytorch import ResidualVQ


class RVQWrapper(nn.Module):
    """Residual vector quantization with learned input/output projections."""

    def __init__(self, dim, num_quantizers, codebook_size):
        super().__init__()
        # Normalize and project features before quantization.
        self.ln_in = nn.LayerNorm(dim)
        self.proj_in = nn.Linear(dim, dim)
        self.rvq = ResidualVQ(dim=dim, num_quantizers=num_quantizers, codebook_size=codebook_size)
        # Normalize and project the quantized features back out.
        self.ln_out = nn.LayerNorm(dim)
        self.proj_out = nn.Linear(dim, dim)

    def forward(self, x):
        x = self.proj_in(self.ln_in(x))
        # ResidualVQ returns the quantized tensor, the per-quantizer code
        # indices, and the commitment loss.
        q, indices, commit_loss = self.rvq(x)
        y = self.proj_out(self.ln_out(q))
        return y, indices, commit_loss
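

# Usage sketch. The shapes and hyperparameters below are illustrative
# assumptions, not taken from the original. ResidualVQ consumes inputs of
# shape (batch, seq_len, dim); indices carry one code per quantizer, and
# commit_loss is reported per quantizer, so it is summed before use.
if __name__ == "__main__":
    model = RVQWrapper(dim=256, num_quantizers=8, codebook_size=1024)
    x = torch.randn(2, 128, 256)         # (batch, seq_len, dim)
    y, indices, commit_loss = model(x)
    print(y.shape)                       # torch.Size([2, 128, 256])
    print(indices.shape)                 # torch.Size([2, 128, 8])
    loss = commit_loss.sum()             # combine per-quantizer commitment losses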