import torch.nn as nn


from training.models.vit_mlp.mlp_mixer_concatenate_fusion import gmlp_s16_224

__all__ = ['rgb_lace_mixer']


class RgbLaceMixer(nn.Module):
    """Two-stream classifier fusing RGB and LACE inputs.

    Both streams are handed to a single fusion-capable ``gmlp_s16_224``
    backbone (from the concatenate-fusion mixer module), which combines
    them internally and emits the class logits.
    """

    def __init__(self, num_classes, drop_rate, pretrained):
        """Build the wrapped gMLP fusion backbone.

        Args:
            num_classes: size of the output logit vector.
            drop_rate: dropout rate forwarded to the backbone.
            pretrained: whether the backbone loads pretrained weights.
        """
        super().__init__()
        self.mixer = gmlp_s16_224(
            num_classes=num_classes,
            drop_rate=drop_rate,
            pretrained=pretrained,
        )

    def forward(self, x_rgb, x_lace):
        """Feed both input streams to the fusion backbone; return its output."""
        return self.mixer(x_rgb, x_lace)


def rgb_lace_mixer(num_classes=2, drop_rate=0., pretrained=False):
    """Factory for :class:`RgbLaceMixer`.

    Args:
        num_classes: number of output classes (default 2).
        drop_rate: dropout rate for the backbone (default 0.).
        pretrained: load pretrained backbone weights (default False).

    Returns:
        A ``RgbLaceMixer`` whose ``default_cfg`` is copied from its
        wrapped backbone (timm-style convention).
    """
    net = RgbLaceMixer(num_classes, drop_rate, pretrained)
    # Expose the backbone's config on the wrapper so downstream code that
    # inspects ``default_cfg`` keeps working.
    net.default_cfg = net.mixer.default_cfg
    return net
