import torch.nn as nn
import torch


# from training.models.convnext_fusion import convnext_tiny, convnext_small, convnext_large_in22k
from training.models.cbam.cbam_convnext_fusion import convnext_tiny, convnext_small, convnext_large_in22k

# Public API of this module: only the factory function is exported.
__all__ = ['rgb_lace_convnext']


class RgbLaceConvnext(nn.Module):
    """Two-stream model that forwards an RGB image and a LACE image
    through a fusion ConvNeXt backbone (CBAM variant, large/in22k).

    Args:
        num_classes: number of output classes of the backbone head.
        drop_rate: dropout rate passed through to the backbone.
        pretrained: whether the backbone loads pretrained weights.
    """

    def __init__(self, num_classes, drop_rate, pretrained):
        super().__init__()
        # Fusion backbone consumes both input streams jointly.
        self.convnext = convnext_large_in22k(
            num_classes=num_classes,
            drop_rate=drop_rate,
            pretrained=pretrained,
        )

    def forward(self, x_rgb, x_lace):
        """Run both streams through the fusion backbone and return its output."""
        return self.convnext(x_rgb, x_lace)


def rgb_lace_convnext(num_classes=2, drop_rate=0., pretrained=False):
    """Factory for :class:`RgbLaceConvnext`.

    Copies the backbone's ``default_cfg`` onto the wrapper so callers
    (e.g. timm-style training code) can read it from the top-level model.
    """
    net = RgbLaceConvnext(num_classes, drop_rate, pretrained)
    net.default_cfg = net.convnext.default_cfg
    return net


if __name__ == "__main__":
    # Smoke test: forward one random batch of paired RGB/LACE inputs
    # (batch=4, 3x224x224 each) and print the output shape.
    model = RgbLaceConvnext(num_classes=2, drop_rate=0., pretrained=False)
    x_rgb = torch.randn(4, 3, 224, 224)
    x_lace = torch.randn(4, 3, 224, 224)
    out = model(x_rgb, x_lace)
    # Fix: the original used a no-op `.format()` on a placeholder-free
    # string; print the label and shape directly instead.
    print("regression:", out.shape)