import torch
from torch import nn
from classifier.ease import CosineLinear
from utils.projectors import _PROJECTORS
from .base import Base


class BaseMulti(Base):
    """Base network for multi-branch ViT backbones.

    Token/feature extraction is dispatched dynamically to the backbone
    method named after ``mode`` (e.g. ``forward_<mode>_token``).
    """

    def __init__(self, args):
        super().__init__(args)
        # Optional override for the classifier-head factory; None means default.
        self.fc_func = args.get("fc_func", None)
        self.constant_dim = False

    def forward(self, x, mode="", ca=False) -> dict:
        # Classifier-alignment path: `x` already holds extracted features.
        if ca:
            return self.fc(x, mode)

        features = self.extract_token(x, mode)
        output = self.fc(features, mode)
        output["features"] = features
        return output

    def extract_token(self, x, mode=""):
        """Extract cls token features from the vit backbone.

        Return:
            torch.Tensor: [B, T, D]
        """
        # Resolve e.g. `forward_all_token` on the backbone and invoke it.
        extractor = getattr(self.backbone, f"forward_{mode.lower()}_token")
        return extractor(x)

    def extract_feats(self, x, mode=""):
        """Extract all (cls token and patch token) features from the vit backbone.

        Return:
            torch.Tensor: [B, T, L, D]
        """
        extractor = getattr(self.backbone, f"forward_{mode.lower()}_feats")
        return extractor(x)


class MultiSSL(BaseMulti):
    """BaseMulti variant with a projection head for self-supervised learning."""

    def __init__(self, args):
        super().__init__(args)
        # Projector maps backbone features into the SSL embedding space;
        # input and output widths both match the backbone feature dim.
        self.projector = _PROJECTORS[args["projector"]](
            ft_dim=self.out_dim,
            bottleneck_dim=self.out_dim,
        )

    def extract_token_ssl(self, x, proj_mode=""):
        """Extract cls token features from the vit backbone,
        and project the current-task token for Self-Supervised Learning.

        Args:
            x: input batch.
            proj_mode: "cur" projects only the last (current-task) token;
                "all" is deliberately disabled.

        Returns:
            torch.Tensor: [B, T, D] tokens with the last token replaced
            by its projection.

        Raises:
            ValueError: for any proj_mode other than "cur".
        """
        tokens = self.extract_token(x, mode="all")
        if proj_mode == "cur":
            # Project only the current-task token; earlier tokens pass through.
            proj_token = self.projector(tokens[:, -1])
            return torch.cat([tokens[:, :-1], proj_token.unsqueeze(1)], dim=1)
        if proj_mode == "all":
            # Guard against projecting every task token; raise before doing
            # any projector work (previously the projection was computed and
            # then discarded by this unconditional raise).
            raise ValueError("Are you sure ? ALL OF THEM?")
        raise ValueError(f"Unknown proj_mode: {proj_mode!r}")


def generate_proxy_proj_fc(in_dim, out_dim, **fc_kwargs):
    """Build a proxy head: a 2-layer MLP projector followed by a cosine classifier.

    Args:
        in_dim: input feature dimension.
        out_dim: number of output classes.
        **fc_kwargs: optional overrides — ``hidden_dim`` (default 4096) and
            ``bottleneck_dim`` (default 512). Unknown keys are ignored,
            matching the previous behavior of discarding all kwargs.

    Returns:
        nn.Sequential: ``Sequential(mlp, CosineLinear)`` with gradients enabled.
    """
    hidden_dim = fc_kwargs.get("hidden_dim", 4096)
    bottleneck_dim = fc_kwargs.get("bottleneck_dim", 512)
    mlp_layers = nn.Sequential(
        nn.Linear(in_dim, hidden_dim),
        nn.ReLU(),
        nn.Linear(hidden_dim, bottleneck_dim),
    )
    # Kaiming init for the linear layers, zero biases. (The previous
    # BatchNorm1d branch was dead code: the Sequential holds only
    # Linear and ReLU modules.)
    for layer in mlp_layers:
        if isinstance(layer, nn.Linear):
            nn.init.kaiming_normal_(layer.weight)
            nn.init.zeros_(layer.bias)

    fc = CosineLinear(bottleneck_dim, out_dim)
    proxy_layer = nn.Sequential(mlp_layers, fc)
    proxy_layer.requires_grad_(True)
    return proxy_layer


# class EaseNet(Base):
#     backbone: bb.vease
#     fc: CosineLinear

#     def __init__(self, args):
#         super(EaseNet, self).__init__(args)

#         self._cur_task = -1
#         self.out_dim = self.backbone.out_dim
#         self.alpha = args["alpha"]
#         self.beta = args["beta"]
#         self.use_reweight = args["use_reweight"]

#     @property
#     def feature_dim(self):
#         return self.out_dim * (self._cur_task + 1)

#     # (proxy_fc = cls * dim)
#     def update_fc(self, nb_classes, *args, **kwargs):
#         self._cur_task += 1

#         self.proxy_fc = self.generate_fc(
#             self.out_dim, self.init_cls if self._cur_task == 0 else self.inc_cls
#         )
#         self.proxy_fc = self.proxy_fc.to(self._device)

#         fc = self.generate_fc(self.feature_dim, nb_classes)
#         fc = fc.to(self._device)
#         fc.reset_parameters_to_zero()

#         if getattr(self, "fc", None) is not None:
#             old_nb_classes = self.fc.out_features
#             weight = copy.deepcopy(self.fc.weight.data)
#             fc.weight.data[:old_nb_classes, : -self.out_dim] = nn.Parameter(weight)
#         if hasattr(self, "fc"):
#             del self.fc
#         self.fc = fc

#     def generate_fc(self, in_dim, out_dim):
#         fc = CosineLinear(in_dim, out_dim)
#         return fc

#     def forward(self, x, test=False):
#         if test is False:
#             x = self.backbone.forward(x, mode="cur")
#             out = self.proxy_fc(x)
#         else:
#             x = self.backbone.forward(x, mode="all")
#             if not self.use_reweight:
#                 out = self.fc(x)
#             else:
#                 out = self.fc.forward_reweight(
#                     x,
#                     cur_task=self._cur_task,
#                     alpha=self.alpha,
#                     init_cls=self.init_cls,
#                     inc=self.inc_cls,
#                     beta=self.beta,
#                 )

#         out.update({"features": x})
#         return out
