import torch
import torch.nn as nn
from model.attention import AttentionModel
from model.mixture_of_experts import HeirarchicalMoE, MoE
from model.encoder import MultiViewEncoder
from model.classifier import Classifier, AttClassifier
from model.fuselayer import FuseLayer
from loguru import logger


class MoVE_model(nn.Module):
    """Multi-view NER model: multi-view encoder -> fuse layer -> classifier.

    The fusion strategy is selected by ``args.fuse_type``; when it is an
    MoE-style fusion ("moe" or "hmoe") the fuse layer additionally returns
    an auxiliary load-balancing loss which is propagated to the caller.
    """

    # Fuse types whose fuse layer returns (features, aux_loss) instead of
    # just features.
    _MOE_FUSE_TYPES = ("moe", "hmoe")

    def __init__(self, data, args):
        """Build encoder, fuse layer, and classifier.

        Args:
            data: project data/config object passed to the sub-modules
                (schema defined by the sub-modules — see model.encoder etc.).
            args: argument namespace; only ``args.fuse_type`` is read here.
        """
        super(MoVE_model, self).__init__()

        self.encoder = MultiViewEncoder(data)
        self.fuse_type = args.fuse_type

        self.fuselayer = FuseLayer(
            data=data,
            fuse_type=self.fuse_type,
        )

        # NOTE(review): AttClassifier is used instead of the plain
        # Classifier; both are imported at file top.
        self.classifier = AttClassifier(data)

    def forward(
        self,
        batch_word,
        batch_biword,
        layer_gaz,
        gaz_count,
        gaz_chars,
        gaz_mask,
        gazchar_mask,
        batch_bert,
        bert_mask,
        batch_label,
        scopes,
    ):
        """Encode the batch, fuse the views, and classify.

        The first nine arguments are forwarded unchanged to the encoder;
        ``batch_label`` and ``scopes`` go to the classifier (their exact
        shapes/semantics are defined by those sub-modules — not visible
        here).

        Returns:
            ``(logit, aux_loss)`` when ``fuse_type`` is "moe"/"hmoe",
            otherwise just ``logit``.
        """
        feature = self.encoder(
            batch_word,
            batch_biword,
            layer_gaz,
            gaz_count,
            gaz_chars,
            gaz_mask,
            gazchar_mask,
            batch_bert,
            bert_mask,
        )

        if self.fuse_type in self._MOE_FUSE_TYPES:
            # MoE fusion yields an auxiliary (load-balancing) loss that the
            # training loop must add to the main objective.
            m_feature, aux_loss = self.fuselayer(feature)
            logit = self.classifier(m_feature, batch_label, scopes)
            return logit, aux_loss

        m_feature = self.fuselayer(feature)
        logit = self.classifier(m_feature, batch_label, scopes)
        return logit
