import torch
import torch.nn as nn
from transformers import BertModel

import configs


class ClassifyModel(nn.Module):
    """Text classifier: frozen BERT embeddings -> BiLSTM -> fusion with
    handcrafted statistical features -> fully-connected classifier.

    The model outputs class *probabilities* (Softmax is applied inside).
    NOTE(review): if this is trained with ``nn.CrossEntropyLoss`` the Softmax
    here double-normalizes the logits — confirm the training loss before use.
    """

    def __init__(
            self,
            pretrain_bert_model,
            lstm_hidden_size,
            lstm_n_layers,
            stat_n_features,
            fusion_n_out,
            classifier_dropout,
            out_n_classes=2,
            device: str | None = configs.device
    ):
        """
        Args:
            pretrain_bert_model: a loaded ``BertModel``; its weights are frozen here.
            lstm_hidden_size: hidden size per LSTM direction.
            lstm_n_layers: number of stacked LSTM layers.
            stat_n_features: number of extra statistical features concatenated
                after the LSTM (dimension of ``stats_features`` in ``forward``).
            fusion_n_out: output width of the fusion linear layer.
            classifier_dropout: dropout probability in the classifier head.
            out_n_classes: number of output classes (default binary).
            device: target device for the trainable submodules.
        """
        super().__init__()

        # BERT embedding backbone (kept frozen: we only train LSTM/fusion/classifier).
        self.bert_model: BertModel = pretrain_bert_model
        for param in self.bert_model.parameters():
            param.requires_grad = False

        # Bidirectional LSTM over the BERT token embeddings.
        bert_hidden_size = self.bert_model.config.hidden_size
        self.lstm = nn.LSTM(
            input_size=bert_hidden_size,
            hidden_size=lstm_hidden_size,
            num_layers=lstm_n_layers,
            batch_first=True,
            bidirectional=True,
            device=device
        )

        # Features fusion layer (BiLSTM final states + statistical features).
        # BUG FIX: `device` was previously passed only to the LSTM, leaving the
        # fusion/classifier Linear layers on CPU and crashing with a device
        # mismatch whenever configs.device is e.g. "cuda". Pass it everywhere.
        self.fusion = nn.Linear(
            in_features=lstm_hidden_size * 2 + stat_n_features,
            out_features=fusion_n_out,
            device=device
        )

        # Fully-connected classifier head producing class probabilities.
        self.classifier = nn.Sequential(
            nn.ReLU(),
            nn.Dropout(classifier_dropout),
            nn.Linear(in_features=fusion_n_out, out_features=out_n_classes, device=device),
            nn.Softmax(dim=1)
        )

        print("[ClassifyModel] __init__ ok")

    def forward(self, in_ids, attention_mask, stats_features):
        """Run a forward pass.

        Args:
            in_ids: token id tensor, shape (batch, seq_len).
            attention_mask: BERT attention mask, shape (batch, seq_len).
            stats_features: extra features, shape (batch, stat_n_features).

        Returns:
            Class probability tensor of shape (batch, out_n_classes).
        """
        # Frozen BERT token embeddings: (batch, seq_len, bert_hidden_size).
        bert_output = self.bert_model(input_ids=in_ids, attention_mask=attention_mask)
        seq_output = bert_output.last_hidden_state

        # BiLSTM; lstm_out is (batch, seq_len, 2 * hidden_size) with the
        # forward direction in the first half of the last dim, backward in the second.
        lstm_out, _ = self.lstm(seq_output)
        # Final forward state = last time step; final backward state = first time step.
        # NOTE(review): with right-padded batches, index -1 falls on padding for
        # shorter sequences — confirm inputs are unpadded or packed upstream.
        final_forward = lstm_out[:, -1, :self.lstm.hidden_size]
        final_backward = lstm_out[:, 0, self.lstm.hidden_size:]
        lstm_final = torch.cat([final_forward, final_backward], dim=1)

        # Fuse sequence representation with statistical features, then classify.
        combined = torch.cat([lstm_final, stats_features], dim=1)
        fused = self.fusion(combined)
        res = self.classifier(fused)

        return res

    def custom_state_dict(self):
        """Return only the trainable submodules' state (BERT is frozen and excluded)."""
        return {"lstm": self.lstm.state_dict(), "fu": self.fusion.state_dict(), "cls": self.classifier.state_dict()}

    def load_custom_dict(self, state_dict):
        """Restore the trainable submodules from a dict produced by ``custom_state_dict``."""
        self.lstm.load_state_dict(state_dict["lstm"])
        self.fusion.load_state_dict(state_dict["fu"])
        self.classifier.load_state_dict(state_dict["cls"])


if __name__ == '__main__':
    # Smoke test.
    # BUG FIX: the previous call `ClassifyModel(128, 2, 10, 256, 0.3, 2)` passed
    # the int 128 as `pretrain_bert_model` (and shifted every other argument),
    # crashing immediately on `.parameters()`. Load a real BERT backbone and
    # pass the hyperparameters by keyword.
    bert = BertModel.from_pretrained("bert-base-uncased")  # TODO(review): use the checkpoint name from configs if one exists
    model = ClassifyModel(
        pretrain_bert_model=bert,
        lstm_hidden_size=128,
        lstm_n_layers=2,
        stat_n_features=10,
        fusion_n_out=256,
        classifier_dropout=0.3,
        out_n_classes=2,
    )
    print(model)
