import torch
import torch.nn as nn
import inspect
from ..utils import get_model_class


class BaseModel(nn.Module):
    """Base wrapper around a pretrained transformer backbone.

    Loads a pretrained model (HuggingFace-style ``from_pretrained``) and
    exposes :meth:`forward_transformer` to run it and select either the full
    sequence output or only the first-token ([CLS]) vector.

    Subclasses are expected to define ``forward`` and ``compute_loss``;
    :meth:`train_step` relies on both (``compute_loss`` is not defined here).
    """

    #: accepted values for ``transformer_output_type``
    _OUTPUT_TYPES = ('seq', 'cls')

    def __init__(self, args, transformer_output_type='seq'):
        """Build the model from a pretrained checkpoint.

        Args:
            args: config object providing ``model_type`` (key resolved by
                ``get_model_class``) and ``model_path`` (checkpoint location).
            transformer_output_type: ``'seq'`` to return the whole hidden
                sequence from :meth:`forward_transformer`, ``'cls'`` to return
                only the first token's hidden state.

        Raises:
            ValueError: if ``transformer_output_type`` is not one of
                ``'seq'`` / ``'cls'`` (fail fast instead of silently
                returning ``None`` later).
        """
        super().__init__()
        if transformer_output_type not in self._OUTPUT_TYPES:
            raise ValueError(
                f"transformer_output_type must be one of {self._OUTPUT_TYPES}, "
                f"got {transformer_output_type!r}"
            )
        model_class = get_model_class(args.model_type)
        self.transformer = model_class.from_pretrained(args.model_path)
        self.transformer_output_type = transformer_output_type

    def forward_transformer(self, input_ids, attention_mask, **kwargs):
        """Run the backbone and select the configured output.

        ``return_dict=False`` forces a tuple return; the first element is the
        sequence of hidden states (assumed ``(batch, seq_len, hidden)`` per
        the HuggingFace convention — TODO confirm for all model types used).

        Returns:
            The full sequence output when ``transformer_output_type == 'seq'``,
            or the first-token slice ``[:, 0]`` when it is ``'cls'``.

        Raises:
            ValueError: if ``transformer_output_type`` holds an unknown value
                (previously this fell through and returned ``None``).
        """
        sequence_output, *_ = self.transformer(input_ids, attention_mask, **kwargs, return_dict=False)
        if self.transformer_output_type == 'seq':
            return sequence_output
        if self.transformer_output_type == 'cls':
            return sequence_output[:, 0]
        raise ValueError(
            f"unknown transformer_output_type: {self.transformer_output_type!r}"
        )

    def _init_weights(self, module):
        """Initialize the weights of a single submodule.

        Mirrors the standard BERT-style init: normal(0, 0.02) for Linear and
        Embedding weights, zeros for biases and padding embeddings, and the
        identity configuration (weight=1, bias=0) for LayerNorm.
        """
        if isinstance(module, nn.Linear):
            module.weight.data.normal_(mean=0.0, std=0.02)
            if module.bias is not None:
                module.bias.data.zero_()
        elif isinstance(module, nn.Embedding):
            module.weight.data.normal_(mean=0.0, std=0.02)
            if module.padding_idx is not None:
                # Keep the padding embedding at zero so padded positions
                # contribute nothing.
                module.weight.data[module.padding_idx].zero_()
        elif isinstance(module, nn.LayerNorm):
            module.bias.data.zero_()
            module.weight.data.fill_(1.0)

    def train_step(self, batch):
        """Run one training forward pass and return the loss.

        Args:
            batch: dict of keyword arguments unpacked into ``forward``.

        Note: requires the subclass to implement ``compute_loss(out, batch)``.
        """
        out = self(**batch)
        loss = self.compute_loss(out, batch)
        return loss