# encoding: utf-8
"""
@author:  tianpengwu
@contact: tianpengwu@outlook.com
"""

import torch
from torch import nn
from transformers import BertModel
# from fastreid.modeling.backbones import build_backbone
# from fastreid.modeling.heads import build_reid_heads
# from fastreid.modeling.losses import *
from torch import cuda
from .build import META_ARCH_REGISTRY


@META_ARCH_REGISTRY.register()
class Baseline(nn.Module):
    """BERT-based text-classification baseline.

    Encodes the input with a pretrained BERT backbone, takes the [CLS]
    token representation, and classifies it with a small MLP head
    (Linear -> ReLU -> Dropout -> Linear) into 3 classes.

    Training mode returns a dict of logits and targets (consumed by
    ``losses``); eval mode returns ``torch.max`` over the logits, i.e. a
    (values, indices) pair of per-sample max score and predicted class.
    """

    def __init__(self, cfg):
        """Build the model from the experiment config.

        Args:
            cfg: config node providing ``MODEL.DEVICE`` and
                ``MODEL.BACKBONE.PRETRAINED_PATH`` (local path or hub id
                accepted by ``BertModel.from_pretrained``).
        """
        super().__init__()
        self._cfg = cfg
        self.device = cfg.MODEL.DEVICE
        # Pretrained BERT encoder; hidden size is assumed to be 768
        # (bert-base) — the head dimensions below depend on it.
        self.l1 = BertModel.from_pretrained(self._cfg.MODEL.BACKBONE.PRETRAINED_PATH)
        self.pre_classifier = nn.Linear(768, 768)
        self.dropout = nn.Dropout(0.3)
        # 3-way classification head.
        self.classifier = nn.Linear(768, 3)

    def forward(self, batched_inputs):
        """Run the model on a batch.

        Args:
            batched_inputs: dict with tensors ``'ids'`` and ``'masks'``
                (token ids and attention mask); ``'targets'`` is required
                only in training mode.

        Returns:
            Training: ``{"outputs": logits, "target": targets}``.
            Eval: ``torch.max(logits, dim=1)`` — (values, indices).
        """
        ids = batched_inputs['ids'].to(self.device)
        masks = batched_inputs['masks'].to(self.device)
        encoded = self.l1(input_ids=ids, attention_mask=masks)
        # [CLS] token embedding as the sequence representation.
        pooled = encoded[0][:, 0]
        pooled = torch.relu(self.pre_classifier(pooled))
        pooled = self.dropout(pooled)
        output = self.classifier(pooled)
        if self.training:
            # Targets are only needed (and only required to exist) when
            # training — fetching them here lets label-free inference work.
            targets = batched_inputs['targets'].to(self.device)
            return {
                "outputs": output,
                "target": targets,
            }
        # .detach() replaces the deprecated .data access; same values,
        # explicitly cut from the autograd graph.
        return torch.max(output.detach(), dim=1)

    def losses(self, outs):
        """Compute the configured losses from training-mode outputs.

        Args:
            outs: dict produced by ``forward`` in training mode
                (``"outputs"`` logits, ``"target"`` class indices).

        Returns:
            dict mapping loss name to scalar tensor; ``'loss_cls'`` is
            added when CrossEntropyLoss is enabled in the config.
        """
        loss_dict = {}
        if "CrossEntropyLoss" in self._cfg.MODEL.LOSSES.NAME:
            # Functional form is equivalent to nn.CrossEntropyLoss()(...)
            # without constructing a module per call.
            loss_dict['loss_cls'] = nn.functional.cross_entropy(
                outs["outputs"], outs["target"])
        return loss_dict






