import torch.nn as nn
from typing import Tuple
import logging
import sys

from transformers import BertModel

# Module-level logger; basicConfig routes INFO-and-above records to stdout
# once at import time (no-op if the root logger is already configured).
logger = logging.getLogger(__name__)
logging.basicConfig(stream=sys.stdout, level=logging.INFO)


class MultitaskBERT(nn.Module):
    """BERT encoder shared between two task-specific classification heads.

    A single pretrained BERT backbone feeds its pooled output into either
    an identification ('ide') head or a classification ('cls') head;
    ``forward`` selects the head by task name.
    """

    def __init__(self,
                 model_dir: str,
                 num_labels: int,
                 dropout_prob: float = 0.1,
                 num_ide_labels: int = 2):
        """
        Args:
            model_dir: path or model id passed to ``BertModel.from_pretrained``.
            num_labels: output size of the 'cls' head.
            dropout_prob: dropout probability applied to the pooled output.
            num_ide_labels: output size of the 'ide' head; defaults to 2,
                preserving the original binary identification task.
        """
        super().__init__()
        self.dropout = nn.Dropout(dropout_prob)
        self.bert = BertModel.from_pretrained(model_dir)
        # Hoist the config lookup shared by both heads.
        hidden_size = self.bert.config.hidden_size
        self.ide_linear = nn.Linear(hidden_size, num_ide_labels)
        self.cls_linear = nn.Linear(hidden_size, num_labels)

    def forward(self,
                inputs: Tuple,
                task_name: str = 'ide'
                ):
        """Encode ``inputs`` with BERT and apply the head for ``task_name``.

        Args:
            inputs: tuple whose first two items are the ``input_ids`` and
                ``attention_mask`` tensors.
            task_name: 'ide' or 'cls'. Any other value logs an error and
                yields ``logits=None`` (best-effort behavior kept so
                existing callers are not broken by a new exception).

        Returns:
            ``(logits, pooled_output)`` — ``logits`` is ``None`` for an
            unknown task name.
        """
        outputs = self.bert(input_ids=inputs[0], attention_mask=inputs[1])
        # outputs[1] is BERT's pooled [CLS] representation.
        pooled_output = self.dropout(outputs[1])
        if task_name == 'ide':
            logits = self.ide_linear(pooled_output)
        elif task_name == 'cls':
            logits = self.cls_linear(pooled_output)
        else:
            logits = None
            # Lazy %-formatting: the message is built only if actually emitted.
            logger.error('No task_name: %s', task_name)
        return logits, pooled_output
