# coding=utf-8
# Copyright (C) xxx team - All Rights Reserved
#
# @Version:   3.9.4
# @Software:  PyCharm
# @FileName:  PLM.py
# @CTime:     2021/5/3 17:36   
# @Author:    Haiyang Yu
# @Email:     xxx
# @UTime:     2021/5/3 17:36
#
# @Description:
#     xxx
#     xxx
#
import codecs
import logging
from typing import List, Dict
import torch
import torch.nn as nn
import torch.nn.functional as F
from transformers import AutoModel

logger = logging.getLogger(__name__)


class BertBase(nn.Module):
    def __init__(self, cfg):
        super(BertBase, self).__init__()
        # get config
        self._model_name = cfg.bert_model_name
        self._local_files_only = cfg.bert_local_files_only
        self._cache_dir = cfg.bert_cache_dir
        self._num_hidden_layers = cfg.bert_num_hidden_layers
        self.ner_types = cfg.ner_types
        self.cls_types = cfg.cls_types
        # setting module layers
        self.bert = AutoModel.from_pretrained(self._model_name,
                                              local_files_only=self._local_files_only,
                                              # cache_dir=self._cache_dir,
                                              num_hidden_layers=self._num_hidden_layers,)
        self.fc_ner = nn.Linear(768, self.ner_types)
        self.fc_cls = nn.Linear(768, self.cls_types)
        self.dropout = nn.Dropout()

    def forward(self, x, x_mask):
        last_hidden_state, pooler_output = self.bert(x, attention_mask=x_mask, return_dict=True).to_tuple()
        ner_output = self.fc_ner(self.dropout(last_hidden_state))
        cls_output = self.fc_cls(self.dropout(pooler_output))
        return ner_output, cls_output



if __name__ == '__main__':
    # Smoke test. The original called BertBase() with no argument, which
    # always raised TypeError: __init__ requires a cfg. Build a minimal
    # config exposing exactly the attributes BertBase reads.
    from types import SimpleNamespace
    cfg = SimpleNamespace(
        bert_model_name='bert-base-uncased',
        bert_local_files_only=False,
        bert_cache_dir=None,
        bert_num_hidden_layers=12,
        ner_types=9,   # e.g. BIO tags; adjust to the task's tag set
        cls_types=2,   # e.g. binary sequence classification
    )
    model = BertBase(cfg)
    print(model)

