import torch
from transformers import BertConfig

from models.classify_model import SoftmaxSeqClassifyModule
from models.encoder_model import BiLSTMEncoderModule
from models.language_model import Word2VecLMModule
from models.model import NERTokenClassification
from utils import Params


def model_test():
    """Smoke-test the NER pipeline: build every sub-module standalone,
    then run the assembled model forward with and without gold labels.

    Prints the model structure and the outputs of both forward passes;
    intended for manual inspection, not automated assertion.
    """
    params = Params(
        config=BertConfig(
            vocab_size=100,
            hidden_size=12,
        ),
        params={
            "encoder_output_size": 128,
            "classify_fc_hidden_size": [64, 32],
        },
    )

    # Instantiate each pipeline stage on its own to confirm they construct
    # cleanly with the shared Params object.
    lm_module = Word2VecLMModule(params)
    encoder_module = BiLSTMEncoderModule(params)
    classifier_module = SoftmaxSeqClassifyModule(params)

    model = NERTokenClassification(params)
    print(model)

    # Two padded token-id sequences with matching attention masks
    # (0 marks padding positions in both ids and masks).
    input_ids = torch.tensor([
        [1, 3, 4, 1, 5],
        [3, 4, 1, 0, 0],
    ])
    attention_masks = torch.tensor([
        [1, 1, 1, 1, 1],
        [1, 1, 1, 0, 0],
    ])
    gold_labels = torch.tensor([
        [24, 24, 0, 2, 24],
        [24, 24, 24, 24, 24],
    ])

    # Forward pass without labels — presumably inference mode; TODO confirm
    # against NERTokenClassification.forward.
    result = model(input_ids, attention_masks, None)
    print(result.shape)
    print(result)

    # Forward pass with labels — presumably returns a loss-bearing output.
    result = model(input_ids, attention_masks, gold_labels)
    print(result.shape)
    print(result)
# Run the manual smoke test when this file is executed as a script.
if __name__ == '__main__':
    model_test()