import torch
from transformers import BertModel, BertTokenizer


def TestSingle(text):
    """Run single-sentence spelling/error-correction inference.

    Pipeline: a detector model scores each character's error probability,
    then the corrector model consumes the BERT embeddings plus those
    probabilities and predicts a corrected token id per position.

    Args:
        text: the raw input sentence (iterated character by character,
            so it is tokenized as individual Chinese characters).

    Returns:
        List of predicted tokens (including positions for [CLS]/[SEP]).
    """
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    # map_location makes GPU-saved checkpoints loadable on CPU-only hosts.
    # NOTE(review): path spells "checkponts" (sic) — presumably the on-disk
    # directory really is named that; confirm before "fixing" the string.
    detector_model = torch.load('./checkponts/detector_model', map_location=device).to(device)
    model = torch.load('./checkponts/model', map_location=device).to(device)
    # BUG FIX: the original built `bert` as a BertTokenizer and then accessed
    # `bert.embedding` / `bert.get_extend_attention_mask`, attributes a
    # tokenizer does not have (AttributeError). The embedding layer and the
    # extended-attention-mask helper live on the BertModel; the attribute is
    # `embeddings` and the method is `get_extended_attention_mask`.
    bert = BertModel.from_pretrained('chinese-bert-wwm-ext')
    tokenizer = BertTokenizer.from_pretrained('chinese-bert-wwm-ext')
    embedding = bert.embeddings.to(device)

    # Character-level tokenization wrapped in the standard special tokens.
    ids = tokenizer.convert_tokens_to_ids(['[CLS]'] + list(text) + ['[SEP]'])
    # Single un-padded sequence, so every position is attended to.
    mask = torch.ones(1, len(ids), dtype=torch.float, device=device)
    input_ids = torch.tensor(ids).unsqueeze(0).to(device)

    # Inference only: freeze dropout/batch-norm and skip autograd tracking.
    detector_model.eval()
    model.eval()
    with torch.no_grad():
        prob = detector_model(input_ids)
        text_embedding = embedding(input_ids)
        out = model(
            text_embedding,
            prob,
            bert.get_extended_attention_mask(mask, input_ids.shape),
        )
        out = torch.argmax(out, dim=-1).reshape(-1)

    # convert_ids_to_tokens expects plain ints, not 0-dim tensors.
    return tokenizer.convert_ids_to_tokens(out.tolist())
if __name__ == '__main__':
    # Read one sentence from stdin and print its corrected token sequence.
    sentence = input()
    result = TestSingle(sentence)
    print(result)