from transformers import BertTokenizerFast


class Tokenizer:
    """Thin wrapper around a ``BertTokenizerFast`` instance.

    NOTE(review): most ``get_*`` / ``*_mask`` methods below are plain
    aliases of ``convert_tokens_to_ids`` — they do NOT build masks despite
    their names. They are preserved unchanged for backward compatibility;
    audit callers before renaming or removing any of them.
    """

    def __init__(self, tokenizer_path):
        # ``tokenizer_path`` is forwarded as the first positional argument
        # (the vocab file) of BertTokenizerFast.
        # NOTE(review): ``BertTokenizerFast.from_pretrained(...)`` is the
        # usual loader — confirm this path really points at a vocab file.
        self.__tokenizer = BertTokenizerFast(tokenizer_path,
                                             sep_token="[SEP]",  # sentence separator token
                                             pad_token="[PAD]",  # padding token
                                             cls_token="[CLS]")  # classification token

    def get_tokens(self, text):
        """Split *text* into wordpiece tokens (strings, not ids)."""
        return self.__tokenizer.tokenize(text)

    def get_special_tokens(self, text):
        # Alias of convert_tokens_to_ids — name is misleading (no special
        # token handling happens here).
        return self.__tokenizer.convert_tokens_to_ids(text)

    def get_special_tokens_mask(self, text):
        # Alias of convert_tokens_to_ids — returns ids, not a mask.
        return self.__tokenizer.convert_tokens_to_ids(text)

    def get_padding_mask(self, text):
        # Alias of convert_tokens_to_ids — returns ids, not a padding mask.
        return self.__tokenizer.convert_tokens_to_ids(text)

    def get_mask(self, text):
        # Alias of convert_tokens_to_ids — returns ids, not a mask.
        return self.__tokenizer.convert_tokens_to_ids(text)

    def convert_ids_to_tokens(self, ids):
        """Map vocabulary ids back to their token strings."""
        return self.__tokenizer.convert_ids_to_tokens(ids)

    def convert_ids_to_tokens_mask(self, ids):
        # BUG FIX: the original delegated to
        # ``self.__tokenizer.convert_ids_to_tokens_mask(ids)``, a method
        # that does not exist on BertTokenizerFast and always raised
        # AttributeError. Delegate to ``convert_ids_to_tokens`` instead,
        # mirroring the sibling ``convert_tokens_to_ids_mask`` pattern.
        return self.__tokenizer.convert_ids_to_tokens(ids)

    def convert_tokens_to_ids(self, tokens):
        """Map token strings to their vocabulary ids."""
        return self.__tokenizer.convert_tokens_to_ids(tokens)

    def convert_tokens_to_ids_mask(self, tokens):
        # Alias of convert_tokens_to_ids — returns ids, not a mask.
        return self.__tokenizer.convert_tokens_to_ids(tokens)

    def get_ids(self, text):
        # Alias of convert_tokens_to_ids.
        return self.__tokenizer.convert_tokens_to_ids(text)

    def get_tokens_mask(self, text):
        # Alias of convert_tokens_to_ids — returns ids, not a mask.
        return self.__tokenizer.convert_tokens_to_ids(text)

    def get_tokenizer(self):
        """Return the underlying BertTokenizerFast instance."""
        return self.__tokenizer
