from transformers import AutoTokenizer


class ModelTokenizer:
    """Thin wrapper around a Hugging Face ``AutoTokenizer``.

    NOTE(review): most of the ``get_*`` / ``*_mask`` methods below are
    plain aliases for ``convert_tokens_to_ids`` — despite their names they
    do NOT compute attention/padding masks or special-token information.
    Their behavior is preserved for backward compatibility; new callers
    should prefer the accurately named ``convert_*`` methods.
    """

    def __init__(self, tokenizer_path):
        """Load a pretrained tokenizer.

        :param tokenizer_path: local directory or Hub model id passed to
            ``AutoTokenizer.from_pretrained``.
        """
        self.__tokenizer = AutoTokenizer.from_pretrained(tokenizer_path)

    def get_tokens(self, text):
        """Split ``text`` into the tokenizer's sub-word token strings."""
        return self.__tokenizer.tokenize(text)

    def get_special_tokens(self, text):
        # NOTE(review): misnamed — returns token ids, not special tokens.
        return self.__tokenizer.convert_tokens_to_ids(text)

    def get_special_tokens_mask(self, text):
        # NOTE(review): misnamed — returns token ids, not a special-tokens
        # mask (the underlying tokenizer's ``get_special_tokens_mask`` is
        # never called). Kept as-is so existing callers are unaffected.
        return self.__tokenizer.convert_tokens_to_ids(text)

    def get_padding_mask(self, text):
        # NOTE(review): misnamed — alias for convert_tokens_to_ids.
        return self.__tokenizer.convert_tokens_to_ids(text)

    def get_mask(self, text):
        # NOTE(review): misnamed — alias for convert_tokens_to_ids.
        return self.__tokenizer.convert_tokens_to_ids(text)

    def convert_ids_to_tokens(self, ids):
        """Map token ids back to their token strings."""
        return self.__tokenizer.convert_ids_to_tokens(ids)

    def convert_ids_to_tokens_mask(self, ids):
        # BUG FIX: Hugging Face tokenizers have no method named
        # ``convert_ids_to_tokens_mask``, so this call previously raised
        # AttributeError on every invocation. Delegate to
        # ``convert_ids_to_tokens`` instead, mirroring how every other
        # ``*_mask`` variant in this class delegates to its non-mask
        # counterpart.
        return self.__tokenizer.convert_ids_to_tokens(ids)

    def convert_tokens_to_ids(self, tokens):
        """Map token strings to their integer ids."""
        return self.__tokenizer.convert_tokens_to_ids(tokens)

    def convert_tokens_to_ids_mask(self, tokens):
        # NOTE(review): misnamed — alias for convert_tokens_to_ids.
        return self.__tokenizer.convert_tokens_to_ids(tokens)

    def get_ids(self, text):
        # NOTE(review): despite the parameter name, this expects tokens
        # (or a single token string), not raw text — it does not tokenize.
        return self.__tokenizer.convert_tokens_to_ids(text)

    def get_tokens_mask(self, text):
        # NOTE(review): misnamed — alias for convert_tokens_to_ids.
        return self.__tokenizer.convert_tokens_to_ids(text)

    def get_tokenizer(self):
        """Return the wrapped ``AutoTokenizer`` instance."""
        return self.__tokenizer
