# from .generation import EncoderDecoderData
from .ner import NerData
from .jieba_tokenizer import JiebaTokenizer
from transformers.models.bert import BertTokenizerFast
import os


def get_tokenizer(name):
    """Build a tokenizer instance for the preset *name*.

    Args:
        name: One of ``'jieba'``, ``'bert'``, or ``'roformer_char'``.

    Returns:
        A tokenizer instance (``JiebaTokenizer`` for ``'jieba'``,
        ``BertTokenizerFast`` otherwise) constructed from the vocabulary
        file bundled under this package's ``vocabs/`` directory.

    Raises:
        KeyError: If *name* is not one of the supported presets.
    """
    # One table per preset keeps the vocab file and the tokenizer class
    # in sync — the previous two parallel dicts could silently drift apart.
    presets = {
        'jieba': ('jieba_vocab.txt', JiebaTokenizer),
        'bert': ('bert_vocab.txt', BertTokenizerFast),
        'roformer_char': ('roformer_char.txt', BertTokenizerFast),
    }
    try:
        vocab_file, tokenizer_class = presets[name]
    except KeyError:
        # Same exception type as before (backward compatible for callers
        # catching KeyError), but with an actionable message.
        raise KeyError(
            f"Unknown tokenizer name {name!r}; expected one of {sorted(presets)}"
        ) from None
    vocab_path = get_module_path(os.path.join('./vocabs', vocab_file))
    return tokenizer_class(vocab_path)


def get_module_path(path):
    return os.path.normpath(os.path.join(os.getcwd(), os.path.dirname(__file__), path))
