from os.path import join, abspath
import json
from backend.experiment.framework.tokenizer import SimpleSpecialTokenizer
from backend.experiment.us.tokenizations import tokenization_bert
from backend.experiment.us.model import config_file


def _initialize():
    """Build the tokenizer described by the JSON config next to this module.

    Reads ``config_file`` (resolved relative to this file) to get the
    ``vocabulary_file`` path, wraps that vocabulary in a ``BertTokenizer``,
    and picks the special-token id layout from the version tag ('v0' or
    'v1') embedded in the vocabulary file name.

    Returns:
        SimpleSpecialTokenizer: tokenizer configured for the detected
        vocabulary version.

    Raises:
        ValueError: if the vocabulary file name matches no known version.
    """
    # JSON is text; pin the encoding rather than relying on the locale default.
    with open(abspath(join(__file__, '..', config_file)), 'r', encoding='utf-8') as f:
        params = json.load(f)

    vocabulary_file = params['vocabulary_file']
    tokenizer_path = abspath(join(__file__, '..', vocabulary_file))

    bert_tokenizer = tokenization_bert.BertTokenizer(vocab_file=tokenizer_path)

    # One id table per vocabulary version; the sample-start / paragraph-sep /
    # unknown tokens are derived from it ([MASK] / [SEP] / [UNK] respectively),
    # so the ids cannot drift apart between the dict and the constructor args.
    special_ids_by_version = {
        'v0': {'[UNK]': 100, '[CLS]': 101, '[SEP]': 102, '[MASK]': 103},
        'v1': {'[UNK]': 7, '[CLS]': 8, '[SEP]': 9, '[MASK]': 10},
    }
    # Check versions in the original order ('v0' first) so a name containing
    # both tags resolves the same way as before.
    for version in ('v0', 'v1'):
        if version in vocabulary_file:
            ids = special_ids_by_version[version]
            return SimpleSpecialTokenizer(
                sample_start_token=[ids['[MASK]']],
                paragraph_sep_token=[ids['[SEP]']],
                unknown_token=[ids['[UNK]']],
                tokenizer=bert_tokenizer,
                special_tokens_id=ids,
            )
    # ValueError is more precise than a bare Exception and is still caught by
    # any caller handling Exception.
    raise ValueError(f'unknown vocabulary_file: {vocabulary_file}')


# Module-level singleton, built eagerly at import time: importing this module
# reads the config file and vocabulary from disk as a side effect.
tokenizer = _initialize()
