{ "added_tokens_decoder": {}, "auto_map": { "AutoTokenizer": [ "sentencepiece_tokenizer.Tokenizer", null ] }, "clean_up_tokenization_spaces": true, "model_max_length": 1000000000000000019884624838656, "model_path": "./spmodel_wikiqa.model", "tokenizer_class": "Tokenizer", "vocab_file": "./spmodel_wikiqa.vocab" }