{
    "tokenizer_class": "BertJapaneseTokenizer",
    "model_max_length": 512,
    "do_lower_case": false,
    "word_tokenizer_type": "mecab",
    "subword_tokenizer_type": "wordpiece",
    "mecab_kwargs": {
        "mecab_dic": "unidic_lite"
    }
}