w2v-bert-fine-tuning / vocab.json
{
" ": 0,
"[PAD]": 54,
"[UNK]": 53,
"ا": 1,
"اْ": 2,
"بُ": 3,
"بِ": 4,
"بِّ": 5,
"تَ": 6,
"حَ": 7,
"حِ": 8,
"حْ": 9,
"دُ": 10,
"دِ": 11,
"دِّ": 12,
"ذِ": 13,
"رَ": 14,
"رِ": 15,
"رَّ": 16,
"سْ": 17,
"صِ": 18,
"صِّ": 19,
"طَ": 20,
"عَ": 21,
"عِ": 22,
"عْ": 23,
"غَ": 24,
"غُْ": 25,
"قِ": 26,
"كَ": 27,
"كِ": 28,
"ل": 29,
"لَ": 30,
"لِ": 31,
"لَّ": 32,
"لَِّ": 33,
"لِّ": 34,
"لْ": 35,
"مَ": 36,
"مُ": 37,
"مِ": 38,
"مْ": 39,
"نَ": 40,
"نُ": 41,
"نِ": 42,
"نْ": 43,
"و": 44,
"وَ": 45,
"وَِ": 46,
"وْ": 47,
"ي": 48,
"يَ": 49,
"يَّ": 50,
"يْ": 51,
"يِْ": 52
}
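
This vocabulary maps single (diacritized) Arabic characters and the [PAD]/[UNK] special tokens to integer IDs for a CTC character tokenizer. A minimal sketch of loading it, assuming the file is saved locally as vocab.json and the standard transformers Wav2Vec2CTCTokenizer is used; passing " " as word_delimiter_token is an assumption based on the space character holding ID 0:

    # Hypothetical usage sketch, not the repository's own training code.
    from transformers import Wav2Vec2CTCTokenizer

    tokenizer = Wav2Vec2CTCTokenizer(
        "vocab.json",              # this file
        unk_token="[UNK]",         # ID 53 above
        pad_token="[PAD]",         # ID 54 above, also the CTC blank
        word_delimiter_token=" ",  # assumption: the space token (ID 0) marks word boundaries
    )
    print(tokenizer.vocab_size)    # 55 entries, IDs 0-54

Because the model is character-level, encoding a transcript simply looks up each character (base letter plus its diacritics as listed above) in this table, with unseen characters falling back to [UNK].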