GPT_CHARS / vocab.json
{
".": 41,
"[PAD]": 40,
"[UNK]": 39,
"|": 36,
"ء": 19,
"آ": 18,
"أ": 42,
"إ": 43,
"ئ": 44,
"ا": 26,
"ب": 9,
"ة": 14,
"ت": 15,
"ث": 1,
"ج": 17,
"ح": 8,
"خ": 33,
"د": 12,
"ذ": 10,
"ر": 27,
"ز": 11,
"س": 0,
"ش": 6,
"ص": 37,
"ض": 31,
"ط": 32,
"ظ": 20,
"ع": 4,
"غ": 30,
"ف": 28,
"ق": 3,
"ك": 25,
"ل": 5,
"م": 38,
"ن": 7,
"ه": 29,
"و": 22,
"ى": 24,
"ي": 2,
"َ": 23,
"ُ": 16,
"ِ": 13,
"ّ": 34,
"ْ": 35,
"ھ": 21
}
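
The file above is a flat character-to-ID mapping: each Arabic letter, diacritic, and special token ("[PAD]", "[UNK]", "|", ".") is assigned a single integer ID between 0 and 44. A minimal sketch of how such a vocabulary could be used, assuming a plain character-level tokenizer (the file path `vocab.json` and the helper names `encode`/`decode` are illustrative, not part of the repository):

```python
import json

# Load the character-to-ID vocabulary shown above.
with open("vocab.json", encoding="utf-8") as f:
    vocab = json.load(f)

# Inverse map for turning IDs back into characters.
id_to_char = {i: c for c, i in vocab.items()}

def encode(text, unk_token="[UNK]"):
    """Map each character to its ID; unknown characters fall back to [UNK]."""
    return [vocab.get(ch, vocab[unk_token]) for ch in text]

def decode(ids):
    """Map IDs back to characters and join them into a string."""
    return "".join(id_to_char[i] for i in ids)

# Example: the word "سلام" encoded character by character and decoded again.
ids = encode("سلام")
print(ids)          # [0, 5, 26, 38]
print(decode(ids))  # سلام
```

If the "|" token serves as a word delimiter (a common convention in CTC-style character tokenizers, though not stated in this file), spaces in the input text would typically be replaced with "|" before encoding and converted back to spaces after decoding.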