PereLluis13 committed on
Commit aa00611
1 Parent(s): f0af856

add tokenizer

.gitattributes CHANGED
@@ -32,3 +32,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
added_tokens.json ADDED
@@ -0,0 +1 @@
+ {"<triplet>": 250054, "ca_XX": 250057, "<loc>": 250059, "<concept>": 250070, "<eve>": 250066, "<relation>": 250055, "<misc>": 250060, "el_EL": 250056, "<num>": 250062, "<dis>": 250069, "<date>": 250065, "<org>": 250064, "<media>": 250068, "tp_XX": 250058, "<time>": 250063, "<per>": 250061, "<cel>": 250067}
sentencepiece.bpe.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cfc8146abe2a0488e9e2a0c56de7952f7c11ab059eca145a0a727afce0db2865
+ size 5069051
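The SentencePiece model itself is stored with Git LFS, so the commit only adds this three-line pointer (version, oid, size); the roughly 5 MB blob is fetched by `git lfs pull` or the Hub download tooling. A small illustrative sketch of reading such a pointer file:

```python
# Sketch: parse a Git LFS pointer file (three "key value" lines like the
# ones above). For illustration only; `git lfs pull` or the Hub download
# tooling is what actually fetches the real blob.
def parse_lfs_pointer(path: str) -> dict:
    fields = {}
    with open(path, encoding="utf-8") as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

# Before the LFS object is pulled, the checked-out file contains only the
# pointer, so this prints the sha256 oid and size 5069051.
print(parse_lfs_pointer("sentencepiece.bpe.model"))
```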
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": "<mask>", "additional_special_tokens": ["<triplet>", "<relation>", "el_EL", "ca_XX", "tp_XX", "<loc>", "<misc>", "<per>", "<num>", "<time>", "<org>", "<date>", "<eve>", "<cel>", "<media>", "<dis>", "<unk>", "<concept>", "<loc>", "<misc>", "<per>", "<num>", "<time>", "<org>", "<date>", "<eve>", "<cel>", "<media>", "<dis>", "<unk>", "<concept>", "ar_AR", "cs_CZ", "de_DE", "en_XX", "es_XX", "et_EE", "fi_FI", "fr_XX", "gu_IN", "hi_IN", "it_IT", "ja_XX", "kk_KZ", "ko_KR", "lt_LT", "lv_LV", "my_MM", "ne_NP", "nl_XX", "ro_RO", "ru_RU", "si_LK", "tr_TR", "vi_VN", "zh_CN", "af_ZA", "az_AZ", "bn_IN", "fa_IR", "he_IL", "hr_HR", "id_ID", "ka_GE", "km_KH", "mk_MK", "ml_IN", "mn_MN", "mr_IN", "pl_PL", "ps_AF", "pt_XX", "sv_SE", "sw_KE", "ta_IN", "te_IN", "th_TH", "tl_XX", "uk_UA", "ur_PK", "xh_ZA", "gl_ES", "sl_SI"]}
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0a1b73ee00274d011fd6e01336741d7cb0b7eb018bd86e14aab76b9cf3eaf85a
+ size 17097513
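tokenizer.json is the serialized fast tokenizer, also tracked via LFS (hence the new .gitattributes rule above and this pointer). Once the real blob is present locally, it can be loaded standalone; a sketch using the `tokenizers` library:

```python
from tokenizers import Tokenizer

# Sketch: load the serialized fast tokenizer directly with the `tokenizers`
# library, bypassing transformers. Assumes the real tokenizer.json blob has
# been fetched (e.g. via `git lfs pull`) rather than just the pointer.
tok = Tokenizer.from_file("tokenizer.json")
print(tok.token_to_id("<triplet>"))  # expected 250054, per added_tokens.json
print(tok.encode("mREBEL adds a tokenizer").tokens)
```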
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"src_lang": null, "tgt_lang": "tp_XX", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "bos_token": "<s>", "model_max_length": 1024, "name_or_path": "/home/huguetcabot/mrebel/rebel/model/mrebel-large-32", "special_tokens_map_file": "/home/suraj/projects/mbart-50/hf_models/mbart-50-large/special_tokens_map.json", "use_fast": true, "additional_special_tokens": ["<triplet>", "<relation>", "el_EL", "ca_XX", "tp_XX", "<loc>", "<misc>", "<per>", "<num>", "<time>", "<org>", "<date>", "<eve>", "<cel>", "<media>", "<dis>", "<unk>", "<concept>", "<loc>", "<misc>", "<per>", "<num>", "<time>", "<org>", "<date>", "<eve>", "<cel>", "<media>", "<dis>", "<unk>", "<concept>", "ar_AR", "cs_CZ", "de_DE", "en_XX", "es_XX", "et_EE", "fi_FI", "fr_XX", "gu_IN", "hi_IN", "it_IT", "ja_XX", "kk_KZ", "ko_KR", "lt_LT", "lv_LV", "my_MM", "ne_NP", "nl_XX", "ro_RO", "ru_RU", "si_LK", "tr_TR", "vi_VN", "zh_CN", "af_ZA", "az_AZ", "bn_IN", "fa_IR", "he_IL", "hr_HR", "id_ID", "ka_GE", "km_KH", "mk_MK", "ml_IN", "mn_MN", "mr_IN", "pl_PL", "ps_AF", "pt_XX", "sv_SE", "sw_KE", "ta_IN", "te_IN", "th_TH", "tl_XX", "uk_UA", "ur_PK", "xh_ZA", "gl_ES", "sl_SI"], "sp_model_kwargs": {}, "tokenizer_class": "MBart50Tokenizer"}