add tokenizer
- .gitattributes +1 -0
- special_tokens_map.json +7 -0
- tokenizer.json +3 -0
- tokenizer_config.json +5 -0
.gitattributes
CHANGED
@@ -25,3 +25,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zstandard filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
special_tokens_map.json
ADDED
@@ -0,0 +1,7 @@
+{
+  "bos_token": "<s>",
+  "eos_token": "</s>",
+  "mask_token": "<mask>",
+  "pad_token": "<pad>",
+  "unk_token": "<unk>"
+}
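As a reference point, a minimal sketch of how these entries surface on a loaded tokenizer, assuming the transformers library and the repo id Gunulhona/tbbarttokenizer from tokenizer_config.json below; nothing here is part of the commit itself.

# Minimal sketch: the keys in special_tokens_map.json become attributes on the
# loaded tokenizer object (assumes `transformers` is installed and the repo is reachable).
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("Gunulhona/tbbarttokenizer")

print(tok.bos_token, tok.eos_token)   # <s> </s>
print(tok.mask_token, tok.pad_token)  # <mask> <pad>
print(tok.unk_token)                  # <unk>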
tokenizer.json
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:444f68abc3b798ce3f285f1afff88f634a15514d53c11b49d5f9f640a006af03
+size 1049337
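This is only the Git LFS pointer; the actual tokenizer.json is stored out of band. A small sketch for checking a downloaded copy against the pointer, using only the standard library; the local path is hypothetical.

# Minimal sketch: verify a locally downloaded tokenizer.json against the LFS pointer.
# In a spec v1 pointer, `oid` is the SHA-256 of the real file and `size` is its byte length.
import hashlib, os

path = "tokenizer.json"  # hypothetical local copy, e.g. after `git lfs pull`
digest = hashlib.sha256(open(path, "rb").read()).hexdigest()

assert digest == "444f68abc3b798ce3f285f1afff88f634a15514d53c11b49d5f9f640a006af03"
assert os.path.getsize(path) == 1049337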
tokenizer_config.json
ADDED
@@ -0,0 +1,5 @@
+{
+  "name_or_path": "Gunulhona/tbbarttokenizer",
+  "special_tokens_map_file": "/root/.cache/huggingface/transformers/3e6abf40f4fadbea9e7b539c182868d979838d8f7e6cdcdf2ed52ddcf01420c0.15447ae63ad4a2eba8bc7a5146360711dc32b315b4f1488b4806debf35315e9a",
+  "tokenizer_class": "PreTrainedTokenizerFast"
+}
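Putting the files together, a rough usage sketch assuming the transformers library; the local directory name and sample text are illustrative, not part of the commit.

# Minimal sketch: load the files from a local checkout of this commit (hypothetical
# directory) using the class named in tokenizer_config.json, then run a round trip.
from transformers import PreTrainedTokenizerFast

tok = PreTrainedTokenizerFast.from_pretrained("./tbbarttokenizer")  # clone with LFS files pulled

enc = tok("an example sentence")              # any sample text
print(enc["input_ids"])                       # token ids produced from tokenizer.json
print(tok.decode(enc["input_ids"], skip_special_tokens=True))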