snoop2head
committed on
Commit
•
ca3e05e
1
Parent(s):
f997df4
add tokenizer
Browse files- special_tokens_map.json +1 -0
- tokenizer.json +0 -0
- tokenizer_config.json +1 -0
special_tokens_map.json
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
{"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>"}
|
tokenizer.json
ADDED
The diff for this file is too large to render.
See raw diff
|
|
tokenizer_config.json
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
{"max_len": 42, "padding": "max_length", "add_special_tokens": true, "return_tensors": "pt", "truncation": true, "bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "special_tokens_map_file": "/opt/ml/.cache/huggingface/transformers/c2ab65b9d700d0871fd407d489869d7b93f69fb5f1a58fb1fac796fd43b9ea27.1f5b09bb43973b9fbd2ba75c9fe44ffab036b980c4e6a9d779aa7707913416fe", "name_or_path": "skt/ko-gpt-trinity-1.2B-v0.5", "tokenizer_class": "PreTrainedTokenizerFast"}
|