haven-jeon committed
Commit: e148262
Parent: fb0078b

add special_tokens
Files changed (5):
  1. README.md +1 -2
  2. added_tokens.json +2 -0
  3. merges.txt +0 -0
  4. special_tokens_map.json +2 -0
  5. vocab.json +0 -0
README.md CHANGED
@@ -15,8 +15,7 @@ from transformers import PreTrainedTokenizerFast
 from transformers import BartForConditionalGeneration
 
 tokenizer = PreTrainedTokenizerFast.from_pretrained(
- 'gogamza/kobart-summarization',
- bos_token='<s>', eos_token='</s>', unk_token='<unk>', pad_token='<pad>', mask_token='<mask>')
+ 'gogamza/kobart-summarization')
 
 model = BartForConditionalGeneration.from_pretrained('gogamza/kobart-summarization')
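The README change works because the special tokens now ship with the repository in special_tokens_map.json (added below), so from_pretrained picks them up without explicit arguments. A minimal sketch of the simplified loading path; the asserted values simply restate the tokens removed from the README:

from transformers import PreTrainedTokenizerFast

# Special tokens are read from the repo's special_tokens_map.json,
# so no explicit bos_token/eos_token/... arguments are needed.
tokenizer = PreTrainedTokenizerFast.from_pretrained('gogamza/kobart-summarization')

assert tokenizer.bos_token == '<s>'
assert tokenizer.eos_token == '</s>'
assert tokenizer.unk_token == '<unk>'
assert tokenizer.pad_token == '<pad>'
assert tokenizer.mask_token == '<mask>'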
added_tokens.json ADDED
@@ -0,0 +1,2 @@
+{}
+
merges.txt ADDED
(diff too large to render)
special_tokens_map.json ADDED
@@ -0,0 +1,2 @@
+{"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>"}
+
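As a quick check that the map is picked up, tokenizer.special_tokens_map should mirror the JSON above after loading (a sketch, assuming the same model id as in the README):

from transformers import PreTrainedTokenizerFast

tokenizer = PreTrainedTokenizerFast.from_pretrained('gogamza/kobart-summarization')
# Expected to mirror special_tokens_map.json:
# {'bos_token': '<s>', 'eos_token': '</s>', 'unk_token': '<unk>',
#  'pad_token': '<pad>', 'mask_token': '<mask>'}
print(tokenizer.special_tokens_map)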
vocab.json ADDED
(diff too large to render)