ZhankuiHe committed
Commit ddd79eb
1 parent: eb88366

Upload tokenizer

.gitattributes CHANGED
@@ -33,3 +33,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ entity/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ item/tokenizer.json filter=lfs diff=lfs merge=lfs -text
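
The two new rules extend the existing Git LFS patterns so that the large entity/ and item/ tokenizer.json files are stored as LFS objects rather than inline in git history. As a rough, illustrative sketch (not part of this commit), Python's fnmatch can approximate how a path is checked against these patterns; real gitattributes matching has extra semantics, so this is only an approximation:

from fnmatch import fnmatch

# Patterns copied from the .gitattributes rules above.
# gitattributes matching is richer than fnmatch (directory handling, etc.),
# so this check is illustrative only.
LFS_PATTERNS = [
    "*.zip",
    "*.zst",
    "*tfevents*",
    "entity/tokenizer.json",
    "item/tokenizer.json",
]

def is_lfs_tracked(path: str) -> bool:
    return any(fnmatch(path, pattern) for pattern in LFS_PATTERNS)

print(is_lfs_tracked("entity/tokenizer.json"))           # True  -> stored as an LFS pointer
print(is_lfs_tracked("entity/special_tokens_map.json"))  # False -> stored inline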
concept/special_tokens_map.json ADDED
@@ -0,0 +1,3 @@
+ {
+ "pad_token": "[UNK]"
+ }
concept/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
concept/tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff
 
entity/special_tokens_map.json ADDED
@@ -0,0 +1,3 @@
+ {
+ "pad_token": "[PAD]"
+ }
entity/tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0798d6ea50e1c1d335775ea7a54a659a841babd39203452b52c13ca62e4cef45
+ size 16653821
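
Both entity/tokenizer.json and item/tokenizer.json are committed as Git LFS pointers: a three-line text stub holding the spec version, the sha256 oid of the real file, and its size in bytes (about 16.7 MB here), while the actual content lives in LFS storage. A minimal parsing sketch, assuming you are looking at the raw pointer text rather than a checked-out (smudged) file:

def parse_lfs_pointer(text: str) -> dict:
    # A Git LFS pointer is a short key/value text file:
    #   version <spec URL>
    #   oid sha256:<digest of the real file>
    #   size <real file size in bytes>
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

pointer_text = (
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:0798d6ea50e1c1d335775ea7a54a659a841babd39203452b52c13ca62e4cef45\n"
    "size 16653821\n"
)
info = parse_lfs_pointer(pointer_text)
print(info["oid"], info["size"])  # the ~16 MB tokenizer.json itself is fetched from LFS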
entity/tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff
 
item/special_tokens_map.json ADDED
@@ -0,0 +1,4 @@
+ {
+ "pad_token": "[PAD]",
+ "unk_token": "[UNK]"
+ }
item/tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5c5367496303729b85ae8f9329823d91a96a27ab30732735804a0b175494c43d
+ size 14121752
item/tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff
 
word/special_tokens_map.json ADDED
@@ -0,0 +1,3 @@
+ {
+ "pad_token": "[PAD]"
+ }
word/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
word/tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff