gwkrsrch committed
Commit a813850
1 Parent(s): 063d2ca

initial commit

added_tokens.json ADDED
@@ -0,0 +1 @@
+ {"<sep/>": 57522, "<s_iitcdip>": 57523, "<s_synthdog>": 57524}
config.json ADDED
@@ -0,0 +1,23 @@
+ {
+   "align_long_axis": true,
+   "architectures": [
+     "DonutModel"
+   ],
+   "decoder_layer": 4,
+   "encoder_layer": [
+     2,
+     2,
+     14,
+     2
+   ],
+   "input_size": [
+     2560,
+     1920
+   ],
+   "max_length": 1536,
+   "max_position_embeddings": 1536,
+   "model_type": "donut",
+   "torch_dtype": "float32",
+   "transformers_version": "4.11.3",
+   "window_size": 10
+ }
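config.json describes a Swin-style encoder (stage depths [2, 2, 14, 2], window size 10, 2560x1920 input with long-axis alignment) paired with a 4-layer BART-style decoder capped at 1536 tokens. A loading sketch, assuming the original donut-python package and a hypothetical repo id (neither is stated in this commit):

```python
from donut import DonutModel  # assumption: the original donut-python package

# Hypothetical repo id used for illustration only.
model = DonutModel.from_pretrained("naver-clova-ix/donut-base")

# These attributes mirror the fields in config.json above.
print(model.config.encoder_layer)  # e.g. [2, 2, 14, 2]
print(model.config.input_size)     # e.g. [2560, 1920]
print(model.config.max_length)     # e.g. 1536
```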
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d21e8b5e708168f4f9885d18f8bc95ad6950439e7ac518161828ff0b27b984e8
+ size 1018458179
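The model weights are stored through Git LFS, so the repository itself holds only this pointer: the blob's SHA-256 and its size (about 1.0 GB). A sketch for verifying a downloaded copy against the pointer, assuming huggingface_hub and a hypothetical repo id:

```python
import hashlib
from huggingface_hub import hf_hub_download

# Hypothetical repo id; the pointer above records only the hash and the size.
path = hf_hub_download(repo_id="naver-clova-ix/donut-base", filename="pytorch_model.bin")

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

print(digest.hexdigest())  # should equal the oid sha256 in the LFS pointer
```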
sentencepiece.bpe.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cb9e3dce4c326195d08fc3dd0f7e2eee1da8595c847bf4c1a9c78b7a82d47e2d
+ size 1296245
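The tokenizer's SentencePiece BPE model is likewise an LFS blob (about 1.3 MB). Once downloaded, it can be inspected directly with the sentencepiece library; a small sketch, assuming the file is available locally:

```python
import sentencepiece as spm

# Load the SentencePiece BPE model added in this commit (local path for illustration).
sp = spm.SentencePieceProcessor(model_file="sentencepiece.bpe.model")

print(sp.get_piece_size())               # size of the base vocabulary
print(sp.encode("donut", out_type=str))  # subword pieces for a sample string
```

Note that the task tokens from added_tokens.json are not part of this base model; they are layered on at the tokenizer level.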
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": true}, "additional_special_tokens": ["<s_iitcdip>", "<s_synthdog>"]}
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "cls_token": "<s>", "pad_token": "<pad>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "sp_model_kwargs": {}, "special_tokens_map_file": null, "tokenizer_file": "/root/.cache/huggingface/transformers/213c2041358e63047b407f94cde1ae23904d31a3bceb57eab291028c1e949437.7135a4b25ac726e19641f0d68803ff02bad960d6319064f55fa9c536929b86fc", "name_or_path": "hyunwoongko/asian-bart-ecjk", "tokenizer_class": "XLMRobertaTokenizer"}