gabihp30 committed
Commit 0aa4bce
1 Parent(s): c277832

Upload 10 files

added_tokens.json ADDED
@@ -0,0 +1,6 @@
+ {
+   "<s_base>": 57525,
+   "<s_iitcdip>": 57523,
+   "<s_synthdog>": 57524,
+   "<sep/>": 57522
+ }
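added_tokens.json extends the vocabulary with the prompt tokens above; "<s_iitcdip>", "<s_synthdog>" and "<sep/>" are carried over from naver-clova-ix/donut-base, while "<s_base>" appears to be the task token added for this fine-tune. A minimal sketch (not part of this commit) for checking that a local checkout resolves these tokens to the listed IDs; the local path is a placeholder:

```python
# Sketch only: verify the added task tokens in a local checkout of this repo.
from transformers import XLMRobertaTokenizer

tokenizer = XLMRobertaTokenizer.from_pretrained("./donut-base-finetuned")  # placeholder path

# Each token listed in added_tokens.json should resolve to the ID shown above.
for token in ["<sep/>", "<s_iitcdip>", "<s_synthdog>", "<s_base>"]:
    print(token, tokenizer.convert_tokens_to_ids(token))
```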
artifacts.ckpt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e8b5596501ac14ffbfecfe236cda59761468740bc3fa48882a5f934b2c5e9418
+ size 1619339163
config.json ADDED
@@ -0,0 +1,24 @@
+ {
+   "_name_or_path": "naver-clova-ix/donut-base",
+   "align_long_axis": false,
+   "architectures": [
+     "DonutModel"
+   ],
+   "decoder_layer": 4,
+   "encoder_layer": [
+     2,
+     2,
+     14,
+     2
+   ],
+   "input_size": [
+     1280,
+     1280
+   ],
+   "max_length": 2048,
+   "max_position_embeddings": 2048,
+   "model_type": "donut",
+   "torch_dtype": "float32",
+   "transformers_version": "4.25.1",
+   "window_size": 10
+ }
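config.json describes the checkpoint in the layout of the original donut package (architectures ["DonutModel"], a Swin encoder with depths 2/2/14/2 and window size 10, a 4-layer decoder, 1280×1280 inputs, and a 2048-token decoder limit). A minimal loading sketch under the assumption that Clova's donut-python package is used rather than the transformers VisionEncoderDecoder layout; the local path, sample image, and output handling are placeholders:

```python
# Sketch only: assumes the clova-ai "donut-python" package is installed,
# since config.json declares DonutModel. Paths are placeholders.
from PIL import Image
from donut import DonutModel

model = DonutModel.from_pretrained("./donut-base-finetuned")  # placeholder local checkout
model.eval()

image = Image.open("sample_document.png").convert("RGB")      # placeholder input image
output = model.inference(image=image, prompt="<s_base>")      # task token from added_tokens.json
print(output["predictions"][0])
```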
config.yaml ADDED
@@ -0,0 +1,29 @@
+ resume_from_checkpoint_path: None
+ result_path: './result'
+ pretrained_model_name_or_path: 'naver-clova-ix/donut-base'
+ dataset_name_or_paths:
+ - 'dataset/base'
+ sort_json_key: False
+ train_batch_sizes:
+ - 1
+ val_batch_sizes:
+ - 1
+ input_size:
+ - 1280
+ - 1280
+ max_length: 2048
+ align_long_axis: False
+ num_nodes: 1
+ seed: 2022
+ lr: 3e-05
+ warmup_steps: 1000
+ num_training_samples_per_epoch: 800
+ max_epochs: 100
+ max_steps: -1
+ num_workers: 8
+ val_check_interval: 1.0
+ check_val_every_n_epoch: 10
+ gradient_clip_val: 1.0
+ verbose: True
+ exp_name: 'base'
+ exp_version: 'base'
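config.yaml records the fine-tuning hyperparameters: donut-base weights, a single dataset at dataset/base, batch size 1, 1280×1280 inputs, max sequence length 2048, lr 3e-05 with 1000 warmup steps, 100 epochs of 800 samples, and validation every 10 epochs. A minimal sketch (not part of this commit) for reading it back with PyYAML; note that the unquoted value None on the first line loads as the string "None", not as a YAML null:

```python
# Sketch only: inspect the training config uploaded in this commit.
import yaml

with open("config.yaml") as f:
    cfg = yaml.safe_load(f)

print(cfg["pretrained_model_name_or_path"])       # naver-clova-ix/donut-base
print(cfg["input_size"], cfg["max_length"])       # [1280, 1280] 2048
print(cfg["lr"], cfg["warmup_steps"], cfg["max_epochs"])
print(repr(cfg["resume_from_checkpoint_path"]))   # 'None' (a string under PyYAML)
```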
events.out.tfevents.1676770516.server.16270.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:91f6a374f7b228d5c10541d7a2d23dd342dfdc3b4e10bc0532f6e1bedf8391a4
+ size 948119
hparams.yaml ADDED
@@ -0,0 +1 @@
+ {}
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5aa92a5cf6f7d42459e45537c150a1110609f4f41d275ea817cb87c4ba6a6fa5
+ size 881018051
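artifacts.ckpt, the TensorBoard event file, pytorch_model.bin and sentencepiece.bpe.model are tracked with Git LFS, so the hunks above show only pointer files; the binaries themselves are fetched on clone, via git lfs pull, or through the Hub download APIs. A minimal sketch (not part of this commit) for checking a downloaded file against the sha256 oid recorded in its pointer:

```python
# Sketch only: confirm a fetched LFS object matches the oid in its pointer file.
import hashlib

EXPECTED = "5aa92a5cf6f7d42459e45537c150a1110609f4f41d275ea817cb87c4ba6a6fa5"

h = hashlib.sha256()
with open("pytorch_model.bin", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

print(h.hexdigest() == EXPECTED)  # True if the LFS object was fetched intact
```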
sentencepiece.bpe.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cb9e3dce4c326195d08fc3dd0f7e2eee1da8595c847bf4c1a9c78b7a82d47e2d
+ size 1296245
special_tokens_map.json ADDED
@@ -0,0 +1,18 @@
+ {
+   "additional_special_tokens": [
+     "<s_base>"
+   ],
+   "bos_token": "<s>",
+   "cls_token": "<s>",
+   "eos_token": "</s>",
+   "mask_token": {
+     "content": "<mask>",
+     "lstrip": true,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": "<pad>",
+   "sep_token": "</s>",
+   "unk_token": "<unk>"
+ }
tokenizer_config.json ADDED
@@ -0,0 +1,23 @@
+ {
+   "bos_token": "<s>",
+   "cls_token": "<s>",
+   "eos_token": "</s>",
+   "from_slow": true,
+   "mask_token": {
+     "__type": "AddedToken",
+     "content": "<mask>",
+     "lstrip": true,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "model_max_length": 1000000000000000019884624838656,
+   "name_or_path": "naver-clova-ix/donut-base",
+   "pad_token": "<pad>",
+   "processor_class": "DonutProcessor",
+   "sep_token": "</s>",
+   "sp_model_kwargs": {},
+   "special_tokens_map_file": null,
+   "tokenizer_class": "XLMRobertaTokenizer",
+   "unk_token": "<unk>"
+ }
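tokenizer_config.json keeps the XLMRobertaTokenizer setup inherited from naver-clova-ix/donut-base and names DonutProcessor as the processor class. The tokenizer side can be loaded on its own from these files; a full DonutProcessor would additionally need an image-processor config (preprocessor_config.json), which is not among the 10 files in this commit. A minimal sketch with a placeholder local path:

```python
# Sketch only: load just the tokenizer side of this repository.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./donut-base-finetuned")  # placeholder local checkout

print(tok.__class__.__name__)         # XLMRobertaTokenizerFast (or the slow variant)
print(tok.additional_special_tokens)  # ['<s_base>'] per special_tokens_map.json
print(tok.model_max_length)           # effectively unbounded here; generation is
                                      # capped by max_length=2048 in config.json
```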