nielsr HF staff committed on
Commit
8219d1f
1 Parent(s): 2a0f784

Training in progress, epoch 0

Browse files
Files changed (3) hide show
  1. config.json +8 -4
  2. generation_config.json +10 -0
  3. pytorch_model.bin +2 -2
config.json CHANGED
@@ -1,6 +1,6 @@
1
  {
2
- "_commit_hash": "6b3e8ef8087609eff23423ea1b5d7c3ae8b90402",
3
- "_name_or_path": "nielsr/donut-base",
4
  "architectures": [
5
  "VisionEncoderDecoderModel"
6
  ],
@@ -13,6 +13,7 @@
13
  "architectures": null,
14
  "attention_dropout": 0.0,
15
  "bad_words_ids": null,
 
16
  "bos_token_id": 0,
17
  "chunk_size_feed_forward": 0,
18
  "classifier_dropout": 0.0,
@@ -71,6 +72,7 @@
71
  "return_dict_in_generate": false,
72
  "scale_embedding": true,
73
  "sep_token_id": null,
 
74
  "task_specific_params": null,
75
  "temperature": 1.0,
76
  "tf_legacy_loss": false,
@@ -81,7 +83,7 @@
81
  "top_p": 1.0,
82
  "torch_dtype": null,
83
  "torchscript": false,
84
- "transformers_version": "4.22.0.dev0",
85
  "typical_p": 1.0,
86
  "use_bfloat16": false,
87
  "use_cache": true,
@@ -94,6 +96,7 @@
94
  "architectures": null,
95
  "attention_probs_dropout_prob": 0.0,
96
  "bad_words_ids": null,
 
97
  "bos_token_id": null,
98
  "chunk_size_feed_forward": 0,
99
  "cross_attention_hidden_size": null,
@@ -166,6 +169,7 @@
166
  "return_dict": true,
167
  "return_dict_in_generate": false,
168
  "sep_token_id": null,
 
169
  "task_specific_params": null,
170
  "temperature": 1.0,
171
  "tf_legacy_loss": false,
@@ -176,7 +180,7 @@
176
  "top_p": 1.0,
177
  "torch_dtype": null,
178
  "torchscript": false,
179
- "transformers_version": "4.22.0.dev0",
180
  "typical_p": 1.0,
181
  "use_absolute_embeddings": false,
182
  "use_bfloat16": false,
 
1
  {
2
+ "_commit_hash": "a959cf33c20e09215873e338299c900f57047c61",
3
+ "_name_or_path": "naver-clova-ix/donut-base",
4
  "architectures": [
5
  "VisionEncoderDecoderModel"
6
  ],
 
13
  "architectures": null,
14
  "attention_dropout": 0.0,
15
  "bad_words_ids": null,
16
+ "begin_suppress_tokens": null,
17
  "bos_token_id": 0,
18
  "chunk_size_feed_forward": 0,
19
  "classifier_dropout": 0.0,
 
72
  "return_dict_in_generate": false,
73
  "scale_embedding": true,
74
  "sep_token_id": null,
75
+ "suppress_tokens": null,
76
  "task_specific_params": null,
77
  "temperature": 1.0,
78
  "tf_legacy_loss": false,
 
83
  "top_p": 1.0,
84
  "torch_dtype": null,
85
  "torchscript": false,
86
+ "transformers_version": "4.26.0",
87
  "typical_p": 1.0,
88
  "use_bfloat16": false,
89
  "use_cache": true,
 
96
  "architectures": null,
97
  "attention_probs_dropout_prob": 0.0,
98
  "bad_words_ids": null,
99
+ "begin_suppress_tokens": null,
100
  "bos_token_id": null,
101
  "chunk_size_feed_forward": 0,
102
  "cross_attention_hidden_size": null,
 
169
  "return_dict": true,
170
  "return_dict_in_generate": false,
171
  "sep_token_id": null,
172
+ "suppress_tokens": null,
173
  "task_specific_params": null,
174
  "temperature": 1.0,
175
  "tf_legacy_loss": false,
 
180
  "top_p": 1.0,
181
  "torch_dtype": null,
182
  "torchscript": false,
183
+ "transformers_version": "4.26.0",
184
  "typical_p": 1.0,
185
  "use_absolute_embeddings": false,
186
  "use_bfloat16": false,
generation_config.json ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_from_model_config": true,
3
+ "bos_token_id": 0,
4
+ "decoder_start_token_id": 57579,
5
+ "eos_token_id": 2,
6
+ "forced_eos_token_id": 2,
7
+ "max_length": 768,
8
+ "pad_token_id": 1,
9
+ "transformers_version": "4.26.0"
10
+ }
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:e0ac091e8bebafe3c07e31a7243184a0a9ff8de3c3fca7891be536d090788ba8
3
- size 809400699
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1df63feac73adf1816763074b8f9d6eda5d9ab7360629888d57a890c9bdae94c
3
+ size 809404185