ngxquang committed on
Commit
ef52c7f
1 Parent(s): f120c30

Epoch 15 - CER 0.2253

Browse files
Files changed (3) hide show
  1. config.json +2 -2
  2. generation_config.json +2 -2
  3. pytorch_model.bin +1 -1
config.json CHANGED
@@ -171,10 +171,10 @@
171
  "eos_token_id": 6,
172
  "is_encoder_decoder": true,
173
  "length_penalty": 2.0,
174
- "max_length": 128,
175
  "model_type": "vision-encoder-decoder",
176
  "no_repeat_ngram_size": 3,
177
- "num_beams": 4,
178
  "pad_token_id": 6199,
179
  "tie_word_embeddings": false,
180
  "torch_dtype": "float32",
 
171
  "eos_token_id": 6,
172
  "is_encoder_decoder": true,
173
  "length_penalty": 2.0,
174
+ "max_length": 64,
175
  "model_type": "vision-encoder-decoder",
176
  "no_repeat_ngram_size": 3,
177
+ "num_beams": 5,
178
  "pad_token_id": 6199,
179
  "tie_word_embeddings": false,
180
  "torch_dtype": "float32",
generation_config.json CHANGED
@@ -5,9 +5,9 @@
5
  "early_stopping": true,
6
  "eos_token_id": 6,
7
  "length_penalty": 2.0,
8
- "max_length": 128,
9
  "no_repeat_ngram_size": 3,
10
- "num_beams": 4,
11
  "pad_token_id": 6199,
12
  "transformers_version": "4.30.1"
13
  }
 
5
  "early_stopping": true,
6
  "eos_token_id": 6,
7
  "length_penalty": 2.0,
8
+ "max_length": 64,
9
  "no_repeat_ngram_size": 3,
10
+ "num_beams": 5,
11
  "pad_token_id": 6199,
12
  "transformers_version": "4.30.1"
13
  }
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:b311005b7c71320934cbb80a426b7a0d571d4d81d1b1ec8b19292ed725af2b7b
3
  size 671844809
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c92293204724d3a75880b69699c6b7c10e68eda51dea0c8f6498b08163f68fe6
3
  size 671844809