Commit c039b1c by hoangphu7122002ai
1 Parent(s): 6a810bf

valid best loss

Files changed (2)
  1. config.json +8 -10
  2. pytorch_model.bin +2 -2
config.json CHANGED
@@ -1,11 +1,10 @@
 {
-  "_commit_hash": "ef530ffa8ff545931fe09d80d5e05f914ed3305d",
-  "_name_or_path": "aravind-selvam/pix2struct_chart",
+  "_commit_hash": "b5e9052166f90aa6cd57e09002bad39a9a7b9914",
+  "_name_or_path": "google/matcha-base",
   "architectures": [
     "Pix2StructForConditionalGeneration"
   ],
-  "decoder_end_token_id": 50349,
-  "decoder_start_token_id": 50344,
+  "decoder_start_token_id": 0,
   "eos_token_id": 1,
   "initializer_factor": 1.0,
   "initializer_range": 0.02,
@@ -28,7 +27,7 @@
   "dense_act_fn": "gelu_new",
   "diversity_penalty": 0.0,
   "do_sample": false,
-  "dropout_rate": 0.2,
+  "dropout_rate": 0.1,
   "early_stopping": false,
   "encoder_hidden_size": 768,
   "encoder_no_repeat_ngram_size": 0,
@@ -44,7 +43,7 @@
   },
   "initializer_factor": 1.0,
   "initializer_range": 0.02,
-  "is_decoder": false,
+  "is_decoder": true,
   "is_encoder_decoder": false,
   "label2id": {
     "LABEL_0": 0,
@@ -90,7 +89,7 @@
   "typical_p": 1.0,
   "use_bfloat16": false,
   "use_cache": false,
-  "vocab_size": 50353
+  "vocab_size": 50352
   },
   "tie_word_embeddings": false,
   "torch_dtype": "float32",
@@ -99,7 +98,7 @@
   "_name_or_path": "",
   "add_cross_attention": false,
   "architectures": null,
-  "attention_dropout": 0.2,
+  "attention_dropout": 0.0,
   "bad_words_ids": null,
   "begin_suppress_tokens": null,
   "bos_token_id": null,
@@ -111,7 +110,7 @@
   "dense_act_fn": "gelu_new",
   "diversity_penalty": 0.0,
   "do_sample": false,
-  "dropout_rate": 0.2,
+  "dropout_rate": 0.0,
   "early_stopping": false,
   "encoder_no_repeat_ngram_size": 0,
   "eos_token_id": null,
@@ -119,7 +118,6 @@
   "finetuning_task": null,
   "forced_bos_token_id": null,
   "forced_eos_token_id": null,
-  "hidden_dropout_prob": 0.2,
   "hidden_size": 768,
   "id2label": {
     "0": "LABEL_0",
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:26ba39101bd7ef8a264a600d2d41debb4b9fee2854d7911ad79d019b2663c951
-size 1129907777
+oid sha256:160a695779f92ee5319563df01a0a3bf71d63c55d415025ea62f37d9767fa7c3
+size 1129905601
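Because pytorch_model.bin is tracked with Git LFS, the diff above only swaps the pointer file (blob sha256 and byte size). A quick integrity check against the new pointer might look like the sketch below, using only the Python standard library; the local file path is assumed.

```python
# Sketch: verify a downloaded pytorch_model.bin against the Git LFS pointer
# recorded in this commit (size and sha256 copied from the diff above).
import hashlib
from pathlib import Path

EXPECTED_SHA256 = "160a695779f92ee5319563df01a0a3bf71d63c55d415025ea62f37d9767fa7c3"
EXPECTED_SIZE = 1129905601  # bytes, roughly 1.13 GB

def matches_pointer(path: str) -> bool:
    p = Path(path)
    if p.stat().st_size != EXPECTED_SIZE:
        return False
    digest = hashlib.sha256()
    with p.open("rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
            digest.update(chunk)
    return digest.hexdigest() == EXPECTED_SHA256

print(matches_pointer("pytorch_model.bin"))  # assumed local download path
```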