Aixile committed on
Commit
a9189c4
1 Parent(s): 3d68279

support older transformers versions

Files changed (1)
  1. config.json +18 -8
config.json CHANGED
@@ -13,13 +13,13 @@
     "architectures": null,
     "attention_dropout": 0.0,
     "bad_words_ids": null,
-    "begin_suppress_tokens": null,
     "bos_token_id": 0,
     "chunk_size_feed_forward": 0,
     "cross_attention_hidden_size": null,
     "decoder_start_token_id": null,
     "diversity_penalty": 0.0,
     "do_sample": false,
+    "dropout": 0.0,
     "early_stopping": false,
     "encoder_no_repeat_ngram_size": 0,
     "eos_token_id": 2,
@@ -60,14 +60,12 @@
     "pad_token_id": 1,
     "prefix": null,
     "problem_type": null,
-    "projection_dim": 512,
     "pruned_heads": {},
     "remove_invalid_values": false,
     "repetition_penalty": 1.0,
     "return_dict": true,
     "return_dict_in_generate": false,
     "sep_token_id": null,
-    "suppress_tokens": null,
     "task_specific_params": null,
     "temperature": 1.0,
     "tf_legacy_loss": false,
@@ -78,11 +76,17 @@
     "top_p": 1.0,
     "torch_dtype": null,
     "torchscript": false,
-    "transformers_version": "4.30.0.dev0",
+    "transformers_version": "4.23.0.dev0",
     "typical_p": 1.0,
     "use_bfloat16": false,
     "vocab_size": 49408
   },
+  "text_config_dict": {
+    "hidden_act": "gelu",
+    "hidden_size": 768,
+    "intermediate_size": 3072,
+    "num_attention_heads": 12
+  },
   "torch_dtype": "float32",
   "transformers_version": null,
   "vision_config": {
@@ -91,13 +95,13 @@
     "architectures": null,
     "attention_dropout": 0.0,
     "bad_words_ids": null,
-    "begin_suppress_tokens": null,
     "bos_token_id": null,
     "chunk_size_feed_forward": 0,
     "cross_attention_hidden_size": null,
     "decoder_start_token_id": null,
     "diversity_penalty": 0.0,
     "do_sample": false,
+    "dropout": 0.0,
     "early_stopping": false,
     "encoder_no_repeat_ngram_size": 0,
     "eos_token_id": null,
@@ -140,14 +144,12 @@
     "patch_size": 14,
     "prefix": null,
     "problem_type": null,
-    "projection_dim": 512,
     "pruned_heads": {},
     "remove_invalid_values": false,
     "repetition_penalty": 1.0,
     "return_dict": true,
     "return_dict_in_generate": false,
     "sep_token_id": null,
-    "suppress_tokens": null,
     "task_specific_params": null,
     "temperature": 1.0,
     "tf_legacy_loss": false,
@@ -158,8 +160,16 @@
     "top_p": 1.0,
     "torch_dtype": null,
     "torchscript": false,
-    "transformers_version": "4.30.0.dev0",
+    "transformers_version": "4.23.0.dev0",
     "typical_p": 1.0,
     "use_bfloat16": false
+  },
+  "vision_config_dict": {
+    "hidden_act": "gelu",
+    "hidden_size": 1024,
+    "intermediate_size": 4096,
+    "num_attention_heads": 16,
+    "num_hidden_layers": 24,
+    "patch_size": 14
   }
 }
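A quick way to sanity-check a compatibility edit like this is to parse the updated config with the transformers release being targeted and confirm that both CLIP sub-configs resolve to the expected shapes. The sketch below is illustrative, not part of this commit: it assumes the edited config.json sits in the current working directory, and the path is a placeholder.

# Sanity check: parse the edited config with the transformers release you
# want to support (e.g. a 4.23.x install, matching the pinned
# "transformers_version": "4.23.0.dev0") and inspect the resolved sub-configs.
# The "." path is a placeholder: point it at the directory holding config.json.
from transformers import CLIPConfig

config = CLIPConfig.from_pretrained(".")

# Older releases rebuild the sub-configs from text_config_dict / vision_config_dict,
# while newer ones read text_config / vision_config; both should report the same shapes.
print(config.text_config.hidden_size)    # 768
print(config.vision_config.hidden_size)  # 1024
print(config.vision_config.patch_size)   # 14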