nielsr HF staff committed on
Commit
1a621c9
1 Parent(s): 43bb77d

Upload InstructBlipForConditionalGeneration

Browse files
Files changed (1) hide show
  1. config.json +4 -9
config.json CHANGED
@@ -17,7 +17,6 @@
17
  "begin_suppress_tokens": null,
18
  "bos_token_id": null,
19
  "chunk_size_feed_forward": 0,
20
- "classifier_dropout": null,
21
  "cross_attention_frequency": 2,
22
  "cross_attention_hidden_size": null,
23
  "decoder_start_token_id": null,
@@ -82,7 +81,7 @@
82
  "top_p": 1.0,
83
  "torch_dtype": null,
84
  "torchscript": false,
85
- "transformers_version": "4.30.0.dev0",
86
  "typical_p": 1.0,
87
  "use_bfloat16": false,
88
  "vocab_size": 30523
@@ -164,7 +163,7 @@
164
  "top_p": 1.0,
165
  "torch_dtype": "float32",
166
  "torchscript": false,
167
- "transformers_version": "4.30.0.dev0",
168
  "typical_p": 1.0,
169
  "use_bfloat16": false,
170
  "use_cache": true,
@@ -187,7 +186,6 @@
187
  "decoder_start_token_id": null,
188
  "diversity_penalty": 0.0,
189
  "do_sample": false,
190
- "dropout": 0.0,
191
  "early_stopping": false,
192
  "encoder_no_repeat_ngram_size": 0,
193
  "eos_token_id": null,
@@ -202,7 +200,6 @@
202
  "1": "LABEL_1"
203
  },
204
  "image_size": 224,
205
- "initializer_factor": 1.0,
206
  "initializer_range": 1e-10,
207
  "intermediate_size": 6144,
208
  "is_decoder": false,
@@ -211,7 +208,7 @@
211
  "LABEL_0": 0,
212
  "LABEL_1": 1
213
  },
214
- "layer_norm_eps": 1e-05,
215
  "length_penalty": 1.0,
216
  "max_length": 20,
217
  "min_length": 0,
@@ -220,7 +217,6 @@
220
  "num_attention_heads": 16,
221
  "num_beam_groups": 1,
222
  "num_beams": 1,
223
- "num_channels": 3,
224
  "num_hidden_layers": 39,
225
  "num_return_sequences": 1,
226
  "output_attentions": false,
@@ -230,7 +226,6 @@
230
  "patch_size": 14,
231
  "prefix": null,
232
  "problem_type": null,
233
- "projection_dim": 512,
234
  "pruned_heads": {},
235
  "qkv_bias": true,
236
  "remove_invalid_values": false,
@@ -249,7 +244,7 @@
249
  "top_p": 1.0,
250
  "torch_dtype": null,
251
  "torchscript": false,
252
- "transformers_version": "4.30.0.dev0",
253
  "typical_p": 1.0,
254
  "use_bfloat16": false
255
  }
 
17
  "begin_suppress_tokens": null,
18
  "bos_token_id": null,
19
  "chunk_size_feed_forward": 0,
 
20
  "cross_attention_frequency": 2,
21
  "cross_attention_hidden_size": null,
22
  "decoder_start_token_id": null,
 
81
  "top_p": 1.0,
82
  "torch_dtype": null,
83
  "torchscript": false,
84
+ "transformers_version": "4.31.0.dev0",
85
  "typical_p": 1.0,
86
  "use_bfloat16": false,
87
  "vocab_size": 30523
 
163
  "top_p": 1.0,
164
  "torch_dtype": "float32",
165
  "torchscript": false,
166
+ "transformers_version": "4.31.0.dev0",
167
  "typical_p": 1.0,
168
  "use_bfloat16": false,
169
  "use_cache": true,
 
186
  "decoder_start_token_id": null,
187
  "diversity_penalty": 0.0,
188
  "do_sample": false,
 
189
  "early_stopping": false,
190
  "encoder_no_repeat_ngram_size": 0,
191
  "eos_token_id": null,
 
200
  "1": "LABEL_1"
201
  },
202
  "image_size": 224,
 
203
  "initializer_range": 1e-10,
204
  "intermediate_size": 6144,
205
  "is_decoder": false,
 
208
  "LABEL_0": 0,
209
  "LABEL_1": 1
210
  },
211
+ "layer_norm_eps": 1e-06,
212
  "length_penalty": 1.0,
213
  "max_length": 20,
214
  "min_length": 0,
 
217
  "num_attention_heads": 16,
218
  "num_beam_groups": 1,
219
  "num_beams": 1,
 
220
  "num_hidden_layers": 39,
221
  "num_return_sequences": 1,
222
  "output_attentions": false,
 
226
  "patch_size": 14,
227
  "prefix": null,
228
  "problem_type": null,
 
229
  "pruned_heads": {},
230
  "qkv_bias": true,
231
  "remove_invalid_values": false,
 
244
  "top_p": 1.0,
245
  "torch_dtype": null,
246
  "torchscript": false,
247
+ "transformers_version": "4.31.0.dev0",
248
  "typical_p": 1.0,
249
  "use_bfloat16": false
250
  }