AdamOswald1 committed on
Commit ae37e52
1 Parent(s): 501f4b5

Update safety_checker/config.json

Files changed (1)
  1. safety_checker/config.json +0 -172
safety_checker/config.json CHANGED
@@ -177,175 +177,3 @@
     "patch_size": 14
   }
 }
-
-{
-  "_name_or_path": "CompVis/stable-diffusion-safety-checker",
-  "architectures": [
-    "StableDiffusionSafetyChecker"
-  ],
-  "initializer_factor": 1.0,
-  "logit_scale_init_value": 2.6592,
-  "model_type": "clip",
-  "projection_dim": 768,
-  "text_config": {
-    "_name_or_path": "",
-    "add_cross_attention": false,
-    "architectures": null,
-    "attention_dropout": 0.0,
-    "bad_words_ids": null,
-    "bos_token_id": 0,
-    "chunk_size_feed_forward": 0,
-    "cross_attention_hidden_size": null,
-    "decoder_start_token_id": null,
-    "diversity_penalty": 0.0,
-    "do_sample": false,
-    "dropout": 0.0,
-    "early_stopping": false,
-    "encoder_no_repeat_ngram_size": 0,
-    "eos_token_id": 2,
-    "exponential_decay_length_penalty": null,
-    "finetuning_task": null,
-    "forced_bos_token_id": null,
-    "forced_eos_token_id": null,
-    "hidden_act": "quick_gelu",
-    "hidden_size": 768,
-    "id2label": {
-      "0": "LABEL_0",
-      "1": "LABEL_1"
-    },
-    "initializer_factor": 1.0,
-    "initializer_range": 0.02,
-    "intermediate_size": 3072,
-    "is_decoder": false,
-    "is_encoder_decoder": false,
-    "label2id": {
-      "LABEL_0": 0,
-      "LABEL_1": 1
-    },
-    "layer_norm_eps": 1e-05,
-    "length_penalty": 1.0,
-    "max_length": 20,
-    "max_position_embeddings": 77,
-    "min_length": 0,
-    "model_type": "clip_text_model",
-    "no_repeat_ngram_size": 0,
-    "num_attention_heads": 12,
-    "num_beam_groups": 1,
-    "num_beams": 1,
-    "num_hidden_layers": 12,
-    "num_return_sequences": 1,
-    "output_attentions": false,
-    "output_hidden_states": false,
-    "output_scores": false,
-    "pad_token_id": 1,
-    "prefix": null,
-    "problem_type": null,
-    "pruned_heads": {},
-    "remove_invalid_values": false,
-    "repetition_penalty": 1.0,
-    "return_dict": true,
-    "return_dict_in_generate": false,
-    "sep_token_id": null,
-    "task_specific_params": null,
-    "temperature": 1.0,
-    "tie_encoder_decoder": false,
-    "tie_word_embeddings": true,
-    "tokenizer_class": null,
-    "top_k": 50,
-    "top_p": 1.0,
-    "torch_dtype": null,
-    "torchscript": false,
-    "transformers_version": "4.19.2",
-    "typical_p": 1.0,
-    "use_bfloat16": false,
-    "vocab_size": 49408
-  },
-  "text_config_dict": {
-    "hidden_size": 768,
-    "intermediate_size": 3072,
-    "num_attention_heads": 12,
-    "num_hidden_layers": 12
-  },
-  "torch_dtype": "float32",
-  "transformers_version": null,
-  "vision_config": {
-    "_name_or_path": "",
-    "add_cross_attention": false,
-    "architectures": null,
-    "attention_dropout": 0.0,
-    "bad_words_ids": null,
-    "bos_token_id": null,
-    "chunk_size_feed_forward": 0,
-    "cross_attention_hidden_size": null,
-    "decoder_start_token_id": null,
-    "diversity_penalty": 0.0,
-    "do_sample": false,
-    "dropout": 0.0,
-    "early_stopping": false,
-    "encoder_no_repeat_ngram_size": 0,
-    "eos_token_id": null,
-    "exponential_decay_length_penalty": null,
-    "finetuning_task": null,
-    "forced_bos_token_id": null,
-    "forced_eos_token_id": null,
-    "hidden_act": "quick_gelu",
-    "hidden_size": 1024,
-    "id2label": {
-      "0": "LABEL_0",
-      "1": "LABEL_1"
-    },
-    "image_size": 224,
-    "initializer_factor": 1.0,
-    "initializer_range": 0.02,
-    "intermediate_size": 4096,
-    "is_decoder": false,
-    "is_encoder_decoder": false,
-    "label2id": {
-      "LABEL_0": 0,
-      "LABEL_1": 1
-    },
-    "layer_norm_eps": 1e-05,
-    "length_penalty": 1.0,
-    "max_length": 20,
-    "min_length": 0,
-    "model_type": "clip_vision_model",
-    "no_repeat_ngram_size": 0,
-    "num_attention_heads": 16,
-    "num_beam_groups": 1,
-    "num_beams": 1,
-    "num_hidden_layers": 24,
-    "num_return_sequences": 1,
-    "output_attentions": false,
-    "output_hidden_states": false,
-    "output_scores": false,
-    "pad_token_id": null,
-    "patch_size": 14,
-    "prefix": null,
-    "problem_type": null,
-    "pruned_heads": {},
-    "remove_invalid_values": false,
-    "repetition_penalty": 1.0,
-    "return_dict": true,
-    "return_dict_in_generate": false,
-    "sep_token_id": null,
-    "task_specific_params": null,
-    "temperature": 1.0,
-    "tie_encoder_decoder": false,
-    "tie_word_embeddings": true,
-    "tokenizer_class": null,
-    "top_k": 50,
-    "top_p": 1.0,
-    "torch_dtype": null,
-    "torchscript": false,
-    "transformers_version": "4.19.2",
-    "typical_p": 1.0,
-    "use_bfloat16": false
-  },
-  "vision_config_dict": {
-    "hidden_size": 1024,
-    "intermediate_size": 4096,
-    "num_attention_heads": 16,
-    "num_hidden_layers": 24,
-    "patch_size": 14
-  }
-}
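
The removed block was a second, duplicated top-level JSON object appended after the closing brace of the config, which left safety_checker/config.json as invalid JSON. The sketch below is illustrative, not part of the commit: it assumes a local clone of the repository with the script run from the repo root, and it shows that the cleaned-up file parses with the standard library while a file with a re-appended duplicate object does not.

import json
from pathlib import Path

# Illustrative path: assumes a local checkout with this layout.
config_path = Path("safety_checker/config.json")

# After this commit the file holds a single JSON object and parses cleanly.
config = json.loads(config_path.read_text())
print(config["model_type"])  # expected "clip", per the duplicate block shown in the diff

# Before the commit, a full copy of the object sat after the closing brace.
# Two concatenated top-level objects are not valid JSON, so parsing fails.
pre_commit_text = config_path.read_text() + "\n\n" + json.dumps(config, indent=2)
try:
    json.loads(pre_commit_text)
except json.JSONDecodeError as err:
    print(f"duplicate block rejected: {err}")  # e.g. "Extra data: line ..."

Loaders in transformers and diffusers read this config with an ordinary JSON parser when instantiating the safety checker, so the pre-commit file would be expected to fail at load time in the same way.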