claudfuen committed
Commit
5b98518
1 Parent(s): 69bde45

Update safety_checker/config.json

Files changed (1):
  1. safety_checker/config.json  +3 -7
safety_checker/config.json CHANGED
@@ -14,7 +14,6 @@
   "architectures": null,
   "attention_dropout": 0.0,
   "bad_words_ids": null,
-  "begin_suppress_tokens": null,
   "bos_token_id": 0,
   "chunk_size_feed_forward": 0,
   "cross_attention_hidden_size": null,
@@ -68,7 +67,6 @@
   "return_dict": true,
   "return_dict_in_generate": false,
   "sep_token_id": null,
-  "suppress_tokens": null,
   "task_specific_params": null,
   "temperature": 1.0,
   "tf_legacy_loss": false,
@@ -79,7 +77,7 @@
   "top_p": 1.0,
   "torch_dtype": null,
   "torchscript": false,
-  "transformers_version": "4.24.0",
+  "transformers_version": "4.22.0.dev0",
   "typical_p": 1.0,
   "use_bfloat16": false,
   "vocab_size": 49408
@@ -98,7 +96,6 @@
   "architectures": null,
   "attention_dropout": 0.0,
   "bad_words_ids": null,
-  "begin_suppress_tokens": null,
   "bos_token_id": null,
   "chunk_size_feed_forward": 0,
   "cross_attention_hidden_size": null,
@@ -154,7 +151,6 @@
   "return_dict": true,
   "return_dict_in_generate": false,
   "sep_token_id": null,
-  "suppress_tokens": null,
   "task_specific_params": null,
   "temperature": 1.0,
   "tf_legacy_loss": false,
@@ -165,7 +161,7 @@
   "top_p": 1.0,
   "torch_dtype": null,
   "torchscript": false,
-  "transformers_version": "4.24.0",
+  "transformers_version": "4.22.0.dev0",
   "typical_p": 1.0,
   "use_bfloat16": false
   },
@@ -176,4 +172,4 @@
   "num_hidden_layers": 24,
   "patch_size": 14
   }
- }
+ }
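
For reference, the effect of this commit can be checked locally with a small script. The sketch below is illustrative only and uses nothing beyond the Python standard library; it assumes the repository has been downloaded so that safety_checker/config.json is reachable at that relative path, and it simply confirms the suppress-token keys are gone and reports the transformers_version recorded in each nested section.

```python
import json
from pathlib import Path

# Hypothetical local path: adjust to wherever the model repository is checked out.
config_path = Path("safety_checker/config.json")

with config_path.open() as f:
    config = json.load(f)

# The safety checker config nests per-modality settings under
# "text_config" and "vision_config"; this commit touches both sections.
for section in ("text_config", "vision_config"):
    sub = config.get(section, {})
    # After this commit, neither section should still carry the suppress-token keys.
    for key in ("begin_suppress_tokens", "suppress_tokens"):
        if key in sub:
            print(f"unexpected key {key!r} in {section}")
    print(section, "transformers_version:", sub.get("transformers_version"))
```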