ArthurZ committed
Commit 0f53730 · 1 Parent(s): d481d73

Update config.json

Files changed (1): config.json (+8 -10)
config.json CHANGED
@@ -7,7 +7,7 @@
   "audio_config": {
     "_name_or_path": "",
     "add_cross_attention": false,
-    "aff_block_r": 4,
+    "aff_block_r": 2,
     "architectures": null,
     "attention_dropout": 0.1,
     "attention_probs_dropout_prob": 0.0,
@@ -20,8 +20,6 @@
     "depths": [
       2,
       2,
-      6,
-      2
     ],
     "diversity_penalty": 0.0,
     "do_sample": false,
@@ -73,7 +71,7 @@
     "num_beams": 1,
     "num_channels": 3,
     "num_classes": 527,
-    "num_hidden_layers": 4,
+    "num_hidden_layers": 3,
     "num_mel_bins": 64,
     "num_return_sequences": 1,
     "output_attentions": false,
@@ -116,8 +114,8 @@
   "initializer_factor": 1.0,
   "logit_scale_init_value": 14.285714285714285,
   "model_type": "clap",
-  "num_hidden_layers": 9,
-  "projection_dim": 64,
+  "num_hidden_layers": 3,
+  "projection_dim": 32,
   "projection_hidden_act": "relu",
   "text_config": {
     "_name_or_path": "",
@@ -165,10 +163,10 @@
     "min_length": 0,
     "model_type": "clap_text_model",
     "no_repeat_ngram_size": 0,
-    "num_attention_heads": 4,
+    "num_attention_heads": 2,
     "num_beam_groups": 1,
     "num_beams": 1,
-    "num_hidden_layers": 5,
+    "num_hidden_layers": 3,
     "num_return_sequences": 1,
     "output_attentions": false,
     "output_hidden_states": false,
@@ -177,7 +175,7 @@
     "position_embedding_type": "absolute",
     "prefix": null,
     "problem_type": null,
-    "projection_dim": 64,
+    "projection_dim": 32,
     "projection_hidden_act": "relu",
     "pruned_heads": {},
     "remove_invalid_values": false,
@@ -201,7 +199,7 @@
     "typical_p": 1.0,
     "use_bfloat16": false,
     "use_cache": true,
-    "vocab_size": 99
+    "vocab_size": 50265
   },
   "torch_dtype": "float32",
   "transformers_version": null