LeroyDyer committed on
Commit 842c047
1 Parent(s): 7b5e30b

Update config.json

Files changed (1)
  1. config.json +1 -28
config.json CHANGED
@@ -1,9 +1,8 @@
 {
 {
   "architectures": [
-    "MistralForCausalLM"
+    "MistralForCausalLM", "WhisperForConditionalGeneration"
   ],
-  "_name_or_path": "LeroyDyer/Mixtral_AI_Multi_Input_Model",
   "attention_dropout": 0.0,
   "bos_token_id": 1,
   "eos_token_id": 2,
@@ -24,12 +23,6 @@
   "transformers_version": "4.36.0",
   "use_cache": true,
   "vocab_size": 32000
-  }
-  {
-  "_name_or_path": "LeroyDyer/Mixtral_AI_Multi_Input_Model/Sound",
-  "architectures": [
-    "WhisperForConditionalGeneration"
-  ],
   "activation_dropout": 0.0,
   "activation_function": "gelu",
   "attention_dropout": 0.0,
@@ -37,7 +30,6 @@
     220,
     50256
   ],
-  "bos_token_id": 50257,
   "d_model": 1024,
   "decoder_attention_heads": 16,
   "decoder_ffn_dim": 4096,
@@ -158,16 +150,6 @@
     50360,
     50361
   ],
-  "torch_dtype": "float32",
-  "transformers_version": "4.27.0.dev0",
-  "use_cache": true,
-  "vocab_size": 51864
-  }
-  {
-  "_name_or_path": "LeroyDyer/Mixtral_AI_Multi_Input_Model/Video",
-  "architectures": [
-    "XClipModel"
-  ],
   "initializer_factor": 1.0,
   "logit_scale_init_value": 2.6592,
   "model_type": "xclip",
@@ -246,11 +228,8 @@
   "top_p": 1.0,
   "torch_dtype": null,
   "torchscript": false,
-  "transformers_version": "4.22.0.dev0",
   "typical_p": 1.0,
   "use_bfloat16": false,
-  "vocab_size": 49408
-  },
   "text_config_dict": null,
   "torch_dtype": "float32",
   "transformers_version": null,
@@ -336,12 +315,6 @@
   "use_bfloat16": false
   "vision_config_dict": null
   }
-  }
-  {
-  "_name_or_path": "LeroyDyer/Mixtral_AI_Multi_Input_Model/Image",
-  "architectures": [
-    "CLIPModel"
-  ],
   "initializer_factor": 1.0,
   "logit_scale_init_value": 2.6592,
   "model_type": "clip",
 