MAGAer13 committed
Commit 37399bd
1 Parent(s): d0485fb
Files changed (1):
  1. config.json +2 -2
config.json CHANGED
@@ -205,7 +205,7 @@
   "num_hidden_layers": 6,
   "num_attention_heads": 16,
   "hidden_act": "gelu",
-  "intermediate_size": 4096,
+  "intermediate_size": 2816,
   "hidden_dropout_prob": 0.1,
   "attention_probs_dropout_prob": 0.1,
   "max_position_embeddings": 512,
@@ -250,4 +250,4 @@
   "initializer_factor": 1.0,
   "initializer_range": 0.02,
   "model_type": "mplug-owl"
-}
+}
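
The substantive change is the feed-forward width of this sub-config: intermediate_size drops from 4096 to 2816, presumably so the config matches the shipped checkpoint weights. The neighbouring keys (6 hidden layers, 16 attention heads) suggest one of mPLUG-Owl's smaller sub-modules such as the visual abstractor, though the excerpt does not show the enclosing key. The trailing "}" hunk is only an end-of-file touch-up.

Below is a minimal sketch, standard library only, for checking whether a locally cached config.json already carries the corrected value. The recursive lookup is an assumption: the diff does not show where intermediate_size sits in the JSON tree, and a full mPLUG-Owl config may define it in more than one sub-config, so the sketch collects every occurrence rather than guessing the exact path.

    # Minimal sketch: verify a local config.json reflects this commit.
    # Assumption (not shown in the diff): "intermediate_size" may be nested
    # inside a sub-config, and several sub-configs may each define it.
    import json

    def find_all(node, key, found=None):
        """Recursively collect every value stored under `key` in a JSON tree."""
        if found is None:
            found = []
        if isinstance(node, dict):
            if key in node:
                found.append(node[key])
            for value in node.values():
                find_all(value, key, found)
        elif isinstance(node, list):
            for item in node:
                find_all(item, key, found)
        return found

    with open("config.json") as f:
        config = json.load(f)

    sizes = find_all(config, "intermediate_size")
    print("intermediate_size values:", sizes)
    # After this commit, the sub-config shown in the hunk should report
    # 2816 instead of the stale 4096.
    if 4096 in sizes:
        print("warning: a sub-config still carries the pre-commit value 4096")

Loading checkpoint weights against the stale 4096 value would typically fail with a shape mismatch in the affected feed-forward layers; re-fetching the file (for example with huggingface_hub's hf_hub_download) picks up the fix.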