Files changed (1) hide show
  1. config.json +0 -2
config.json CHANGED
@@ -4,7 +4,6 @@
4
  ],
5
  "model_type": "openlm",
6
  "params": null,
7
- "params_args_dict": {
8
  "apply_qk_norm": true,
9
  "attn_activation": null,
10
  "attn_name": "auto",
@@ -30,7 +29,6 @@
30
  "seq_len": 2048,
31
  "vocab_size": 50432,
32
  "weight_tying": false
33
- },
34
  "torch_dtype": "float32",
35
  "transformers_version": "4.40.2"
36
  }
 
4
  ],
5
  "model_type": "openlm",
6
  "params": null,
 
7
  "apply_qk_norm": true,
8
  "attn_activation": null,
9
  "attn_name": "auto",
 
29
  "seq_len": 2048,
30
  "vocab_size": 50432,
31
  "weight_tying": false,
 
32
  "torch_dtype": "float32",
33
  "transformers_version": "4.40.2"
34
  }