I have corrected the casing of the class names to Mpt and MptConfig.
#1
by
amarahiqbal
- opened
- config.json +2 -2
config.json
CHANGED
@@ -16,8 +16,8 @@
|
|
16 |
"softmax_scale": null
|
17 |
},
|
18 |
"auto_map": {
|
19 |
-
"AutoConfig": "mosaicml/mpt-7b--configuration_mpt.MPTConfig",
|
20 |
-
"AutoModelForCausalLM": "mosaicml/mpt-7b--modeling_mpt.MPTForCausalLM"
|
21 |
},
|
22 |
"d_model": 128,
|
23 |
"emb_pdrop": 0,
|
|
|
16 |
"softmax_scale": null
|
17 |
},
|
18 |
"auto_map": {
|
19 |
+
"AutoConfig": "mosaicml/mpt-7b--configuration_mpt.MptConfig",
|
20 |
+
"AutoModelForCausalLM": "mosaicml/mpt-7b--modeling_mpt.MptForCausalLM"
|
21 |
},
|
22 |
"d_model": 128,
|
23 |
"emb_pdrop": 0,
|