htriedman committed on
Commit
d5f5c26
1 Parent(s): 5b0444e

Upload MPTForCausalLM

Browse files
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "/content/mpt-7b-instruct",
3
  "architectures": [
4
  "MPTForCausalLM"
5
  ],
@@ -16,8 +16,8 @@
16
  "softmax_scale": null
17
  },
18
  "auto_map": {
19
- "AutoConfig": "configuration_mpt.MPTConfig",
20
- "AutoModelForCausalLM": "modeling_mpt.MPTForCausalLM"
21
  },
22
  "d_model": 4096,
23
  "emb_pdrop": 0,
 
1
  {
2
+ "_name_or_path": "mosaicml/mpt-7b-instruct",
3
  "architectures": [
4
  "MPTForCausalLM"
5
  ],
 
16
  "softmax_scale": null
17
  },
18
  "auto_map": {
19
+ "AutoConfig": "mosaicml/mpt-7b-instruct--configuration_mpt.MPTConfig",
20
+ "AutoModelForCausalLM": "mosaicml/mpt-7b-instruct--modeling_mpt.MPTForCausalLM"
21
  },
22
  "d_model": 4096,
23
  "emb_pdrop": 0,
generation_config.json CHANGED
@@ -1,5 +1,6 @@
1
  {
2
  "_from_model_config": true,
 
3
  "transformers_version": "4.31.0",
4
  "use_cache": false
5
  }
 
1
  {
2
  "_from_model_config": true,
3
+ "eos_token_id": 0,
4
  "transformers_version": "4.31.0",
5
  "use_cache": false
6
  }
pytorch_model-00001-of-00002.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:569bca6f58c5b8d4d0e506accb0646d98935edf567c1d7dcd0c373539a6e598c
3
  size 9943042259
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e6bd51fa39feb52597251d74fa02f3e006002c06e1f1674377f5b9febaa314b4
3
  size 9943042259
pytorch_model-00002-of-00002.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:02e994e0821bf6b55cc61e5ceefa09c5d62ba18dc0deb736aecaf9d22d843d56
3
  size 3355599827
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e2981f89408217949c72dc3bf30b9f83b9bb95e54f02c553eff8f17e3dff0a5d
3
  size 3355599827