Ontocord.AI committed
Commit: f28a2f8
Parent: 067de39

Upload MPTForCausalLM

Files changed (2):
  1. config.json (+4 -3)
  2. generation_config.json (+1 -1)
config.json CHANGED
@@ -1,4 +1,5 @@
 {
+  "_name_or_path": "TehVenom/MPT-7b-WizardLM_Uncensored-Storywriter-Merge",
   "architectures": [
     "MPTForCausalLM"
   ],
@@ -15,8 +16,8 @@
     "softmax_scale": null
   },
   "auto_map": {
-    "AutoConfig": "configuration_mpt.MPTConfig",
-    "AutoModelForCausalLM": "modeling_mpt.MPTForCausalLM"
+    "AutoConfig": "TehVenom/MPT-7b-WizardLM_Uncensored-Storywriter-Merge--configuration_mpt.MPTConfig",
+    "AutoModelForCausalLM": "TehVenom/MPT-7b-WizardLM_Uncensored-Storywriter-Merge--modeling_mpt.MPTForCausalLM"
   },
   "d_model": 4096,
   "emb_pdrop": 0,
@@ -45,7 +46,7 @@
   "resid_pdrop": 0,
   "tokenizer_name": "EleutherAI/gpt-neox-20b",
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.28.1",
+  "transformers_version": "4.30.1",
   "use_cache": false,
   "verbose": 0,
   "vocab_size": 50432
generation_config.json CHANGED
@@ -1,5 +1,5 @@
 {
   "_from_model_config": true,
-  "transformers_version": "4.28.1",
+  "transformers_version": "4.30.1",
   "use_cache": false
 }
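
Only the transformers_version stamp changes here; "use_cache": false still disables the key/value cache by default at generation time. A short sketch of reading these defaults back, again assuming the repo id above:

    from transformers import GenerationConfig

    gen_config = GenerationConfig.from_pretrained(
        "TehVenom/MPT-7b-WizardLM_Uncensored-Storywriter-Merge"
    )
    print(gen_config.use_cache)  # False, per "use_cache": false above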