TehVenom committed on
Commit
cc173d4
1 Parent(s): e081dcc

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +3 -2
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "H:/Project/Pygmalion/THE_BLENDER/PygmalionAI_pygmalion-6b-dev",
3
  "activation_function": "gelu_new",
4
  "architectures": [
5
  "GPTJForCausalLM"
@@ -48,5 +48,6 @@
48
  "torch_dtype": "float16",
49
  "transformers_version": "4.25.0.dev0",
50
  "use_cache": true,
51
- "vocab_size": 50400
 
52
  }
 
1
  {
2
+ "_name_or_path": "TehVenom_PPO_Pygway-V8p4_Dev-6b",
3
  "activation_function": "gelu_new",
4
  "architectures": [
5
  "GPTJForCausalLM"
 
48
  "torch_dtype": "float16",
49
  "transformers_version": "4.25.0.dev0",
50
  "use_cache": true,
51
+ "vocab_size": 50400,
52
+ "welcome": "You are currently running (ppo_hh-GPT-J[20%] + Janeway[20%]) + Pygmalion V8p4 [60%], \na mix of the models reciprocate/ppo_hh_gpt-j, KoboldAI/GPT-J-6B-Janeway, and PygmalionAI/pygmalion-6b at a ratio of 20:20:50"
53
  }