rebeccaoskarsson committed
Commit a0175aa
1 Parent(s): 913c214

correct config files

Files changed (2)
  1. config.json +2 -4
  2. tokenizer_config.json +1 -2
config.json CHANGED
@@ -1,5 +1,4 @@
 {
-  "_name_or_path": "pth13b-erebus-v2",
   "architectures": [
     "GPTNeoXForCausalLM"
   ],
@@ -18,8 +17,7 @@
   "rotary_pct": 0.25,
   "tie_word_embeddings": false,
   "torch_dtype": "float16",
-  "transformers_version": "4.26.1",
-  "use_cache": false,
-  "use_parallel_residual": true,
+  "transformers_version": "4.22.2",
+  "use_cache": true,
   "vocab_size": 50277
 }
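As a quick sanity check, the corrected config.json should load cleanly with transformers' AutoConfig. The snippet below is a minimal sketch, not part of this commit: it assumes a local checkout of the repo at "./model" (a hypothetical path) and only inspects the fields touched here.

    from transformers import AutoConfig

    # Load the corrected config from a local checkout of this repo.
    # "./model" is a placeholder path, not something defined by the commit.
    config = AutoConfig.from_pretrained("./model")

    print(config.model_type)                              # gpt_neox
    print(config.use_cache)                               # True after this change
    print(getattr(config, "transformers_version", None))  # 4.22.2 as pinned above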
tokenizer_config.json CHANGED
@@ -2,8 +2,7 @@
   "add_prefix_space": false,
   "bos_token": "<|endoftext|>",
   "eos_token": "<|endoftext|>",
-  "model_max_length": 1000000000000000019884624838656,
-  "name_or_path": "EleutherAI/pythia-13b-deduped",
+  "name_or_path": "EleutherAI/gpt-neox-20b",
   "special_tokens_map_file": "/fsx/home-hailey/.cache/huggingface/hub/models--EleutherAI--gpt-neox-20b/snapshots/3523781c8df75f7741687a4284f6f70e1afa12f4/special_tokens_map.json",
   "tokenizer_class": "GPTNeoXTokenizer",
   "unk_token": "<|endoftext|>"