breadlicker45 committed
Commit 680e007 (parent: f34c08a)

Upload 4 files

config.json CHANGED
@@ -1,5 +1,4 @@
  {
- "_name_or_path": "EleutherAI/pythia-19m",
  "architectures": [
  "GPTNeoXForCausalLM"
  ],
@@ -10,7 +9,6 @@
  "initializer_range": 0.02,
  "intermediate_size": 2048,
  "layer_norm_eps": 1e-05,
- "line_by_line": true,
  "max_position_embeddings": 2048,
  "model_type": "gpt_neox",
  "num_attention_heads": 8,
@@ -18,9 +16,8 @@
  "rotary_emb_base": 10000,
  "rotary_pct": 0.25,
  "tie_word_embeddings": false,
- "torch_dtype": "float32",
- "transformers_version": "4.25.1",
+ "torch_dtype": "float16",
+ "transformers_version": "4.22.2",
  "use_cache": true,
- "use_parallel_residual": true,
  "vocab_size": 50304
  }
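This revision of config.json drops the Pythia `_name_or_path`, switches the stored dtype from float32 to float16, and records an older transformers_version. A minimal sketch of how transformers consumes these fields; the repository id below is a placeholder assumption, not part of this commit:

```python
# Minimal sketch of loading a model against the updated config.json.
# "breadlicker45/MODEL_REPO" is a placeholder repo id, not taken from this commit.
from transformers import AutoConfig, GPTNeoXForCausalLM

config = AutoConfig.from_pretrained("breadlicker45/MODEL_REPO")
print(config.model_type)    # "gpt_neox"
print(config.torch_dtype)   # torch.float16 after this commit (was float32)

# torch_dtype="auto" tells from_pretrained to honour the dtype stored in
# config.json, so the weights are loaded in float16 rather than the default float32.
model = GPTNeoXForCausalLM.from_pretrained("breadlicker45/MODEL_REPO", torch_dtype="auto")
print(model.dtype)          # torch.float16
```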
special_tokens_map.json CHANGED
@@ -1,11 +1,5 @@
  {
- "additional_special_tokens": [
- "<|endoftext|>",
- "<|sep|>",
- "<|acc|>",
- "<|tel|>",
- "<|rrn|>"
- ],
+ "bos_token": "<|endoftext|>",
  "eos_token": "<|endoftext|>",
- "pad_token": "<|endoftext|>"
+ "unk_token": "<|endoftext|>"
  }
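After this change the map declares only bos/eos/unk tokens (all `<|endoftext|>`) and no longer defines a pad token or the extra `<|sep|>`/`<|acc|>`/`<|tel|>`/`<|rrn|>` tokens. A rough sketch of how that surfaces on a loaded tokenizer; the repo id is again a placeholder:

```python
# Sketch only; "breadlicker45/MODEL_REPO" is a placeholder repo id.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("breadlicker45/MODEL_REPO")
print(tok.bos_token, tok.eos_token, tok.unk_token)  # all "<|endoftext|>"
print(tok.pad_token)                                # None once pad_token is removed

# If padded batches are needed, a pad token has to be set again explicitly:
tok.pad_token = tok.eos_token
```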
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -1,6 +1,9 @@
  {
- "name_or_path": "EleutherAI/polyglot-ko-1.3b",
+ "add_prefix_space": false,
+ "bos_token": "<|endoftext|>",
  "eos_token": "<|endoftext|>",
- "pad_token": "<|endoftext|>",
- "tokenizer_class": "PreTrainedTokenizerFast"
+ "name_or_path": "EleutherAI/gpt-neox-20b",
+ "special_tokens_map_file": "/fsx/home-hailey/.cache/huggingface/hub/models--EleutherAI--gpt-neox-20b/snapshots/3523781c8df75f7741687a4284f6f70e1afa12f4/special_tokens_map.json",
+ "tokenizer_class": "GPTNeoXTokenizer",
+ "unk_token": "<|endoftext|>"
  }
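The tokenizer config now points at the EleutherAI/gpt-neox-20b tokenizer with `add_prefix_space` set to false. As a sketch of what that flag means for this byte-level BPE tokenizer, loading it from the upstream repo the config names:

```python
# Sketch of the add_prefix_space=false behaviour, using the upstream
# EleutherAI/gpt-neox-20b tokenizer that this config now references.
from transformers import GPTNeoXTokenizerFast

tok = GPTNeoXTokenizerFast.from_pretrained("EleutherAI/gpt-neox-20b")
# No leading space is inserted before the first word, so an explicit
# leading space changes the first token:
print(tok.tokenize("Hello world"))    # ['Hello', 'Ġworld']
print(tok.tokenize(" Hello world"))   # ['ĠHello', 'Ġworld']
```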