naclbit committed
Commit
c23f0b5
1 Parent(s): 5cf9f7e

Added instruct tokens and config fix

Files changed (2)
  1. config.json +1 -29
  2. tokenizer_config.json +1 -1
config.json CHANGED
@@ -1,8 +1,4 @@
  {
- "activation_function": "gelu_new",
- "architectures": [
-   "GPTNeoXForCausalLM"
- ],
  "attn_pdrop": 0.0,
  "pad_token_id": 0,
  "unk_token_id": 1,
@@ -10,31 +6,7 @@
  "sep_token_id": 3,
  "eos_token_id": 4,
  "embd_pdrop": 0.0,
- "gradient_checkpointing": false,
- "initializer_range": 0.02,
- "layer_norm_epsilon": 1e-05,
- "model_type": "gptj",
- "model_dtype": "fp16",
- "n_embd": 4096,
- "n_head": 16,
- "n_layer": 32,
- "n_positions": 2048,
- "rotary": true,
- "rotary_dim": 64,
- "summary_activation": null,
- "summary_first_dropout": 0.1,
- "summary_proj_to_labels": true,
- "summary_type": "cls_index",
- "summary_use_proj": true,
  "transformers_version": "4.10.0.dev0",
  "tokenizer_class": "T5Tokenizer",
- "task_specific_params": {
-   "text-generation": {
-     "do_sample": true,
-     "temperature": 1.0,
-     "max_length": 50
-   }
- },
- "use_cache": true,
- "vocab_size": 52000
+ "vocab_size": 52100
  }
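
The net effect on config.json is to drop the architecture and text-generation fields and to raise vocab_size from 52000 to 52100, presumably to leave room for the newly added instruct tokens. A minimal sanity check in Python, assuming a local checkout of the repository (the "./config.json" path is illustrative, not part of this commit):

import json

# Parse the trimmed config from a local checkout (path is an assumption).
with open("./config.json", "r", encoding="utf-8") as f:
    config = json.load(f)

# This commit raises vocab_size from 52000 to 52100.
assert config["vocab_size"] == 52100

# Token-id fields kept by the trimmed config.
print(config["pad_token_id"], config["unk_token_id"],
      config["sep_token_id"], config["eos_token_id"])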
tokenizer_config.json CHANGED
@@ -1 +1 @@
- {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>", "sep_token": "<sep>", "cls_token": "<cls>"}
+ {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>", "sep_token": "<sep>", "cls_token": "<cls>", "tokenizer_class": "T5Tokenizer"}