Zimix committed on
Commit
b2facde
1 Parent(s): 67838dd

fix conversion bug in FFN

Browse files
Files changed (2) hide show
  1. config.json +1 -2
  2. tokenizer.json +0 -0
config.json CHANGED
@@ -10,7 +10,6 @@
10
  "initializer_range": 0.02,
11
  "layer_norm_epsilon": 1e-05,
12
  "model_type": "gpt2",
13
- "n_ctx": 1024,
14
  "n_embd": 3072,
15
  "n_head": 32,
16
  "n_inner": 12288,
@@ -26,7 +25,7 @@
26
  "summary_type": "cls_index",
27
  "summary_use_proj": true,
28
  "tokenizer_class": "GPT2TokenizerFast",
29
- "transformers_version": "4.12.3",
30
  "use_cache": true,
31
  "vocab_size": 50304
32
  }
 
10
  "initializer_range": 0.02,
11
  "layer_norm_epsilon": 1e-05,
12
  "model_type": "gpt2",
 
13
  "n_embd": 3072,
14
  "n_head": 32,
15
  "n_inner": 12288,
 
25
  "summary_type": "cls_index",
26
  "summary_use_proj": true,
27
  "tokenizer_class": "GPT2TokenizerFast",
28
+ "transformers_version": "4.18.0",
29
  "use_cache": true,
30
  "vocab_size": 50304
31
  }
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff