system (HF staff) committed
Commit aab91e1
1 Parent(s): d2a7212

Update config.json

Files changed (1)
  1. config.json +10 -5
config.json CHANGED
@@ -1,4 +1,5 @@
 {
+  "_num_labels": 1,
   "activation_function": "gelu_new",
   "architectures": [
     "GPT2LMHeadModel"
@@ -7,17 +8,21 @@
   "bos_token_id": 50256,
   "embd_pdrop": 0.1,
   "eos_token_id": 50256,
+  "id2label": {
+    "0": "LABEL_0"
+  },
   "initializer_range": 0.02,
+  "label2id": {
+    "LABEL_0": 0
+  },
   "layer_norm_epsilon": 1e-05,
   "model_type": "gpt2",
   "n_ctx": 1024,
-  "n_embd": 1024,
-  "n_head": 16,
+  "n_embd": 1280,
+  "n_head": 20,
   "n_inner": null,
-  "n_layer": 24,
+  "n_layer": 36,
   "n_positions": 1024,
-  "n_special": 0,
-  "predict_special_tokens": true,
   "resid_pdrop": 0.1,
   "summary_activation": null,
   "summary_first_dropout": 0.1,
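
For reference, a minimal sketch of how the updated fields surface at load time, assuming the transformers library is installed. The repo id "gpt2-large" is an assumption: the new dimensions (n_embd=1280, n_head=20, n_layer=36) match that public checkpoint, but this commit may belong to a different repository.

    from transformers import GPT2Config

    # Load the config; "gpt2-large" is illustrative, chosen because its
    # dimensions match the values introduced by this commit.
    config = GPT2Config.from_pretrained("gpt2-large")

    print(config.n_embd)    # 1280
    print(config.n_head)    # 20
    print(config.n_layer)   # 36
    print(config.id2label)  # {0: 'LABEL_0'}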