system HF staff committed on
Commit
da51c3c
1 Parent(s): fabf523

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +0 -12
config.json CHANGED
@@ -1,9 +1,6 @@
1
  {
2
  "attn_pdrop": 0.1,
3
- "bos_token_id": 0,
4
- "do_sample": false,
5
  "embd_pdrop": 0.1,
6
- "eos_token_ids": 0,
7
  "finetuning_task": null,
8
  "id2label": {
9
  "0": "LABEL_0"
@@ -14,31 +11,22 @@
14
  "LABEL_0": 0
15
  },
16
  "layer_norm_epsilon": 1e-05,
17
- "length_penalty": 1.0,
18
- "max_length": 20,
19
  "n_ctx": 1024,
20
  "n_embd": 768,
21
  "n_head": 12,
22
  "n_layer": 6,
23
  "n_positions": 1024,
24
- "num_beams": 1,
25
  "num_labels": 1,
26
- "num_return_sequences": 1,
27
  "output_attentions": false,
28
  "output_hidden_states": false,
29
  "output_past": true,
30
- "pad_token_id": 0,
31
  "pruned_heads": {},
32
- "repetition_penalty": 1.0,
33
  "resid_pdrop": 0.1,
34
  "summary_activation": null,
35
  "summary_first_dropout": 0.1,
36
  "summary_proj_to_labels": true,
37
  "summary_type": "cls_index",
38
  "summary_use_proj": true,
39
- "temperature": 1.0,
40
- "top_k": 50,
41
- "top_p": 1.0,
42
  "torchscript": false,
43
  "use_bfloat16": false,
44
  "vocab_size": 50257
 
1
  {
2
  "attn_pdrop": 0.1,
 
 
3
  "embd_pdrop": 0.1,
 
4
  "finetuning_task": null,
5
  "id2label": {
6
  "0": "LABEL_0"
 
11
  "LABEL_0": 0
12
  },
13
  "layer_norm_epsilon": 1e-05,
 
 
14
  "n_ctx": 1024,
15
  "n_embd": 768,
16
  "n_head": 12,
17
  "n_layer": 6,
18
  "n_positions": 1024,
 
19
  "num_labels": 1,
 
20
  "output_attentions": false,
21
  "output_hidden_states": false,
22
  "output_past": true,
 
23
  "pruned_heads": {},
 
24
  "resid_pdrop": 0.1,
25
  "summary_activation": null,
26
  "summary_first_dropout": 0.1,
27
  "summary_proj_to_labels": true,
28
  "summary_type": "cls_index",
29
  "summary_use_proj": true,
 
 
 
30
  "torchscript": false,
31
  "use_bfloat16": false,
32
  "vocab_size": 50257