gyulukeyi committed on
Commit
6fb5389
1 Parent(s): 5534de4

try resolving config.json issues

Files changed (1)
  1. config.json +0 -10
config.json CHANGED
@@ -1,6 +1,5 @@
  {
  "_name_or_path": "gyulukeyi/nallm-bart",
- // "_name_or_path": "gogamza/kobart-base-v2",
  "activation_dropout": 0.0,
  "activation_function": "gelu",
  "add_bias_logits": false,
@@ -10,7 +9,6 @@
  ],
  "attention_dropout": 0.0,
  "author": "Gyu-min Lee (his.nigel at gmail dot com)",
- // "author": "Heewon Jeon(madjakarta@gmail.com)",
  "bos_token_id": 1,
  "classif_dropout": 0.1,
  "classifier_dropout": 0.1,
@@ -31,17 +29,9 @@
  "force_bos_token_to_be_generated": false,
  "forced_eos_token_id": 1,
  "gradient_checkpointing": false,
- // "id2label": {
- // "0": "NEGATIVE",
- // "1": "POSITIVE"
- // },
  "init_std": 0.02,
  "is_encoder_decoder": true,
  "kobart_version": 2.0,
- // "label2id": {
- // "NEGATIVE": 0,
- // "POSITIVE": 1
- //},
  "max_position_embeddings": 1026,
  "model_type": "bart",
  "normalize_before": false,
 