esuriddick committed
Commit
4b6bccc
1 Parent(s): 47c92ce

Tried to clean the config file to avoid errors in Python.

Files changed (1)
  1. config.json +2 -8
config.json CHANGED
@@ -30,6 +30,7 @@
   "encoder_layerdrop": 0.0,
   "encoder_layers": 6,
   "eos_token_id": 2,
+  "gradient_checkpointing": false,
   "id2label": {
     "0": "LABEL_0",
     "1": "LABEL_1",
@@ -42,16 +43,9 @@
     "LABEL_1": 1,
     "LABEL_2": 2
   },
-  "length_penalty": 2,
   "max_decoder_position_embeddings": 1024,
   "max_encoder_position_embeddings": 16384,
-  "max_length": 1024,
-  "max_new_tokens": 1024,
-  "min_length": 100,
-  "min_new_tokens": 100,
-  "model_type": "led",
-  "no_repeat_ngram_size": 3,
-  "num_beams": 4,
+  "model_type": "led",
   "num_hidden_layers": 6,
   "pad_token_id": 1,
   "torch_dtype": "float32",
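For context, the removed keys are text-generation defaults rather than architecture settings, which is presumably what caused the errors mentioned in the commit message. A minimal sketch, assuming the Hugging Face Transformers library and a placeholder model ID (the actual repository name is not shown here), of supplying the same values at generation time instead of keeping them in config.json:

# Minimal sketch (not part of this commit): pass the removed generation
# settings to generate() rather than storing them in config.json.
# "user/led-checkpoint" is a placeholder, not the actual repository name.
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

model_id = "user/led-checkpoint"  # placeholder model ID
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForSeq2SeqLM.from_pretrained(model_id)

inputs = tokenizer("A long document to summarize ...", return_tensors="pt")
summary_ids = model.generate(
    **inputs,
    max_length=1024,            # same values as the keys removed above
    min_length=100,
    length_penalty=2.0,
    no_repeat_ngram_size=3,
    num_beams=4,
)
print(tokenizer.decode(summary_ids[0], skip_special_tokens=True))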