johngiorgi committed on
Commit
6d66d93
1 Parent(s): f133a47

Upload config.json

Browse files
Files changed (1) hide show
  1. config.json +7 -4
config.json CHANGED
@@ -1,4 +1,5 @@
1
  {
 
2
  "activation_dropout": 0.0,
3
  "activation_function": "gelu",
4
  "architectures": [
@@ -41,13 +42,15 @@
41
  "LABEL_1": 1,
42
  "LABEL_2": 2
43
  },
44
- "max_decoder_position_embeddings": 1026,
45
- "max_encoder_position_embeddings": 16386,
46
  "model_type": "led",
 
 
47
  "num_hidden_layers": 6,
48
  "pad_token_id": 1,
49
  "torch_dtype": "float32",
50
- "transformers_version": "4.20.1",
51
  "use_cache": true,
52
- "vocab_size": 50278
53
  }
1
  {
2
+ "_name_or_path": "allenai/led-base-16384",
3
  "activation_dropout": 0.0,
4
  "activation_function": "gelu",
5
  "architectures": [
42
  "LABEL_1": 1,
43
  "LABEL_2": 2
44
  },
45
+ "max_decoder_position_embeddings": 1024,
46
+ "max_encoder_position_embeddings": 16384,
47
  "model_type": "led",
48
+ "no_repeat_ngram_size": 3,
49
+ "num_beams": 2,
50
  "num_hidden_layers": 6,
51
  "pad_token_id": 1,
52
  "torch_dtype": "float32",
53
+ "transformers_version": "4.21.0.dev0",
54
  "use_cache": true,
55
+ "vocab_size": 50265
56
  }