josh-oo committed on
Commit
fa840f3
1 Parent(s): fcca31e

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +1 -100
config.json CHANGED
@@ -99,106 +99,7 @@
99
  "vocab_size": 50265
100
  },
101
  "decoder_start_token_id": 0,
102
- "encoder": {
103
- "_name_or_path": "facebook/mbart-large-cc25",
104
- "_num_labels": 3,
105
- "activation_dropout": 0.0,
106
- "activation_function": "gelu",
107
- "add_bias_logits": false,
108
- "add_cross_attention": false,
109
- "add_final_layer_norm": true,
110
- "architectures": [
111
- "MBartEncoder"
112
- ],
113
- "attention_dropout": 0.0,
114
- "bad_words_ids": null,
115
- "begin_suppress_tokens": null,
116
- "bos_token_id": 0,
117
- "chunk_size_feed_forward": 0,
118
- "classif_dropout": 0.0,
119
- "classifier_dropout": 0.0,
120
- "cross_attention_hidden_size": null,
121
- "d_model": 1024,
122
- "decoder_attention_heads": 16,
123
- "decoder_ffn_dim": 4096,
124
- "decoder_layerdrop": 0.0,
125
- "decoder_layers": 12,
126
- "decoder_start_token_id": null,
127
- "diversity_penalty": 0.0,
128
- "do_sample": false,
129
- "dropout": 0.1,
130
- "early_stopping": false,
131
- "encoder_attention_heads": 16,
132
- "encoder_ffn_dim": 4096,
133
- "encoder_layerdrop": 0.0,
134
- "encoder_layers": 12,
135
- "encoder_no_repeat_ngram_size": 0,
136
- "eos_token_id": 2,
137
- "exponential_decay_length_penalty": null,
138
- "finetuning_task": null,
139
- "forced_bos_token_id": null,
140
- "forced_eos_token_id": 2,
141
- "id2label": {
142
- "0": "LABEL_0",
143
- "1": "LABEL_1",
144
- "2": "LABEL_2"
145
- },
146
- "init_std": 0.02,
147
- "is_decoder": false,
148
- "is_encoder_decoder": true,
149
- "label2id": {
150
- "LABEL_0": 0,
151
- "LABEL_1": 1,
152
- "LABEL_2": 2
153
- },
154
- "length_penalty": 1.0,
155
- "max_length": 1024,
156
- "max_position_embeddings": 1024,
157
- "min_length": 0,
158
- "model_type": "mbart",
159
- "no_repeat_ngram_size": 0,
160
- "normalize_before": true,
161
- "normalize_embedding": true,
162
- "num_beam_groups": 1,
163
- "num_beams": 5,
164
- "num_hidden_layers": 12,
165
- "num_return_sequences": 1,
166
- "output_attentions": false,
167
- "output_hidden_states": false,
168
- "output_past": true,
169
- "output_scores": false,
170
- "pad_token_id": 1,
171
- "prefix": null,
172
- "problem_type": null,
173
- "pruned_heads": {},
174
- "remove_invalid_values": false,
175
- "repetition_penalty": 1.0,
176
- "return_dict": true,
177
- "return_dict_in_generate": false,
178
- "scale_embedding": true,
179
- "sep_token_id": null,
180
- "static_position_embeddings": false,
181
- "suppress_tokens": null,
182
- "task_specific_params": {
183
- "translation_en_to_ro": {
184
- "decoder_start_token_id": 250020
185
- }
186
- },
187
- "temperature": 1.0,
188
- "tf_legacy_loss": false,
189
- "tie_encoder_decoder": false,
190
- "tie_word_embeddings": true,
191
- "tokenizer_class": null,
192
- "top_k": 50,
193
- "top_p": 1.0,
194
- "torch_dtype": null,
195
- "torchscript": false,
196
- "transformers_version": "4.25.1",
197
- "typical_p": 1.0,
198
- "use_bfloat16": false,
199
- "use_cache": true,
200
- "vocab_size": 250027
201
- },
202
  "eos_token_id": 2,
203
  "is_encoder_decoder": true,
204
  "max_length": 1024,
 
99
  "vocab_size": 50265
100
  },
101
  "decoder_start_token_id": 0,
102
+ "encoder": {},
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
103
  "eos_token_id": 2,
104
  "is_encoder_decoder": true,
105
  "max_length": 1024,