t5-english-ner / config.json
{
  "_name_or_path": "t5-large",
  "architectures": [
    "EncT5ForTokenClassification"
  ],
  "auto_map": {
    "AutoConfig": "configuration_enc_t5.EncT5Config",
    "AutoModelForTokenClassification": "modeling_enc_t5.EncT5ForTokenClassification"
  },
  "tokenizer_class": "configuration_enc_t5.EncT5Tokenizer",
  "d_ff": 4096,
  "d_kv": 64,
  "d_model": 1024,
  "decoder_start_token_id": 0,
  "dense_act_fn": "relu",
  "dropout_rate": 0.1,
  "eos_token_id": 1,
  "feed_forward_proj": "relu",
  "finetuning_task": "ner",
  "id2label": {
    "0": "O",
    "1": "B-PERSON",
    "2": "I-PERSON",
    "3": "B-ORGANIZATION",
    "4": "I-ORGANIZATION",
    "5": "B-LOCATION",
    "6": "I-LOCATION",
    "7": "B-TITLE",
    "8": "I-TITLE",
    "9": "B-EVENT",
    "10": "I-EVENT",
    "11": "B-QUANTITY",
    "12": "I-QUANTITY",
    "13": "B-DATE",
    "14": "I-DATE",
    "15": "B-COMMERCIAL_ITEM",
    "16": "I-COMMERCIAL_ITEM",
    "17": "B-OTHER",
    "18": "I-OTHER"
  },
  "initializer_factor": 1.0,
  "is_encoder_decoder": true,
  "is_gated_act": false,
  "label2id": {
    "B-COMMERCIAL_ITEM": 15,
    "B-DATE": 13,
    "B-EVENT": 9,
    "B-LOCATION": 5,
    "B-ORGANIZATION": 3,
    "B-OTHER": 17,
    "B-PERSON": 1,
    "B-QUANTITY": 11,
    "B-TITLE": 7,
    "I-COMMERCIAL_ITEM": 16,
    "I-DATE": 14,
    "I-EVENT": 10,
    "I-LOCATION": 6,
    "I-ORGANIZATION": 4,
    "I-OTHER": 18,
    "I-PERSON": 2,
    "I-QUANTITY": 12,
    "I-TITLE": 8,
    "O": 0
  },
  "layer_norm_epsilon": 1e-06,
  "model_type": "t5",
  "n_positions": 512,
  "num_decoder_layers": 24,
  "num_heads": 16,
  "num_layers": 24,
  "output_past": true,
  "pad_token_id": 0,
  "relative_attention_max_distance": 128,
  "relative_attention_num_buckets": 32,
  "task_specific_params": {
    "summarization": {
      "early_stopping": true,
      "length_penalty": 2.0,
      "max_length": 200,
      "min_length": 30,
      "no_repeat_ngram_size": 3,
      "num_beams": 4,
      "prefix": "summarize: "
    },
    "translation_en_to_de": {
      "early_stopping": true,
      "max_length": 300,
      "num_beams": 4,
      "prefix": "translate English to German: "
    },
    "translation_en_to_fr": {
      "early_stopping": true,
      "max_length": 300,
      "num_beams": 4,
      "prefix": "translate English to French: "
    },
    "translation_en_to_ro": {
      "early_stopping": true,
      "max_length": 300,
      "num_beams": 4,
      "prefix": "translate English to Romanian: "
    }
  },
  "torch_dtype": "float32",
  "transformers_version": "4.21.1",
  "use_cache": true,
  "vocab_size": 32128
}
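
The "auto_map" block above routes AutoConfig and AutoModelForTokenClassification to the custom EncT5 classes shipped alongside this config, so the checkpoint has to be loaded with trust_remote_code=True. The following is a minimal usage sketch, not an official example: it assumes the repository id imvladikon/t5-english-ner, that the repo's remote code resolves the custom EncT5Tokenizer for AutoTokenizer, and that EncT5ForTokenClassification returns a standard token-classification output with a logits field covering the 19 BIO tags listed in "id2label".

import torch
from transformers import AutoConfig, AutoModelForTokenClassification, AutoTokenizer

repo_id = "imvladikon/t5-english-ner"  # assumed repository id

# trust_remote_code=True is required because auto_map points the Auto* classes
# at the repo's own configuration_enc_t5.py / modeling_enc_t5.py modules.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
tokenizer = AutoTokenizer.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForTokenClassification.from_pretrained(repo_id, trust_remote_code=True)
model.eval()

text = "Steve Jobs founded Apple in Cupertino."
inputs = tokenizer(text, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits  # assumed shape: (batch, seq_len, 19)

# Map each token's argmax class id back to its BIO tag via id2label.
pred_ids = logits.argmax(dim=-1)[0].tolist()
tokens = tokenizer.convert_ids_to_tokens(inputs["input_ids"][0])
for token, label_id in zip(tokens, pred_ids):
    print(token, config.id2label[label_id])

Dropping "O" predictions and merging consecutive B-/I- pieces of the same type would turn this token-level output into entity spans; the transformers token-classification pipeline can perform that aggregation when the custom model class supports it.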