|
{
  "_name_or_path": "microsoft/llmlingua-2-xlm-roberta-large-meetingbank",
  "architectures": [
    "XLMRobertaForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 1024,
  "id2label": {
    "0": "O",
    "1": "B-Repetition",
    "2": "B-Loaded_Language",
    "3": "B-Name_Calling,Labeling",
    "4": "B-Flag-Waving",
    "5": "I-Flag-Waving",
    "6": "B-Causal_Oversimplification",
    "7": "I-Causal_Oversimplification",
    "8": "B-Doubt",
    "9": "I-Doubt",
    "10": "I-Loaded_Language",
    "11": "B-Appeal_to_Authority",
    "12": "I-Appeal_to_Authority",
    "13": "I-Name_Calling,Labeling",
    "14": "B-Appeal_to_fear-prejudice",
    "15": "I-Appeal_to_fear-prejudice",
    "16": "I-Repetition",
    "17": "B-Black-and-White_Fallacy",
    "18": "I-Black-and-White_Fallacy",
    "19": "B-Exaggeration,Minimisation",
    "20": "I-Exaggeration,Minimisation"
  },
  "initializer_range": 0.02,
  "intermediate_size": 4096,
  "label2id": {
    "B-Appeal_to_Authority": 11,
    "B-Appeal_to_fear-prejudice": 14,
    "B-Black-and-White_Fallacy": 17,
    "B-Causal_Oversimplification": 6,
    "B-Doubt": 8,
    "B-Exaggeration,Minimisation": 19,
    "B-Flag-Waving": 4,
    "B-Loaded_Language": 2,
    "B-Name_Calling,Labeling": 3,
    "B-Repetition": 1,
    "I-Appeal_to_Authority": 12,
    "I-Appeal_to_fear-prejudice": 15,
    "I-Black-and-White_Fallacy": 18,
    "I-Causal_Oversimplification": 7,
    "I-Doubt": 9,
    "I-Exaggeration,Minimisation": 20,
    "I-Flag-Waving": 5,
    "I-Loaded_Language": 10,
    "I-Name_Calling,Labeling": 13,
    "I-Repetition": 16,
    "O": 0
  },
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "xlm-roberta",
  "num_attention_heads": 16,
  "num_hidden_layers": 24,
  "output_past": true,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.30.0",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 250102
}
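The config above describes an XLM-RoBERTa-large token-classification head with 21 BIO labels (id2label/label2id) for propaganda-technique spans. A minimal loading sketch with Hugging Face transformers, assuming the config.json sits next to the fine-tuned weights in a local directory; the "path/to/model" path and the example sentence are placeholders, not part of the original config:

# Minimal sketch: load the model described by this config and tag spans.
from transformers import AutoModelForTokenClassification, AutoTokenizer, pipeline

model_dir = "path/to/model"  # placeholder: directory containing this config.json and weights
tokenizer = AutoTokenizer.from_pretrained(model_dir)
model = AutoModelForTokenClassification.from_pretrained(model_dir)

# "simple" aggregation merges adjacent B-/I- tokens of the same label
# (as defined in id2label) into single labeled spans.
tagger = pipeline(
    "token-classification",
    model=model,
    tokenizer=tokenizer,
    aggregation_strategy="simple",
)
print(tagger("Example input sentence to tag."))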
|
|