{
  "_name_or_path": "aubmindlab/bert-base-arabert",
  "architectures": [
    "BertForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "classifier_dropout": null,
  "directionality": "bidi",
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "Appeal_to_Fear_Prejudice",
    "1": "0",
    "2": "Loaded_Language",
    "3": "Name_Calling_Labeling",
    "4": "Questioning_the_Reputation",
    "5": "Exaggeration_Minimisation",
    "6": "Appeal_to_Values",
    "7": "Flag_Waving",
    "8": "Doubt",
    "9": "Obfuscation_Vagueness_Confusion",
    "10": "Slogans",
    "11": "Appeal_to_Authority",
    "12": "Repetition",
    "13": "Causal_Oversimplification",
    "14": "False_Dilemma_No_Choice",
    "15": "Whataboutism",
    "16": "Conversation_Killer",
    "17": "Straw_Man",
    "18": "Appeal_to_Hypocrisy",
    "19": "Appeal_to_Popularity",
    "20": "Guilt_by_Association",
    "21": "Red_Herring",
    "22": "Appeal_to_Time",
    "23": "Consequential_Oversimplification"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "0": 1,
    "Appeal_to_Authority": 11,
    "Appeal_to_Fear_Prejudice": 0,
    "Appeal_to_Hypocrisy": 18,
    "Appeal_to_Popularity": 19,
    "Appeal_to_Time": 22,
    "Appeal_to_Values": 6,
    "Causal_Oversimplification": 13,
    "Consequential_Oversimplification": 23,
    "Conversation_Killer": 16,
    "Doubt": 8,
    "Exaggeration_Minimisation": 5,
    "False_Dilemma_No_Choice": 14,
    "Flag_Waving": 7,
    "Guilt_by_Association": 20,
    "Loaded_Language": 2,
    "Name_Calling_Labeling": 3,
    "Obfuscation_Vagueness_Confusion": 9,
    "Questioning_the_Reputation": 4,
    "Red_Herring": 21,
    "Repetition": 12,
    "Slogans": 10,
    "Straw_Man": 17,
    "Whataboutism": 15
  },
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 512,
  "model_type": "bert",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 0,
  "pooler_fc_size": 768,
  "pooler_num_attention_heads": 12,
  "pooler_num_fc_layers": 3,
  "pooler_size_per_head": 128,
  "pooler_type": "first_token_transform",
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.30.2",
  "type_vocab_size": 2,
  "use_cache": true,
  "vocab_size": 64000
}