{ "_name_or_path": "aubmindlab/bert-base-arabert", "architectures": [ "BertForTokenClassification" ], "attention_probs_dropout_prob": 0.1, "classifier_dropout": null, "directionality": "bidi", "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 768, "id2label": { "0": "0", "1": "Loaded_Language", "10": "Appeal_to_Authority", "11": "Repetition", "12": "False_Dilemma-No_Choice", "13": "Flag_Waving", "14": "Causal_Oversimplification", "15": "Whataboutism", "16": "Conversation_Killer", "17": "Appeal_to_Hypocrisy", "18": "Straw_Man", "19": "Appeal_to_Time", "2": "Name_Calling-Labeling", "20": "Appeal_to_Popularity", "21": "Consequential_Oversimplification", "22": "Guilt_by_Association", "23": "Red_Herring", "3": "Questioning_the_Reputation", "4": "Appeal_to_Fear-Prejudice", "5": "Exaggeration-Minimisation", "6": "Appeal_to_Values", "7": "Obfuscation-Vagueness-Confusion", "8": "Doubt", "9": "Slogans" }, "initializer_range": 0.02, "intermediate_size": 3072, "label2id": { "0": "0", "Appeal_to_Authority": "10", "Appeal_to_Fear-Prejudice": "4", "Appeal_to_Hypocrisy": "17", "Appeal_to_Popularity": "20", "Appeal_to_Time": "19", "Appeal_to_Values": "6", "Causal_Oversimplification": "14", "Consequential_Oversimplification": "21", "Conversation_Killer": "16", "Doubt": "8", "Exaggeration-Minimisation": "5", "False_Dilemma-No_Choice": "12", "Flag_Waving": "13", "Guilt_by_Association": "22", "Loaded_Language": "1", "Name_Calling-Labeling": "2", "Obfuscation-Vagueness-Confusion": "7", "Questioning_the_Reputation": "3", "Red_Herring": "23", "Repetition": "11", "Slogans": "9", "Straw_Man": "18", "Whataboutism": "15" }, "layer_norm_eps": 1e-12, "max_position_embeddings": 512, "model_type": "bert", "num_attention_heads": 12, "num_hidden_layers": 12, "pad_token_id": 0, "pooler_fc_size": 768, "pooler_num_attention_heads": 12, "pooler_num_fc_layers": 3, "pooler_size_per_head": 128, "pooler_type": "first_token_transform", "position_embedding_type": "absolute", "torch_dtype": "float32", "transformers_version": "4.30.2", "type_vocab_size": 2, "use_cache": true, "vocab_size": 64000 }