my_awesome_model / config.json
{
  "_name_or_path": "distilbert-base-uncased",
  "activation": "gelu",
  "architectures": [
    "DistilBertForSequenceClassification"
  ],
  "attention_dropout": 0.1,
  "dim": 768,
  "dropout": 0.1,
  "hidden_dim": 3072,
  "id2label": {
    "0": "faulty generalization",
    "1": "false causality",
    "2": "circular reasoning",
    "3": "ad populum",
    "4": "ad hominem",
    "5": "fallacy of logic",
    "6": "appeal to emotion",
    "7": "false dilemma",
    "8": "equivocation",
    "9": "fallacy of extension",
    "10": "fallacy of relevance",
    "11": "fallacy of credibility",
    "12": "intentional"
  },
  "initializer_range": 0.02,
  "label2id": {
    "ad hominem": 4,
    "ad populum": 3,
    "appeal to emotion": 6,
    "circular reasoning": 2,
    "equivocation": 8,
    "fallacy of credibility": 11,
    "fallacy of extension": 9,
    "fallacy of logic": 5,
    "fallacy of relevance": 10,
    "false causality": 1,
    "false dilemma": 7,
    "faulty generalization": 0,
    "intentional": 12
  },
  "max_position_embeddings": 512,
  "model_type": "distilbert",
  "n_heads": 12,
  "n_layers": 6,
  "pad_token_id": 0,
  "problem_type": "single_label_classification",
  "qa_dropout": 0.1,
  "seq_classif_dropout": 0.2,
  "sinusoidal_pos_embds": false,
  "tie_weights_": true,
  "torch_dtype": "float32",
  "transformers_version": "4.35.2",
  "vocab_size": 30522
}
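
Taken together, these fields describe a distilbert-base-uncased encoder (6 layers, 12 heads, hidden size 768) fine-tuned as a 13-way single-label classifier over logical-fallacy categories. A minimal inference sketch follows, assuming the checkpoint is published under the hub id "ElDestructo/my_awesome_model" (inferred from the page header, not stated in the config itself):

from transformers import AutoTokenizer, AutoModelForSequenceClassification
import torch

model_id = "ElDestructo/my_awesome_model"  # assumed repo id, not confirmed by the config
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForSequenceClassification.from_pretrained(model_id)
model.eval()

text = "Everyone believes this, so it must be true."
# max_position_embeddings is 512, so truncate longer inputs to fit.
inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=512)
with torch.no_grad():
    logits = model(**inputs).logits  # shape: (1, 13), one score per label

# id2label from this config maps the argmax index back to a fallacy name,
# e.g. index 3 -> "ad populum".
print(model.config.id2label[logits.argmax(dim=-1).item()])

Because problem_type is "single_label_classification", the thirteen classes are mutually exclusive and the head is trained with cross-entropy, so a plain argmax over the logits is the appropriate decoding step.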