{
"_name_or_path": "distilbert/distilbert-base-uncased",
"activation": "gelu",
"architectures": [
"DistilBertForSequenceClassification"
],
"attention_dropout": 0.1,
"dim": 768,
"dropout": 0.1,
"hidden_dim": 3072,
"id2label": {
"0.0": "addiction",
"1.0": "anger-management",
"2.0": "anxiety",
"3.0": "behavioral-change",
"4.0": "children-adolescents",
"5.0": "counseling-fundamentals",
"6.0": "depression",
"7.0": "diagnosis",
"8.0": "domestic-violence",
"9.0": "eating-disorders",
"10.0": "family-conflict",
"11.0": "grief-and-loss",
"12.0": "human-sexuality",
"13.0": "intimacy",
"14.0": "legal-regulatory",
"15.0": "lgbtq",
"16.0": "marriage",
"17.0": "military-issues",
"18.0": "parenting",
"19.0": "professional-ethics",
"20.0": "relationship-dissolution",
"21.0": "relationships",
"22.0": "self-esteem",
"23.0": "sleep-improvement",
"24.0": "social-relationships",
"25.0": "spirituality",
"26.0": "stress",
"27.0": "substance-abuse",
"28.0": "trauma",
"29.0": "workplace-relationships"
},
"initializer_range": 0.02,
"label2id": {
"addiction": 0.0,
"anger-management": 1.0,
"anxiety": 2.0,
"behavioral-change": 3.0,
"children-adolescents": 4.0,
"counseling-fundamentals": 5.0,
"depression": 6.0,
"diagnosis": 7.0,
"domestic-violence": 8.0,
"eating-disorders": 9.0,
"family-conflict": 10.0,
"grief-and-loss": 11.0,
"human-sexuality": 12.0,
"intimacy": 13.0,
"legal-regulatory": 14.0,
"lgbtq": 15.0,
"marriage": 16.0,
"military-issues": 17.0,
"parenting": 18.0,
"professional-ethics": 19.0,
"relationship-dissolution": 20.0,
"relationships": 21.0,
"self-esteem": 22.0,
"sleep-improvement": 23.0,
"social-relationships": 24.0,
"spirituality": 25.0,
"stress": 26.0,
"substance-abuse": 27.0,
"trauma": 28.0,
"workplace-relationships": 29.0
},
"max_position_embeddings": 512,
"model_type": "distilbert",
"n_heads": 12,
"n_layers": 6,
"pad_token_id": 0,
"problem_type": "single_label_classification",
"qa_dropout": 0.1,
"seq_classif_dropout": 0.2,
"sinusoidal_pos_embds": false,
"tie_weights_": true,
"torch_dtype": "float32",
"transformers_version": "4.42.4",
"vocab_size": 30522
}