GuillemGSubies committed on
Commit 32278fd
1 Parent(s): fb635c8

Upload the model

Files changed (2)
  1. config.json +127 -0
  2. pytorch_model.bin +3 -0
config.json ADDED
@@ -0,0 +1,127 @@
+ {
+ "_name_or_path": "microsoft/mdeberta-v3-base-16-4e-05-0.2-meddocan_best",
+ "architectures": [
+ "DebertaV2ForSequenceClassification"
+ ],
+ "attention_probs_dropout_prob": 0.1,
+ "hidden_act": "gelu",
+ "hidden_dropout_prob": 0.1,
+ "hidden_size": 768,
+ "id2label": {
+ "0": "LABEL_0",
+ "1": "LABEL_1",
+ "2": "LABEL_2",
+ "3": "LABEL_3",
+ "4": "LABEL_4",
+ "5": "LABEL_5",
+ "6": "LABEL_6",
+ "7": "LABEL_7",
+ "8": "LABEL_8",
+ "9": "LABEL_9",
+ "10": "LABEL_10",
+ "11": "LABEL_11",
+ "12": "LABEL_12",
+ "13": "LABEL_13",
+ "14": "LABEL_14",
+ "15": "LABEL_15",
+ "16": "LABEL_16",
+ "17": "LABEL_17",
+ "18": "LABEL_18",
+ "19": "LABEL_19",
+ "20": "LABEL_20",
+ "21": "LABEL_21",
+ "22": "LABEL_22",
+ "23": "LABEL_23",
+ "24": "LABEL_24",
+ "25": "LABEL_25",
+ "26": "LABEL_26",
+ "27": "LABEL_27",
+ "28": "LABEL_28",
+ "29": "LABEL_29",
+ "30": "LABEL_30",
+ "31": "LABEL_31",
+ "32": "LABEL_32",
+ "33": "LABEL_33",
+ "34": "LABEL_34",
+ "35": "LABEL_35",
+ "36": "LABEL_36",
+ "37": "LABEL_37",
+ "38": "LABEL_38",
+ "39": "LABEL_39",
+ "40": "LABEL_40",
+ "41": "LABEL_41",
+ "42": "LABEL_42",
+ "43": "LABEL_43"
+ },
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "label2id": {
+ "B-CALLE": 0,
+ "B-CENTRO_SALUD": 2,
+ "B-CORREO_ELECTRONICO": 4,
+ "B-EDAD_SUJETO_ASISTENCIA": 6,
+ "B-FAMILIARES_SUJETO_ASISTENCIA": 8,
+ "B-FECHAS": 10,
+ "B-HOSPITAL": 12,
+ "B-ID_ASEGURAMIENTO": 14,
+ "B-ID_CONTACTO_ASISTENCIAL": 16,
+ "B-ID_EMPLEO_PERSONAL_SANITARIO": 17,
+ "B-ID_SUJETO_ASISTENCIA": 19,
+ "B-ID_TITULACION_PERSONAL_SANITARIO": 21,
+ "B-INSTITUCION": 23,
+ "B-NOMBRE_PERSONAL_SANITARIO": 25,
+ "B-NOMBRE_SUJETO_ASISTENCIA": 27,
+ "B-NUMERO_FAX": 29,
+ "B-NUMERO_TELEFONO": 31,
+ "B-OTROS_SUJETO_ASISTENCIA": 34,
+ "B-PAIS": 36,
+ "B-PROFESION": 38,
+ "B-SEXO_SUJETO_ASISTENCIA": 40,
+ "B-TERRITORIO": 42,
+ "I-CALLE": 1,
+ "I-CENTRO_SALUD": 3,
+ "I-CORREO_ELECTRONICO": 5,
+ "I-EDAD_SUJETO_ASISTENCIA": 7,
+ "I-FAMILIARES_SUJETO_ASISTENCIA": 9,
+ "I-FECHAS": 11,
+ "I-HOSPITAL": 13,
+ "I-ID_ASEGURAMIENTO": 15,
+ "I-ID_EMPLEO_PERSONAL_SANITARIO": 18,
+ "I-ID_SUJETO_ASISTENCIA": 20,
+ "I-ID_TITULACION_PERSONAL_SANITARIO": 22,
+ "I-INSTITUCION": 24,
+ "I-NOMBRE_PERSONAL_SANITARIO": 26,
+ "I-NOMBRE_SUJETO_ASISTENCIA": 28,
+ "I-NUMERO_FAX": 30,
+ "I-NUMERO_TELEFONO": 32,
+ "I-OTROS_SUJETO_ASISTENCIA": 35,
+ "I-PAIS": 37,
+ "I-PROFESION": 39,
+ "I-SEXO_SUJETO_ASISTENCIA": 41,
+ "I-TERRITORIO": 43,
+ "O": 33
+ },
+ "layer_norm_eps": 1e-07,
+ "max_position_embeddings": 512,
+ "max_relative_positions": -1,
+ "model_type": "deberta-v2",
+ "norm_rel_ebd": "layer_norm",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "pad_token_id": 0,
+ "pooler_dropout": 0,
+ "pooler_hidden_act": "gelu",
+ "pooler_hidden_size": 768,
+ "pos_att_type": [
+ "p2c",
+ "c2p"
+ ],
+ "position_biased_input": false,
+ "position_buckets": 256,
+ "relative_attention": true,
+ "share_att_key": true,
+ "torch_dtype": "float32",
+ "transformers_version": "4.25.1",
+ "type_vocab_size": 0,
+ "vocab_size": 251000
+ }
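
For reference, this config.json declares an mDeBERTa-v3-base encoder (12 layers, hidden size 768, 251k-token vocabulary) with the MEDDOCAN label set: label2id holds 44 BIO-style entity tags, while id2label still lists generic LABEL_N names. Below is a minimal sketch (not part of the commit) of inspecting the uploaded configuration with the transformers AutoConfig API; the repo id is a placeholder, not taken from this diff.

```python
# Minimal sketch; "your-username/mdeberta-v3-base-meddocan" is a placeholder repo id,
# replace it with the actual Hub id of this model.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("your-username/mdeberta-v3-base-meddocan")

print(config.model_type)         # "deberta-v2"
print(config.num_hidden_layers)  # 12
print(config.vocab_size)         # 251000
print(len(config.label2id))      # 44 BIO tags (B-*/I-* pairs plus "O")
print(config.label2id["O"])      # 33
```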
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:39f8eb382e98448154fe622886116b6b5244e10d5c48e0940b74e0e91af1babd
+ size 1115443897
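
The pytorch_model.bin entry is stored through Git LFS, so the diff shows only the pointer file (spec version, sha256 oid, and a size of 1,115,443,897 bytes, roughly 1.1 GB) rather than the weights themselves. A minimal sketch of fetching the resolved file with huggingface_hub, again with a placeholder repo id:

```python
# Minimal sketch; the repo id is a placeholder. hf_hub_download resolves the LFS
# pointer and returns a local cache path to the actual weights file.
from huggingface_hub import hf_hub_download

weights_path = hf_hub_download(
    repo_id="your-username/mdeberta-v3-base-meddocan",  # placeholder id
    filename="pytorch_model.bin",
)
print(weights_path)  # local path to the ~1.1 GB weights file
```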