{
  "_name_or_path": "alexyalunin/RuBioRoBERTa",
  "architectures": [
    "RobertaForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 1,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 1024,
  "id2label": {
    "0": "ACTIVITY",
    "1": "ADMINISTRATION_ROUTE",
    "2": "ANATOMY",
    "3": "CHEM",
    "4": "DEVICE",
    "5": "DISO",
    "6": "FINDING",
    "7": "FOOD",
    "8": "GENE",
    "9": "INJURY_POISONING",
    "10": "HEALTH_CARE_ACTIVITY",
    "11": "LABPROC",
    "12": "LIVB",
    "13": "MEDPROC",
    "14": "MENTALPROC",
    "15": "PHYS",
    "16": "SCIPROC",
    "17": "AGE",
    "18": "CITY",
    "19": "COUNTRY",
    "20": "DATE",
    "21": "DISTRICT",
    "22": "EVENT",
    "23": "FAMILY",
    "24": "FACILITY",
    "25": "LOCATION",
    "26": "MONEY",
    "27": "NATIONALITY",
    "28": "NUMBER",
    "29": "ORDINAL",
    "30": "ORGANIZATION",
    "31": "PERCENT",
    "32": "PERSON",
    "33": "PRODUCT",
    "34": "PROFESSION",
    "35": "STATE_OR_PROVINCE",
    "36": "TIME",
    "37": "AWARD",
    "38": "CRIME",
    "39": "IDEOLOGY",
    "40": "LANGUAGE",
    "41": "LAW",
    "42": "PENALTY",
    "43": "RELIGION",
    "44": "WORK_OF_ART"
  },
  "initializer_range": 0.02,
  "intermediate_size": 4096,
  "label2id": {
    "ACTIVITY": 0,
    "ADMINISTRATION_ROUTE": 1,
    "AGE": 17,
    "ANATOMY": 2,
    "AWARD": 37,
    "CHEM": 3,
    "CITY": 18,
    "COUNTRY": 19,
    "CRIME": 38,
    "DATE": 20,
    "DEVICE": 4,
    "DISO": 5,
    "DISTRICT": 21,
    "EVENT": 22,
    "FACILITY": 24,
    "FAMILY": 23,
    "FINDING": 6,
    "FOOD": 7,
    "GENE": 8,
    "HEALTH_CARE_ACTIVITY": 10,
    "IDEOLOGY": 39,
    "INJURY_POISONING": 9,
    "LABPROC": 11,
    "LANGUAGE": 40,
    "LAW": 41,
    "LIVB": 12,
    "LOCATION": 25,
    "MEDPROC": 13,
    "MENTALPROC": 14,
    "MONEY": 26,
    "NATIONALITY": 27,
    "NUMBER": 28,
    "ORDINAL": 29,
    "ORGANIZATION": 30,
    "PENALTY": 42,
    "PERCENT": 31,
    "PERSON": 32,
    "PHYS": 15,
    "PRODUCT": 33,
    "PROFESSION": 34,
    "RELIGION": 43,
    "SCIPROC": 16,
    "STATE_OR_PROVINCE": 35,
    "TIME": 36,
    "WORK_OF_ART": 44
  },
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "roberta",
  "num_attention_heads": 16,
  "num_hidden_layers": 24,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.38.2",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 50265
}