HiTZ / Medical-mT5-large-multitask / added_tokens.json
{
"</Chemical>": 250117,
"</Claim>": 250114,
"</ClinicalEntity>": 250111,
"</Dis>": 250112,
"</Disease>": 250116,
"</DiseaseNCBI>": 250115,
"</NORMALIZABLES>": 250121,
"</NO_NORMALIZABLES>": 250119,
"</PROTEINAS>": 250118,
"</Premise>": 250113,
"</UNCLEAR>": 250120,
"<Chemical>": 250106,
"<Claim>": 250103,
"<ClinicalEntity>": 250100,
"<Dis>": 250101,
"<Disease>": 250105,
"<DiseaseNCBI>": 250104,
"<NORMALIZABLES>": 250110,
"<NO_NORMALIZABLES>": 250108,
"<PROTEINAS>": 250107,
"<Premise>": 250102,
"<UNCLEAR>": 250109
}
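These entries extend the base mT5 vocabulary with task-specific opening and closing tags (e.g. <Disease>, <Claim>, <PROTEINAS>) that the multitask model uses to mark entities and argument spans in its output. A minimal usage sketch, assuming the standard transformers tokenizer API and that this file is loaded with the rest of the tokenizer from this repository:

from transformers import AutoTokenizer

# Illustrative sketch (not part of this repository): loading the tokenizer makes
# transformers read added_tokens.json and register each tag as a single added token.
tokenizer = AutoTokenizer.from_pretrained("HiTZ/Medical-mT5-large-multitask")

# The ID should match the entry above: "<Disease>" -> 250105.
print(tokenizer.convert_tokens_to_ids("<Disease>"))

# Added tokens are never split by the subword model, so tagged text keeps the
# markers intact, e.g. roughly ['<Disease>', '▁diabetes', '</Disease>'].
print(tokenizer.tokenize("<Disease> diabetes </Disease>"))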