{ "model_name": "PlanTL-GOB-ES/roberta-base-bne", "max_seq_length": 512, "epochs": 10, "warmup_steps": 1000, "batch_size": 16, "optimizer_params": { "lr": 2e-05 }, "loss": "tl", "dataset_train_size": 500000, "dataset_name": "dariolopez/ms-marco-es-500k", "seed": 42, "length_embedding": 768 }