{
  "activation": "relu",
  "d_model": 1024,
  "dim_feedforward": 2048,
  "dropout": 0.1,
  "lr0": 7.5e-05,
  "nhead": 4,
  "num_layers": 2,
  "out_dim": 512,
  "transformers_version": "4.40.1",
  "warmup_steps": 300
}