{ "_commit_hash": null, "activation": "relu", "d_model": 1024, "dim_feedforward": 2048, "dropout": 0.1, "lr0": 5e-05, "nhead": 4, "num_layers": 2, "out_dim": 512, "transformers_version": null, "warmup_steps": 400 }