{
  "act_dim": 3,
  "action_tanh": true,
  "activation_function": "relu",
  "adv_act_dim": 3,
  "architectures": [
    "VanillaDT"
  ],
  "attn_pdrop": 0.1,
  "bos_token_id": 50256,
  "context_size": 20,
  "embd_pdrop": 0.1,
  "eos_token_id": 50256,
  "flag": false,
  "hidden_size": 128,
  "initializer_range": 0.02,
  "lambda1": 1.0,
  "lambda2": 1.0,
  "layer_norm_epsilon": 1e-05,
  "log_interval_steps": 100,
  "max_ep_len": 1000,
  "max_ep_return": 3223,
  "max_obs_len": 1000,
  "max_obs_return": 3223,
  "min_ep_return": 2.7032015323638916,
  "min_obs_return": 2.7032015323638916,
  "model_type": "decision_transformer",
  "n_head": 1,
  "n_inner": null,
  "n_layer": 3,
  "n_positions": 1024,
  "pr_act_dim": 3,
  "reorder_and_upcast_attn": false,
  "resid_pdrop": 0.1,
  "returns_scale": 1000,
  "scale_attn_by_inverse_layer_idx": false,
  "scale_attn_weights": true,
  "state_dim": 11,
  "state_mean": [
    1.3106526835537706,
    -0.08469277108602184,
    -0.5382069126657438,
    -0.0720346361434995,
    0.04932667926114204,
    2.1067770148789027,
    -0.15017017617998743,
    0.008783139314902764,
    -0.2848213106799807,
    -0.18540193496296434,
    -0.28461201593116625
  ],
  "state_std": [
    0.17792387447055596,
    0.05445541724664148,
    0.2130354611105368,
    0.14523059124520427,
    0.6124522430667698,
    0.8519473510741823,
    1.4517896067649523,
    0.6753408994067926,
    1.5365847791743765,
    1.6180626583607793,
    5.6093664355155495
  ],
  "torch_dtype": "float32",
  "total_train_steps": 10000,
  "transformers_version": "4.31.0",
  "use_cache": true,
  "vocab_size": 1,
  "warmup_steps": 1000
}