# ddpg-AntBulletEnv-v0 / config.yml
# Source: Hugging Face Hub model repo by araffin (initial commit 1816d88, 482 bytes).
# NOTE(review): lines below use the !!python/object/apply tag — loading requires
# yaml.unsafe_load / a loader that permits Python object construction.
!!python/object/apply:collections.OrderedDict
- - - buffer_size
- 200000
- - env_wrapper
- sb3_contrib.common.wrappers.TimeFeatureWrapper
- - gamma
- 0.98
- - gradient_steps
- -1
- - learning_rate
- 0.0007
- - learning_starts
- 10000
- - n_timesteps
- 1000000.0
- - noise_std
- 0.1
- - noise_type
- normal
- - policy
- MlpPolicy
- - policy_kwargs
- dict(net_arch=[400, 300])
- - train_freq
- - 1
- episode