Upload PPO LunarLander-v2 trained agent
- README.md +1 -1
- config.json +1 -1
- ppo-LunarLander-v2.zip +2 -2
- ppo-LunarLander-v2/data +29 -29
- ppo-LunarLander-v2/policy.optimizer.pth +2 -2
- ppo-LunarLander-v2/policy.pth +2 -2
- ppo-LunarLander-v2/system_info.txt +1 -1
- replay.mp4 +0 -0
- results.json +1 -1
README.md
CHANGED
@@ -16,7 +16,7 @@ model-index:
       type: LunarLander-v2
     metrics:
     - type: mean_reward
-      value:
+      value: 276.96 +/- 17.59
       name: mean_reward
       verified: false
 ---
config.json
CHANGED
@@ -1 +1 @@
-
{"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==", "__module__": "stable_baselines3.common.policies", "__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function ActorCriticPolicy.__init__ at 0x7f15f58c5f30>", "_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x7f15f58c5fc0>", "reset_noise": "<function ActorCriticPolicy.reset_noise at 0x7f15f58c6050>", "_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x7f15f58c60e0>", "_build": "<function ActorCriticPolicy._build at 0x7f15f58c6170>", "forward": "<function ActorCriticPolicy.forward at 0x7f15f58c6200>", "extract_features": "<function ActorCriticPolicy.extract_features at 0x7f15f58c6290>", "_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x7f15f58c6320>", "_predict": "<function ActorCriticPolicy._predict at 0x7f15f58c63b0>", "evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x7f15f58c6440>", "get_distribution": "<function ActorCriticPolicy.get_distribution at 0x7f15f58c64d0>", "predict_values": "<function ActorCriticPolicy.predict_values at 0x7f15f58c6560>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x7f15f5a68d40>"}, "verbose": 0, "policy_kwargs": {}, "num_timesteps": 1015808, "_total_timesteps": 1000000, "_num_timesteps_at_start": 0, "seed": 123456, "action_noise": null, "start_time": 1723302314568566763, "learning_rate": 0.0001, "tensorboard_log": null, "_last_obs": {":type:": "<class 'numpy.ndarray'>", ":serialized:": 
"gAWVdQIAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYAAgAAAAAAAKAJUT5uJbC8psXGOsIeKrmm/Rq+kkj+uQAAgD8AAIA/CpxXvkrHETxXypY8w6OIPLPr9b1MUog9AACAPwAAgD+NrvM9dJcuP+rLTD07zau+axIRPXQvDL0AAAAAAAAAAPOzkj0cMHs/kyAQPbIYAb/w1CY9/pnnvAAAAAAAAAAAxhNTPkHG97xLCIo7QHoqujbXW77TO/q6AACAPwAAgD9mkac8GZ1bP3IGhzz+f8a+s/m0O41iSjwAAAAAAAAAAACxaD4WSAE94vYuO1QxEjozG5I+FhaGugAAgD8AAIA/rZOOPvHeKzyw9346hzt/OFFMvT3FsJe5AACAPwAAgD/GPiE+z6kUvFhIcT0w4qO76l99vamKiLwAAIA/AACAP4YsOL6IsY096T6dPanoHr6u6AC9LJUPPQAAAAAAAAAAwI7LPa4hkroOezU1qWpgMFPzFzqdHki0AAAAAAAAgD+aVMI+w8DqPjViyjw68MW+RG+yPXQDCLwAAAAAAAAAAI1ei73/SWc/umoEvspU2b5R7oi9kwV1uwAAAAAAAAAA+qetPj9QPj+KOTg+nCEAv9oPHT6WOH+9AAAAAAAAAABADcy9VYwTP6a1+ztgsMa+23uQu90W/LwAAAAAAAAAACD4I76kWH+7wpp3tc9997LmZ7I8pq67NAAAgD8AAIA/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksQSwiGlIwBQ5R0lFKULg=="}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVgwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSxCFlIwBQ5R0lFKULg=="}, "_last_original_obs": null, "_episode_num": 0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": -0.015808000000000044, "_stats_window_size": 100, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVJAwAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHQHEAGrn1WbSMAWyUTQQBjAF0lEdArFkQqZtvXXV9lChoBkdAbxBrrPdEcGgHS/5oCEdArFkZ+az/qHV9lChoBkdAJjCWu5jH42gHS7NoCEdArFpavcJtznV9lChoBkdAbK8J79hqkGgHTQwBaAhHQKxbJTXJ5mh1fZQoaAZHQG6+Jrcj7hxoB00PAWgIR0CsW7ZLAYYSdX2UKGgGR0BwGPNX5nDjaAdNPAFoCEdArFyvgxagVXV9lChoBkdAbMdL5AQg92gHS/doCEdArFywGSpzcXV9lChoBkdAbEX3OfNA1WgHS/VoCEdArFzIBq9GqnV9lChoBkdAbKnNATqSo2gHS/xoCEdArF4M8xKxs3V9lChoBkdAcIkS1Vo6CGgHTSEBaAhHQKxfXYJ3PiV1fZQoaAZHQHBh/aL4vexoB00zAWgIR0CszYj0Dlo2dX2UKGgGR0ByGnR3NcGDaAdNOgFoCEdArM2lLrX18XV9lChoBkdAcaSEgW8AaWgHTRABaAhHQKzNwrMkhRt1fZQoaAZHQHDSR42S+xpoB00wAWgIR0CszlKKYRdydX2UKGgGR0BxMifAbhm5aAdNOwFoCEdArM6gQOFxn3V9lChoBkdAb6WdJaq0dGgHTQkBaAhHQKzO5MaCL/F1fZQoaAZHQFwhy6cy31BoB03oA2gIR0CszvsP8Q7LdX2UKGgGR0BwOMQtjCpFaAdL9GgIR0CszwiBoVVQdX2UKGgGR0Bwjz20zCUHaAdNBAFoCEdArNBCmqHXVnV9lChoBkdAbHPs8gZCOWgHTTYBaAhHQKzQgrcTJyR1fZQoaAZHQHEyh2fTTfBoB00LAWgIR0Cs0U4DcM3IdX2UKGgGR0BsvB9kSVW0aAdL/WgIR0Cs0kwKjSG8dX2UKGgGR0Bt2ol6Z6UraAdNLgFoCEdArNN83EQ5FXV9lChoBkdAcHf7gsK9f2gHTUgBaAhHQKzUNQCSzPd1fZQoaAZHQHCQa55JK8NoB00yAWgIR0Cs1MqQzUI+dX2UKGgGR0Bwf32PDHfeaAdNJwFoCEdArNULzqbBoHV9lChoBkdAX3O/UONHY2gHTegDaAhHQKzVqIBzV+Z1fZQoaAZHQHA1274BV+9oB01KAWgIR0Cs1bNjLB9DdX2UKGgGR0Bs/iudPLxJaAdL/WgIR0Cs1fjoIOYqdX2UKGgGR0Bv12OXE61caAdNXAFoCEdArNYmAf+0gXV9lChoBkdAcRa5Fw1iv2gHTR8BaAhHQKzWWKgIyCZ1fZQoaAZHQG5ofWtlqahoB00YAWgIR0Cs10/QSi/PdX2UKGgGR0BvwF1nuiN9aAdL+WgIR0Cs16udwvQGdX2UKGgGR0BbLubAk9lmaAdN6ANoCEdArNf1FDv3J3V9lChoBkdASEjyrgflqGgHS7loCEdArNk2mtQsPXV9lChoBkdAbtlmzSkTH2gHTQwBaAhHQKzZvFERaox1fZQoaAZHQHEnF5KODJ5oB0v4aAhHQKzaBHyVfNR1fZQoaAZHQHFMAG4ZuQ9oB00UAWgIR0Cs2mGITGo8dX2UKGgGR0BwNEjTrmheaAdL92gIR0Cs2o/giu+zdX2UKGgGR0Bxo9CmdiDvaAdNfgFoCEdArNt036yjYnV9lChoBkdAcJe2pAD7qWgHTSsBaAhHQKzb7hF3IMl1fZQoaAZHQHFhj7uUliVoB000AWgIR0Cs3IBun/DMdX2UKGgGR0Bw+tHvttygaAdL/WgIR0Cs3MTGPxQSdX2UKGgGR0BwZuVObiIdaAdNIwFoCEdArN0rcIqsl3V9lChoBkdAbCy5sCT2WmgHTTQBaAhHQKzerDKHO8l1fZQoaAZHQCGpLdvbXYloB0vCaAhHQKze2iV0Lc91fZQoaAZHQHDCaC+UQkJoB02vAWgIR0Cs34RhUipvdX2UKGgGR0BwxQO7QLNOaAdNEQFoCEdArN+SLMs6JnV9lChoBkdAcItOM2m52GgHS+loCEdArOA3x2B8QnV9lChoBkdAb9cqkuYhMmgHTRQBaAhHQKzgSwJw84h1fZQoaAZHQFloSkj5bhZoB03oA2gIR0Cs4GAV45cUdX2UKGgGR0Biqx0uDjBEa
AdN6ANoCEdArOB4X668QXV9lChoBkdAbddTAnDziGgHTQ0BaAhHQKzgeYoiLVF1fZQoaAZHQB2Le67NB4VoB0vEaAhHQKzh6LmZE2J1fZQoaAZHQGBPHZbpu/FoB03oA2gIR0Cs4pH5zo2XdX2UKGgGR0Brqlhd+ocaaAdNEgFoCEdArOLCIFeOXHV9lChoBkdAYlnQKrq+rWgHTegDaAhHQKzkGw9q1w51fZQoaAZHQG+mV+I/JNloB00qAWgIR0Cs5BoxQBPsdX2UKGgGR0BwoFw84gieaAdL42gIR0Cs5FcmjTKDdX2UKGgGR0Bwdv4SHuZ1aAdNCwFoCEdArOUzi4rjHXV9lChoBkdAbYeQtBfKIWgHS/5oCEdArOXr6nBLwnV9lChoBkdAcKAVaOgg5mgHS/xoCEdArOXwnF5v+HV9lChoBkdAcIYpUgjhUGgHTSIBaAhHQKzmJsl9jPR1fZQoaAZHQHEQjXe3x4JoB00RAWgIR0Cs5mSnk1dgdX2UKGgGR0BwSzllsguAaAdNDgFoCEdArOZpuqFRHnV9lChoBkdAcEgtJWeYlmgHTVUBaAhHQKznGQUYbbV1fZQoaAZHQG/4al+EytVoB00rAWgIR0Cs6ODmCAc1dX2UKGgGR0Bv8/FFUhmoaAdL5GgIR0Cs6O/Zdv87dX2UKGgGR0BugqKpDNQkaAdNDAFoCEdArOmNW0Z3tHV9lChoBkdAbu3f642CNGgHTRcBaAhHQKzpyVeKKpF1fZQoaAZHQG5YuTRplBhoB0v2aAhHQKzrLlVcUud1fZQoaAZHQG5oEhq0tyxoB00MAWgIR0Cs67PBi1ArdX2UKGgGR0BwmgrH2h7FaAdNdwJoCEdArOvDiOvMbHV9lChoBkdAbyOQmu1WsGgHS/loCEdArOvX9UCJXXV9lChoBkdAbnui7Ciyp2gHTQkBaAhHQKzr5Ux20Rh1fZQoaAZHQG6AfwAlv61oB01eAWgIR0Cs7JkRjBl+dX2UKGgGR0Bw/I7KaG5+aAdNIgFoCEdArOyYm7aqTHV9lChoBkdANzZxm03OwGgHS+doCEdArO3PQ4S6D3V9lChoBkdAb5KifxtpEmgHTQEBaAhHQKzuXxdY4hl1fZQoaAZHQG+4NeD3/PxoB00AAWgIR0Cs7tx6Ww/xdX2UKGgGR0BvNjJZGKAKaAdNrgJoCEdArO8xgLJCB3V9lChoBkdAcQgLiMo+fWgHTTYBaAhHQKzwXD/EOy51fZQoaAZHQHFeKCL/CIloB0vhaAhHQKzwfu9eyAx1fZQoaAZHQG+YQTmGM4toB0vnaAhHQKzwrRYRuj11fZQoaAZHQGHaHt4RmK9oB03oA2gIR0Cs8cN/FzdUdX2UKGgGR0BvtgIv8IiUaAdNAgFoCEdArPIpJVbRnnV9lChoBkdAb4Z7zkIX02gHTU4BaAhHQKzyT0/4Zdh1fZQoaAZHQHC/xTS9du5oB00vAWgIR0Cs8xKhDgIhdX2UKGgGR0Budi8pTdcjaAdL+GgIR0Cs8zd2Pkq+dX2UKGgGR0BvwOw3YL9daAdL/GgIR0Cs8+gLiMo+dX2UKGgGR0Bw1GK0lZ5iaAdL52gIR0Cs8/t/e+EidX2UKGgGR0AbKHDaXa8IaAdL32gIR0Cs9BeXAuZkdX2UKGgGR8BEr86NlyzYaAdL2mgIR0Cs9SIxxkupdX2UKGgGR0BtM+RHPNVzaAdL62gIR0Cs9r2M85jpdX2UKGgGR0BwtfPhQ3xXaAdNLAFoCEdArPcv4fwI+nV9lChoBkdAXidpblijL2gHTegDaAhHQKz3b1UVBUt1fZQoaAZHQHA58P4EfT1oB01ZAWgIR0Cs9+LbYbsGdX2UKGgGR0BhUYWSEDhcaAdN6ANoCEdArPhSvq1PWXV9lChoBkdAbZWyu6mO2mgHTR0BaAhHQKz4f+ee4Cp1fZQoaAZHQHA8JB5X2dxoB0v3aAhHQKz4smfGuLd1ZS4="}, "ep_success_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 310, "observation_space": {":type:": "<class 'gymnasium.spaces.box.Box'>", ":serialized:": "gAWVdgIAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWCAAAAAAAAAABAQEBAQEBAZRoCIwCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksIhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoESiWCAAAAAAAAAABAQEBAQEBAZRoFUsIhZRoGXSUUpSMBl9zaGFwZZRLCIWUjANsb3eUaBEoliAAAAAAAAAAAAC0wgAAtMIAAKDAAACgwNsPScAAAKDAAAAAgAAAAICUaAtLCIWUaBl0lFKUjARoaWdolGgRKJYgAAAAAAAAAAAAtEIAALRCAACgQAAAoEDbD0lAAACgQAAAgD8AAIA/lGgLSwiFlGgZdJRSlIwIbG93X3JlcHKUjFtbLTkwLiAgICAgICAgLTkwLiAgICAgICAgIC01LiAgICAgICAgIC01LiAgICAgICAgIC0zLjE0MTU5MjcgIC01LgogIC0wLiAgICAgICAgIC0wLiAgICAgICBdlIwJaGlnaF9yZXBylIxTWzkwLiAgICAgICAgOTAuICAgICAgICAgNS4gICAgICAgICA1LiAgICAgICAgIDMuMTQxNTkyNyAgNS4KICAxLiAgICAgICAgIDEuICAgICAgIF2UjApfbnBfcmFuZG9tlE51Yi4=", "dtype": "float32", "bounded_below": "[ True True True True True True True True]", "bounded_above": "[ True True True True True True True True]", "_shape": [8], "low": "[-90. -90. -5. -5. -3.1415927 -5.\n -0. -0. ]", "high": "[90. 90. 5. 5. 3.1415927 5.\n 1. 1. ]", "low_repr": "[-90. -90. -5. -5. -3.1415927 -5.\n -0. -0. ]", "high_repr": "[90. 90. 5. 5. 3.1415927 5.\n 1. 1. 
]", "_np_random": null}, "action_space": {":type:": "<class 'gymnasium.spaces.discrete.Discrete'>", ":serialized:": "gAWVnwEAAAAAAACMGWd5bW5hc2l1bS5zcGFjZXMuZGlzY3JldGWUjAhEaXNjcmV0ZZSTlCmBlH2UKIwBbpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCaTiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIBAAAAAAAAACUhpRSlIwFc3RhcnSUaAhoDkMIAAAAAAAAAACUhpRSlIwGX3NoYXBllCmMBWR0eXBllGgOjApfbnBfcmFuZG9tlIwUbnVtcHkucmFuZG9tLl9waWNrbGWUjBBfX2dlbmVyYXRvcl9jdG9ylJOUjAVQQ0c2NJRoG4wUX19iaXRfZ2VuZXJhdG9yX2N0b3KUk5SGlFKUfZQojA1iaXRfZ2VuZXJhdG9ylIwFUENHNjSUjAVzdGF0ZZR9lChoJooRVdO8GcrFTseDGMkSZ39zgACMA2luY5SKEHXUhdbOHtUaQfo9WwyNK0F1jApoYXNfdWludDMylEsAjAh1aW50ZWdlcpRLAHVidWIu", "n": "4", "start": "0", "_shape": [], "dtype": "int64", "_np_random": "Generator(PCG64)"}, "n_envs": 16, "n_steps": 2048, "gamma": 0.99, "gae_lambda": 0.95, "ent_coef": 0.0, "vf_coef": 0.5, "max_grad_norm": 0.5, "batch_size": 32, "n_epochs": 10, "clip_range": {":type:": "<class 'function'>", ":serialized:": "gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuEQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz/JmZmZmZmahZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "clip_range_vf": null, "normalize_advantage": true, "target_kl": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuEQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz8aNuLrHEMthZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "system_info": {"OS": "Linux-6.1.85+-x86_64-with-glibc2.35 # 1 SMP PREEMPT_DYNAMIC Thu Jun 27 21:05:47 UTC 2024", "Python": "3.10.12", "Stable-Baselines3": "2.0.0a5", "PyTorch": "2.3.1+cu121", "GPU Enabled": "True", "Numpy": "1.26.4", "Cloudpickle": "2.2.1", "Gymnasium": "0.28.1", "OpenAI Gym": "0.25.2"}}
+
{"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==", "__module__": "stable_baselines3.common.policies", "__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function ActorCriticPolicy.__init__ at 0x7ccd7d0a9a20>", "_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x7ccd7d0a9ab0>", "reset_noise": "<function ActorCriticPolicy.reset_noise at 0x7ccd7d0a9b40>", "_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x7ccd7d0a9bd0>", "_build": "<function ActorCriticPolicy._build at 0x7ccd7d0a9c60>", "forward": "<function ActorCriticPolicy.forward at 0x7ccd7d0a9cf0>", "extract_features": "<function ActorCriticPolicy.extract_features at 0x7ccd7d0a9d80>", "_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x7ccd7d0a9e10>", "_predict": "<function ActorCriticPolicy._predict at 0x7ccd7d0a9ea0>", "evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x7ccd7d0a9f30>", "get_distribution": "<function ActorCriticPolicy.get_distribution at 0x7ccd7d0a9fc0>", "predict_values": "<function ActorCriticPolicy.predict_values at 0x7ccd7d0aa050>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x7ccd84e55a80>"}, "verbose": 1, "policy_kwargs": {}, "num_timesteps": 1015808, "_total_timesteps": 1000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1723348677579227999, "learning_rate": 0.0003, "tensorboard_log": null, "_last_obs": {":type:": "<class 'numpy.ndarray'>", ":serialized:": 
"gAWVdQIAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYAAgAAAAAAAOa/jj0/C1M+CFWwvePmnL52NjM9R/IGuwAAAAAAAAAAzTtxPkAH3z6gBX++RKWtvl9HdD0KC729AAAAAAAAAADmNIy94DO9PoJaCj+DANi+YCGZPg6P0j4AAAAAAAAAABpLsD24ZJc+igkMvRUvpr7BvZQ9lSxdvAAAAAAAAAAAzdDFPAOjIrwb74m9K8+3O4j7kb2uI6o8AACAPwAAgD+m6TE+6KEfP4nTg7v71gO/wgPiPeYwgLwAAAAAAAAAAPMCnb3lpqc/ctIVv35HBb/ai/u8+/iCvgAAAAAAAAAAKtCPPnyKRT+4EZI9YhrtvjICkT5GL7u9AAAAAAAAAAAAxi28e8OMPY0M8jwZyam+ltq7PTt77LwAAAAAAAAAADO78jv2BG66hashOI3aEzPmIte5TmU9twAAgD8AAIA/TXFiPYY9rT+6WIg+G6e/vrbr27ytw5w9AAAAAAAAAACa8Va9XC8oumbf37bCPdqx15LmOhkqBjYAAIA/AACAP2Y5ZT3Sx5w/NxUoPu7ACr+5Bda8M+IEPQAAAAAAAAAAmnXhuxnOsz8G4q6+Z+EZvkEvvztizhI9AAAAAAAAAAAzP+y8KSgHujAC2rwVYO84BkGxO5N7XrgAAIA/AACAP5priTxxxky7tgm+u038kDyxMIW8wCd5PQAAgD8AAIA/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksQSwiGlIwBQ5R0lFKULg=="}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVgwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSxCFlIwBQ5R0lFKULg=="}, "_last_original_obs": null, "_episode_num": 0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": -0.015808000000000044, "_stats_window_size": 100, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWV7QsAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHQHMxOPFNtZWMAWyUS9WMAXSUR0Cm6W2dVea8dX2UKGgGR0Bx5MtDlYEGaAdL4WgIR0Cm6YCSzPa+dX2UKGgGR0BxFrKV6eGxaAdNDgFoCEdApuniCvovBnV9lChoBkdAcQZf9gnc+WgHS9RoCEdApuogm1IAfnV9lChoBkdAcTuzGPxQSGgHS/NoCEdApuo7h73PA3V9lChoBkdAbRR29L6DXmgHS+toCEdApur87QswtnV9lChoBkdAceE1ZDArQWgHTQoBaAhHQKbrGMlTm4l1fZQoaAZHQHCFLzbvgFZoB0vQaAhHQKbrHYf4h2Z1fZQoaAZHQHIiP0RODapoB00GAWgIR0Cm6zLWZqmCdX2UKGgGR0BxU+vLX+VDaAdL+mgIR0Cm63iiyprDdX2UKGgGR0Bxu8lE7W/baAdL5WgIR0Cm67lirksCdX2UKGgGR0BxqlCQcPvsaAdL82gIR0Cm7BKekHlfdX2UKGgGR0Bzuaj/MnqnaAdLyWgIR0Cm7FUNSZSfdX2UKGgGR0BwDJMDfWMCaAdL7WgIR0Cm7GI3R5TqdX2UKGgGR0BvAj5hz/6waAdL92gIR0Cm7HXTd+G5dX2UKGgGR0By1vjYI0IkaAdNcQJoCEdApu3OXw9aEHV9lChoBkdAcAGLKV6eG2gHS/xoCEdApu3frrxAjnV9lChoBkdAcpNuVopQUGgHTQMBaAhHQKbuCEBbOeJ1fZQoaAZHQHAlWPHT7VJoB0voaAhHQKbuEm6XjVB1fZQoaAZHQHEe8jRlYlpoB0v1aAhHQKbuLKJ2t+11fZQoaAZHQHHi3R5TqB5oB0vFaAhHQKbuMXVLBbh1fZQoaAZHQHJbd4A0bcZoB0vbaAhHQKbulrUsnRd1fZQoaAZHQHCgUhq0tyxoB00pAWgIR0Cm7sjlYEGJdX2UKGgGR0BxrXCzkZJkaAdL3GgIR0Cm7vMSCe3AdX2UKGgGR0BxYK8nNPgvaAdL82gIR0Cm7vKeK8+SdX2UKGgGR0Bx+cCW/rSmaAdL4GgIR0Cm7zi7TUiIdX2UKGgGR0ByUdJYkmhNaAdL0mgIR0Cm709eyAx0dX2UKGgGR0Bxdm717IDHaAdNIwFoCEdApu+o0IkZ8HV9lChoBkdAcTEHtF8XvmgHS/RoCEdApvkkGX5WR3V9lChoBkdAcapr4nF5wGgHS/hoCEdApvlE5sCT2XV9lChoBkdAcoK77Kq4pmgHTQABaAhHQKb5Q9+PRzB1fZQoaAZHQHKhq8cuJ1toB0vkaAhHQKb6QJzDGcZ1fZQoaAZHQG6tGaQV9F5oB0v4aAhHQKb6ivFFUhp1fZQoaAZHQG+TSiM5wOxoB0voaAhHQKb6izru6Vd1fZQoaAZHQHGhrqyGBWhoB0viaAhHQKb6kDq4YrJ1fZQoaAZHQHB/YZIg/1RoB0vlaAhHQKb6oMPSUkh1fZQoaAZHQHGexegL7XRoB0v+aAhHQKb62L7XQMR1fZQoaAZHQHNyes1baAZoB0vdaAhHQKb666J66at1fZQoaAZHQHOCImPYFq1oB0vOaAhHQKb7EEnLJS11fZQoaAZHQHHBePJaJRBoB0v3aAhHQKb7gqaw2VF1fZQoaAZHQHQIor8R+SdoB0vcaAhHQKb7lK4hEBt1fZQoaAZHQHEoUDIRywRoB0vbaAhHQKb7qws5GSZ1fZQoaAZHQHGr0qpcX3xoB00CAWgIR0Cm+9YE4ecQdX2UKGgGR0Bwle6iCaqkaAdL02gIR0Cm++jAaef7dX2UKGgGR0BziMGW2PT5aAdL02gIR0Cm/Exy4nWrdX2UKGgGR0BwauIsRQJpaAdLzWgIR0Cm/FdMbm2cdX2UKGgGR0By7d8eCCjDaAdL6GgIR0Cm/LovSMLndX2UKGgGR0Byg9BOYYzjaAdLw2gIR0Cm/Wy26TW5dX2UKGgGR0BySigUUO/daAdL32gIR0Cm/gQ+t8u0dX2UKGgGR0ByjoYYR/ViaAdL+WgIR0Cm/h0zCUHIdX2UKGgGR0BxSVYaHbh4aAdL9GgIR0Cm/muEEkjYdX2UKGgGR0BwmJz90ihWaAdL6mgIR0Cm/qKur6tUdX2UKGgGR0BzmOmZV
n27aAdL/WgIR0Cm/qYM4LkTdX2UKGgGR0BwY/MxGlQ/aAdL52gIR0Cm/rL8BMi9dX2UKGgGR0BxPhrAP/aQaAdL12gIR0Cm/y8GcFyJdX2UKGgGR0ByVx+az/p/aAdL9mgIR0Cm/zfg75mAdX2UKGgGR0BuyLin5zo2aAdL52gIR0Cm/6qsdT5wdX2UKGgGR0By2K0ngHeKaAdL+2gIR0Cm/+woTfzjdX2UKGgGR0BzEoUxmCiAaAdL0mgIR0CnADAy2x6fdX2UKGgGR0BweHG+9Jz1aAdL+mgIR0CnAGJxFRYSdX2UKGgGR0BzaFVYISlFaAdL4GgIR0CnAGzwlSjydX2UKGgGR0Bxb2Y4Qz1saAdLy2gIR0CnAJRKHwgDdX2UKGgGR0By9BcGC7K8aAdNDQFoCEdApwCeaDwpfHV9lChoBkdAUnmp++dsi2gHS6ZoCEdApwC1BjWkJ3V9lChoBkdATHVQ0oBq9GgHS4VoCEdApwI/VkMCtHV9lChoBkdAcV4fkWAPNGgHS8hoCEdApwJi925hB3V9lChoBkdAceV5qubI92gHS+JoCEdApwKkvZh8Y3V9lChoBkdAb1lq8lHBlGgHS+loCEdApwLlZTyau3V9lChoBkdAcZ8/XoTwlWgHS95oCEdApwMcJY1YQ3V9lChoBkdAckQAAAAAAGgHS+loCEdApwNlTLns9nV9lChoBkdAcA9sOoYNzGgHS+FoCEdApwPKwD/2kHV9lChoBkdAb3sfHPu5SWgHS99oCEdApwPKrJbMYHV9lChoBkdAcFfB0IToMmgHTQABaAhHQKcD82CuloF1fZQoaAZHQHJ9aG+K0lZoB0vlaAhHQKcEsNPP9k11fZQoaAZHQHJXAnlXA/NoB0veaAhHQKcE1PO6d2B1fZQoaAZHQHEHMl9jPOZoB0vXaAhHQKcE7L/0dzZ1fZQoaAZHQHCQsj3VTaVoB0vcaAhHQKcFLzT4L1F1fZQoaAZHQHB4v6be/HpoB0vjaAhHQKcFfScbzbx1fZQoaAZHQHGIaLfk3jxoB0v6aAhHQKcFlY150KZ1fZQoaAZHQHJrh4Y77sRoB00CAWgIR0CnBfKFqSHNdX2UKGgGR0A23Cj1wo9caAdLn2gIR0CnBj8ZccENdX2UKGgGR0BxGjBl+VkdaAdL12gIR0CnBpxHPNVzdX2UKGgGR0BR8HIMjNY9aAdLoWgIR0CnBrdnK4hEdX2UKGgGR0Bx5ZEhJRO2aAdL12gIR0CnBsi0OVgQdX2UKGgGR0By8at0V8CxaAdL8GgIR0CnBuvmxMWXdX2UKGgGR0BStREBsANoaAdLxWgIR0CnBzpwCKaYdX2UKGgGR0BzLYSqU/wBaAdLyGgIR0CnB2Pv0AcUdX2UKGgGR0BuBAPGyX2NaAdL42gIR0CnB2we/5+IdX2UKGgGR0Bysgzdk8RuaAdL/WgIR0CnB4CyyD7JdX2UKGgGR0BxukAAAAAAaAdLzGgIR0CnCBoHLRrrdX2UKGgGR0ByJDpLVWjoaAdL2mgIR0CnCD1wo9cKdX2UKGgGR0Byxy9RJmNBaAdL1WgIR0CnCG83dbgTdX2UKGgGR0ByskjeKsMiaAdL/mgIR0CnCKj5j6N3dX2UKGgGR0BzEJs9B8hLaAdL3WgIR0CnCMMo+fRNdX2UKGgGR0BvN3qZ+hGpaAdL22gIR0CnCRmwRoRJdX2UKGgGR0BxgbsdDIBBaAdL9GgIR0CnCSd+w1R+dX2UKGgGR0Bx2mGKyfL+aAdL0mgIR0CnCaTGgi/xdX2UKGgGR0BymRflZHNHaAdL4GgIR0CnCfRJNCZ4dX2UKGgGR0Bx5FJoTPB0aAdNBgFoCEdApwoTIYFaCHV9lChoBkdAcrrOymhufmgHS8BoCEdApwo2wzLwF3VlLg=="}, "ep_success_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 496, "observation_space": {":type:": "<class 'gymnasium.spaces.box.Box'>", ":serialized:": "gAWVdgIAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWCAAAAAAAAAABAQEBAQEBAZRoCIwCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksIhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoESiWCAAAAAAAAAABAQEBAQEBAZRoFUsIhZRoGXSUUpSMBl9zaGFwZZRLCIWUjANsb3eUaBEoliAAAAAAAAAAAAC0wgAAtMIAAKDAAACgwNsPScAAAKDAAAAAgAAAAICUaAtLCIWUaBl0lFKUjARoaWdolGgRKJYgAAAAAAAAAAAAtEIAALRCAACgQAAAoEDbD0lAAACgQAAAgD8AAIA/lGgLSwiFlGgZdJRSlIwIbG93X3JlcHKUjFtbLTkwLiAgICAgICAgLTkwLiAgICAgICAgIC01LiAgICAgICAgIC01LiAgICAgICAgIC0zLjE0MTU5MjcgIC01LgogIC0wLiAgICAgICAgIC0wLiAgICAgICBdlIwJaGlnaF9yZXBylIxTWzkwLiAgICAgICAgOTAuICAgICAgICAgNS4gICAgICAgICA1LiAgICAgICAgIDMuMTQxNTkyNyAgNS4KICAxLiAgICAgICAgIDEuICAgICAgIF2UjApfbnBfcmFuZG9tlE51Yi4=", "dtype": "float32", "bounded_below": "[ True True True True True True True True]", "bounded_above": "[ True True True True True True True True]", "_shape": [8], "low": "[-90. -90. -5. -5. -3.1415927 -5.\n -0. -0. ]", "high": "[90. 90. 5. 5. 3.1415927 5.\n 1. 1. ]", "low_repr": "[-90. -90. -5. -5. -3.1415927 -5.\n -0. -0. ]", "high_repr": "[90. 90. 5. 5. 3.1415927 5.\n 1. 1. 
]", "_np_random": null}, "action_space": {":type:": "<class 'gymnasium.spaces.discrete.Discrete'>", ":serialized:": "gAWV2wAAAAAAAACMGWd5bW5hc2l1bS5zcGFjZXMuZGlzY3JldGWUjAhEaXNjcmV0ZZSTlCmBlH2UKIwBbpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCaTiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIBAAAAAAAAACUhpRSlIwFc3RhcnSUaAhoDkMIAAAAAAAAAACUhpRSlIwGX3NoYXBllCmMBWR0eXBllGgOjApfbnBfcmFuZG9tlE51Yi4=", "n": "4", "start": "0", "_shape": [], "dtype": "int64", "_np_random": null}, "n_envs": 16, "n_steps": 1024, "gamma": 0.999, "gae_lambda": 0.98, "ent_coef": 0.01, "vf_coef": 0.5, "max_grad_norm": 0.5, "batch_size": 64, "n_epochs": 4, "clip_range": {":type:": "<class 'function'>", ":serialized:": "gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuEQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz/JmZmZmZmahZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "clip_range_vf": null, "normalize_advantage": true, "target_kl": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuEQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz8zqSowVTJhhZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "system_info": {"OS": "Linux-6.1.85+-x86_64-with-glibc2.35 # 1 SMP PREEMPT_DYNAMIC Thu Jun 27 21:05:47 UTC 2024", "Python": "3.10.12", "Stable-Baselines3": "2.0.0a5", "PyTorch": "2.3.1+cu121", "GPU Enabled": "False", "Numpy": "1.26.4", "Cloudpickle": "2.2.1", "Gymnasium": "0.28.1", "OpenAI Gym": "0.25.2"}}
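The updated config.json above records the uploaded run's hyperparameters: 16 environments, n_steps 1024, batch_size 64, n_epochs 4, gamma 0.999, gae_lambda 0.98, ent_coef 0.01, learning_rate 0.0003, and roughly one million timesteps. A minimal sketch of how those values map onto the Stable-Baselines3 API follows; the "MlpPolicy" alias and the make_vec_env helper are standard SB3 usage assumed here, not something recorded in this commit.

from stable_baselines3 import PPO
from stable_baselines3.common.env_util import make_vec_env

# 16 parallel LunarLander-v2 environments, matching "n_envs": 16 in the serialized data.
vec_env = make_vec_env("LunarLander-v2", n_envs=16)

model = PPO(
    "MlpPolicy",          # resolves to the ActorCriticPolicy referenced in config.json
    vec_env,
    n_steps=1024,         # "n_steps": 1024
    batch_size=64,        # "batch_size": 64
    n_epochs=4,           # "n_epochs": 4
    gamma=0.999,          # "gamma": 0.999
    gae_lambda=0.98,      # "gae_lambda": 0.98
    ent_coef=0.01,        # "ent_coef": 0.01
    learning_rate=0.0003, # "learning_rate": 0.0003
    verbose=1,            # "verbose": 1
)
model.learn(total_timesteps=1_000_000)  # "_total_timesteps": 1000000
model.save("ppo-LunarLander-v2")        # produces the .zip updated below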
ppo-LunarLander-v2.zip
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:9686b61448bf39f2104ddcc7671275c5db4a006494535f54761e9a7b19727d90
+size 147461
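With the LFS pointer updated, the checkpoint can be pulled from the Hub and loaded back into Stable-Baselines3. A hedged sketch is below: load_from_hub comes from the huggingface_sb3 helper package, and the repo_id is a placeholder, not taken from this commit.

from huggingface_sb3 import load_from_hub
from stable_baselines3 import PPO

# Download the .zip tracked by the pointer above; repo_id is hypothetical.
checkpoint_path = load_from_hub(
    repo_id="<user>/ppo-LunarLander-v2",
    filename="ppo-LunarLander-v2.zip",
)
model = PPO.load(checkpoint_path)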
ppo-LunarLander-v2/data
CHANGED
@@ -4,34 +4,34 @@
":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==",
"__module__": "stable_baselines3.common.policies",
"__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
-"__init__": "<function ActorCriticPolicy.__init__ at
-"_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at
-"reset_noise": "<function ActorCriticPolicy.reset_noise at
-"_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at
-"_build": "<function ActorCriticPolicy._build at
-"forward": "<function ActorCriticPolicy.forward at
-"extract_features": "<function ActorCriticPolicy.extract_features at
-"_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at
-"_predict": "<function ActorCriticPolicy._predict at
-"evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at
-"get_distribution": "<function ActorCriticPolicy.get_distribution at
-"predict_values": "<function ActorCriticPolicy.predict_values at
"__abstractmethods__": "frozenset()",
-"_abc_impl": "<_abc._abc_data object at
},
-"verbose": 0,
"policy_kwargs": {},
"num_timesteps": 1015808,
"_total_timesteps": 1000000,
"_num_timesteps_at_start": 0,
-"seed": 123456,
"action_noise": null,
-"start_time": 1723302314568566763,
-"learning_rate": 0.0001,
"tensorboard_log": null,
"_last_obs": {
":type:": "<class 'numpy.ndarray'>",
-":serialized:": "
},
"_last_episode_starts": {
":type:": "<class 'numpy.ndarray'>",
@@ -45,13 +45,13 @@
"_stats_window_size": 100,
"ep_info_buffer": {
":type:": "<class 'collections.deque'>",
-":serialized:": "
},
"ep_success_buffer": {
":type:": "<class 'collections.deque'>",
":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
},
-"_n_updates": 310,
"observation_space": {
":type:": "<class 'gymnasium.spaces.box.Box'>",
":serialized:": "gAWVdgIAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWCAAAAAAAAAABAQEBAQEBAZRoCIwCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksIhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoESiWCAAAAAAAAAABAQEBAQEBAZRoFUsIhZRoGXSUUpSMBl9zaGFwZZRLCIWUjANsb3eUaBEoliAAAAAAAAAAAAC0wgAAtMIAAKDAAACgwNsPScAAAKDAAAAAgAAAAICUaAtLCIWUaBl0lFKUjARoaWdolGgRKJYgAAAAAAAAAAAAtEIAALRCAACgQAAAoEDbD0lAAACgQAAAgD8AAIA/lGgLSwiFlGgZdJRSlIwIbG93X3JlcHKUjFtbLTkwLiAgICAgICAgLTkwLiAgICAgICAgIC01LiAgICAgICAgIC01LiAgICAgICAgIC0zLjE0MTU5MjcgIC01LgogIC0wLiAgICAgICAgIC0wLiAgICAgICBdlIwJaGlnaF9yZXBylIxTWzkwLiAgICAgICAgOTAuICAgICAgICAgNS4gICAgICAgICA1LiAgICAgICAgIDMuMTQxNTkyNyAgNS4KICAxLiAgICAgICAgIDEuICAgICAgIF2UjApfbnBfcmFuZG9tlE51Yi4=",
@@ -69,22 +69,22 @@
},
"action_space": {
":type:": "<class 'gymnasium.spaces.discrete.Discrete'>",
-":serialized:": "
"n": "4",
"start": "0",
"_shape": [],
"dtype": "int64",
-"_np_random": "Generator(PCG64)"
},
"n_envs": 16,
-"n_steps": 2048,
-"gamma": 0.99,
-"gae_lambda": 0.95,
-"ent_coef": 0.0,
"vf_coef": 0.5,
"max_grad_norm": 0.5,
-"batch_size": 32,
-"n_epochs": 10,
"clip_range": {
":type:": "<class 'function'>",
":serialized:": "gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuEQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz/JmZmZmZmahZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"
@@ -94,6 +94,6 @@
"target_kl": null,
"lr_schedule": {
":type:": "<class 'function'>",
-
":serialized:": "gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuEQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+
}
}

":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==",
"__module__": "stable_baselines3.common.policies",
"__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
+"__init__": "<function ActorCriticPolicy.__init__ at 0x7ccd7d0a9a20>",
+"_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x7ccd7d0a9ab0>",
+"reset_noise": "<function ActorCriticPolicy.reset_noise at 0x7ccd7d0a9b40>",
+"_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x7ccd7d0a9bd0>",
+"_build": "<function ActorCriticPolicy._build at 0x7ccd7d0a9c60>",
+"forward": "<function ActorCriticPolicy.forward at 0x7ccd7d0a9cf0>",
+"extract_features": "<function ActorCriticPolicy.extract_features at 0x7ccd7d0a9d80>",
+"_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x7ccd7d0a9e10>",
+"_predict": "<function ActorCriticPolicy._predict at 0x7ccd7d0a9ea0>",
+"evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x7ccd7d0a9f30>",
+"get_distribution": "<function ActorCriticPolicy.get_distribution at 0x7ccd7d0a9fc0>",
+"predict_values": "<function ActorCriticPolicy.predict_values at 0x7ccd7d0aa050>",
"__abstractmethods__": "frozenset()",
+"_abc_impl": "<_abc._abc_data object at 0x7ccd84e55a80>"
},
+"verbose": 1,
"policy_kwargs": {},
"num_timesteps": 1015808,
"_total_timesteps": 1000000,
"_num_timesteps_at_start": 0,
+"seed": null,
"action_noise": null,
+"start_time": 1723348677579227999,
+"learning_rate": 0.0003,
"tensorboard_log": null,
"_last_obs": {
":type:": "<class 'numpy.ndarray'>",
+
":serialized:": "gAWVdQIAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYAAgAAAAAAAOa/jj0/C1M+CFWwvePmnL52NjM9R/IGuwAAAAAAAAAAzTtxPkAH3z6gBX++RKWtvl9HdD0KC729AAAAAAAAAADmNIy94DO9PoJaCj+DANi+YCGZPg6P0j4AAAAAAAAAABpLsD24ZJc+igkMvRUvpr7BvZQ9lSxdvAAAAAAAAAAAzdDFPAOjIrwb74m9K8+3O4j7kb2uI6o8AACAPwAAgD+m6TE+6KEfP4nTg7v71gO/wgPiPeYwgLwAAAAAAAAAAPMCnb3lpqc/ctIVv35HBb/ai/u8+/iCvgAAAAAAAAAAKtCPPnyKRT+4EZI9YhrtvjICkT5GL7u9AAAAAAAAAAAAxi28e8OMPY0M8jwZyam+ltq7PTt77LwAAAAAAAAAADO78jv2BG66hashOI3aEzPmIte5TmU9twAAgD8AAIA/TXFiPYY9rT+6WIg+G6e/vrbr27ytw5w9AAAAAAAAAACa8Va9XC8oumbf37bCPdqx15LmOhkqBjYAAIA/AACAP2Y5ZT3Sx5w/NxUoPu7ACr+5Bda8M+IEPQAAAAAAAAAAmnXhuxnOsz8G4q6+Z+EZvkEvvztizhI9AAAAAAAAAAAzP+y8KSgHujAC2rwVYO84BkGxO5N7XrgAAIA/AACAP5priTxxxky7tgm+u038kDyxMIW8wCd5PQAAgD8AAIA/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksQSwiGlIwBQ5R0lFKULg=="
},
"_last_episode_starts": {
":type:": "<class 'numpy.ndarray'>",
"_stats_window_size": 100,
"ep_info_buffer": {
":type:": "<class 'collections.deque'>",
+
":serialized:": "gAWV7QsAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHQHMxOPFNtZWMAWyUS9WMAXSUR0Cm6W2dVea8dX2UKGgGR0Bx5MtDlYEGaAdL4WgIR0Cm6YCSzPa+dX2UKGgGR0BxFrKV6eGxaAdNDgFoCEdApuniCvovBnV9lChoBkdAcQZf9gnc+WgHS9RoCEdApuogm1IAfnV9lChoBkdAcTuzGPxQSGgHS/NoCEdApuo7h73PA3V9lChoBkdAbRR29L6DXmgHS+toCEdApur87QswtnV9lChoBkdAceE1ZDArQWgHTQoBaAhHQKbrGMlTm4l1fZQoaAZHQHCFLzbvgFZoB0vQaAhHQKbrHYf4h2Z1fZQoaAZHQHIiP0RODapoB00GAWgIR0Cm6zLWZqmCdX2UKGgGR0BxU+vLX+VDaAdL+mgIR0Cm63iiyprDdX2UKGgGR0Bxu8lE7W/baAdL5WgIR0Cm67lirksCdX2UKGgGR0BxqlCQcPvsaAdL82gIR0Cm7BKekHlfdX2UKGgGR0Bzuaj/MnqnaAdLyWgIR0Cm7FUNSZSfdX2UKGgGR0BwDJMDfWMCaAdL7WgIR0Cm7GI3R5TqdX2UKGgGR0BvAj5hz/6waAdL92gIR0Cm7HXTd+G5dX2UKGgGR0By1vjYI0IkaAdNcQJoCEdApu3OXw9aEHV9lChoBkdAcAGLKV6eG2gHS/xoCEdApu3frrxAjnV9lChoBkdAcpNuVopQUGgHTQMBaAhHQKbuCEBbOeJ1fZQoaAZHQHAlWPHT7VJoB0voaAhHQKbuEm6XjVB1fZQoaAZHQHEe8jRlYlpoB0v1aAhHQKbuLKJ2t+11fZQoaAZHQHHi3R5TqB5oB0vFaAhHQKbuMXVLBbh1fZQoaAZHQHJbd4A0bcZoB0vbaAhHQKbulrUsnRd1fZQoaAZHQHCgUhq0tyxoB00pAWgIR0Cm7sjlYEGJdX2UKGgGR0BxrXCzkZJkaAdL3GgIR0Cm7vMSCe3AdX2UKGgGR0BxYK8nNPgvaAdL82gIR0Cm7vKeK8+SdX2UKGgGR0Bx+cCW/rSmaAdL4GgIR0Cm7zi7TUiIdX2UKGgGR0ByUdJYkmhNaAdL0mgIR0Cm709eyAx0dX2UKGgGR0Bxdm717IDHaAdNIwFoCEdApu+o0IkZ8HV9lChoBkdAcTEHtF8XvmgHS/RoCEdApvkkGX5WR3V9lChoBkdAcapr4nF5wGgHS/hoCEdApvlE5sCT2XV9lChoBkdAcoK77Kq4pmgHTQABaAhHQKb5Q9+PRzB1fZQoaAZHQHKhq8cuJ1toB0vkaAhHQKb6QJzDGcZ1fZQoaAZHQG6tGaQV9F5oB0v4aAhHQKb6ivFFUhp1fZQoaAZHQG+TSiM5wOxoB0voaAhHQKb6izru6Vd1fZQoaAZHQHGhrqyGBWhoB0viaAhHQKb6kDq4YrJ1fZQoaAZHQHB/YZIg/1RoB0vlaAhHQKb6oMPSUkh1fZQoaAZHQHGexegL7XRoB0v+aAhHQKb62L7XQMR1fZQoaAZHQHNyes1baAZoB0vdaAhHQKb666J66at1fZQoaAZHQHOCImPYFq1oB0vOaAhHQKb7EEnLJS11fZQoaAZHQHHBePJaJRBoB0v3aAhHQKb7gqaw2VF1fZQoaAZHQHQIor8R+SdoB0vcaAhHQKb7lK4hEBt1fZQoaAZHQHEoUDIRywRoB0vbaAhHQKb7qws5GSZ1fZQoaAZHQHGr0qpcX3xoB00CAWgIR0Cm+9YE4ecQdX2UKGgGR0Bwle6iCaqkaAdL02gIR0Cm++jAaef7dX2UKGgGR0BziMGW2PT5aAdL02gIR0Cm/Exy4nWrdX2UKGgGR0BwauIsRQJpaAdLzWgIR0Cm/FdMbm2cdX2UKGgGR0By7d8eCCjDaAdL6GgIR0Cm/LovSMLndX2UKGgGR0Byg9BOYYzjaAdLw2gIR0Cm/Wy26TW5dX2UKGgGR0BySigUUO/daAdL32gIR0Cm/gQ+t8u0dX2UKGgGR0ByjoYYR/ViaAdL+WgIR0Cm/h0zCUHIdX2UKGgGR0BxSVYaHbh4aAdL9GgIR0Cm/muEEkjYdX2UKGgGR0BwmJz90ihWaAdL6mgIR0Cm/qKur6tUdX2UKGgGR0BzmOmZVn27aAdL/WgIR0Cm/qYM4LkTdX2UKGgGR0BwY/MxGlQ/aAdL52gIR0Cm/rL8BMi9dX2UKGgGR0BxPhrAP/aQaAdL12gIR0Cm/y8GcFyJdX2UKGgGR0ByVx+az/p/aAdL9mgIR0Cm/zfg75mAdX2UKGgGR0BuyLin5zo2aAdL52gIR0Cm/6qsdT5wdX2UKGgGR0By2K0ngHeKaAdL+2gIR0Cm/+woTfzjdX2UKGgGR0BzEoUxmCiAaAdL0mgIR0CnADAy2x6fdX2UKGgGR0BweHG+9Jz1aAdL+mgIR0CnAGJxFRYSdX2UKGgGR0BzaFVYISlFaAdL4GgIR0CnAGzwlSjydX2UKGgGR0Bxb2Y4Qz1saAdLy2gIR0CnAJRKHwgDdX2UKGgGR0By9BcGC7K8aAdNDQFoCEdApwCeaDwpfHV9lChoBkdAUnmp++dsi2gHS6ZoCEdApwC1BjWkJ3V9lChoBkdATHVQ0oBq9GgHS4VoCEdApwI/VkMCtHV9lChoBkdAcV4fkWAPNGgHS8hoCEdApwJi925hB3V9lChoBkdAceV5qubI92gHS+JoCEdApwKkvZh8Y3V9lChoBkdAb1lq8lHBlGgHS+loCEdApwLlZTyau3V9lChoBkdAcZ8/XoTwlWgHS95oCEdApwMcJY1YQ3V9lChoBkdAckQAAAAAAGgHS+loCEdApwNlTLns9nV9lChoBkdAcA9sOoYNzGgHS+FoCEdApwPKwD/2kHV9lChoBkdAb3sfHPu5SWgHS99oCEdApwPKrJbMYHV9lChoBkdAcFfB0IToMmgHTQABaAhHQKcD82CuloF1fZQoaAZHQHJ9aG+K0lZoB0vlaAhHQKcEsNPP9k11fZQoaAZHQHJXAnlXA/NoB0veaAhHQKcE1PO6d2B1fZQoaAZHQHEHMl9jPOZoB0vXaAhHQKcE7L/0dzZ1fZQoaAZHQHCQsj3VTaVoB0vcaAhHQKcFLzT4L1F1fZQoaAZHQHB4v6be/HpoB0vjaAhHQKcFfScbzbx1fZQoaAZHQHGIaLfk3jxoB0v6aAhHQKcFlY150KZ1fZQoaAZHQHJrh4Y77sRoB00CAWgIR0CnBfKFqSHNdX2UKGgGR0A23Cj1wo9caAdLn2gIR0CnBj8ZccENdX2UKGgGR0BxGjBl+VkdaAdL12gIR0CnBpxHPNVzdX2UKGgGR0BR8HIMjNY9aAdLoWgIR0CnBrdnK4hEdX2UKGgGR0Bx5ZEhJRO2aAdL12gIR0CnBsi0OVgQdX2UKGgGR0By8at0V8CxaAdL8GgIR0CnBuvmxMWXdX2UKGgGR0BStREBsANoaAdLxWgIR0CnBzpwCKaYdX2UKGgGR0BzLYSqU/
wBaAdLyGgIR0CnB2Pv0AcUdX2UKGgGR0BuBAPGyX2NaAdL42gIR0CnB2we/5+IdX2UKGgGR0Bysgzdk8RuaAdL/WgIR0CnB4CyyD7JdX2UKGgGR0BxukAAAAAAaAdLzGgIR0CnCBoHLRrrdX2UKGgGR0ByJDpLVWjoaAdL2mgIR0CnCD1wo9cKdX2UKGgGR0Byxy9RJmNBaAdL1WgIR0CnCG83dbgTdX2UKGgGR0ByskjeKsMiaAdL/mgIR0CnCKj5j6N3dX2UKGgGR0BzEJs9B8hLaAdL3WgIR0CnCMMo+fRNdX2UKGgGR0BvN3qZ+hGpaAdL22gIR0CnCRmwRoRJdX2UKGgGR0BxgbsdDIBBaAdL9GgIR0CnCSd+w1R+dX2UKGgGR0Bx2mGKyfL+aAdL0mgIR0CnCaTGgi/xdX2UKGgGR0BymRflZHNHaAdL4GgIR0CnCfRJNCZ4dX2UKGgGR0Bx5FJoTPB0aAdNBgFoCEdApwoTIYFaCHV9lChoBkdAcrrOymhufmgHS8BoCEdApwo2wzLwF3VlLg=="
},
"ep_success_buffer": {
":type:": "<class 'collections.deque'>",
":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
},
+"_n_updates": 496,
"observation_space": {
":type:": "<class 'gymnasium.spaces.box.Box'>",
":serialized:": "gAWVdgIAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWCAAAAAAAAAABAQEBAQEBAZRoCIwCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksIhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoESiWCAAAAAAAAAABAQEBAQEBAZRoFUsIhZRoGXSUUpSMBl9zaGFwZZRLCIWUjANsb3eUaBEoliAAAAAAAAAAAAC0wgAAtMIAAKDAAACgwNsPScAAAKDAAAAAgAAAAICUaAtLCIWUaBl0lFKUjARoaWdolGgRKJYgAAAAAAAAAAAAtEIAALRCAACgQAAAoEDbD0lAAACgQAAAgD8AAIA/lGgLSwiFlGgZdJRSlIwIbG93X3JlcHKUjFtbLTkwLiAgICAgICAgLTkwLiAgICAgICAgIC01LiAgICAgICAgIC01LiAgICAgICAgIC0zLjE0MTU5MjcgIC01LgogIC0wLiAgICAgICAgIC0wLiAgICAgICBdlIwJaGlnaF9yZXBylIxTWzkwLiAgICAgICAgOTAuICAgICAgICAgNS4gICAgICAgICA1LiAgICAgICAgIDMuMTQxNTkyNyAgNS4KICAxLiAgICAgICAgIDEuICAgICAgIF2UjApfbnBfcmFuZG9tlE51Yi4=",
},
"action_space": {
":type:": "<class 'gymnasium.spaces.discrete.Discrete'>",
+
":serialized:": "gAWV2wAAAAAAAACMGWd5bW5hc2l1bS5zcGFjZXMuZGlzY3JldGWUjAhEaXNjcmV0ZZSTlCmBlH2UKIwBbpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCaTiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIBAAAAAAAAACUhpRSlIwFc3RhcnSUaAhoDkMIAAAAAAAAAACUhpRSlIwGX3NoYXBllCmMBWR0eXBllGgOjApfbnBfcmFuZG9tlE51Yi4=",
"n": "4",
"start": "0",
"_shape": [],
"dtype": "int64",
+"_np_random": null
},
"n_envs": 16,
+"n_steps": 1024,
+"gamma": 0.999,
+"gae_lambda": 0.98,
+"ent_coef": 0.01,
"vf_coef": 0.5,
"max_grad_norm": 0.5,
+"batch_size": 64,
+"n_epochs": 4,
"clip_range": {
":type:": "<class 'function'>",
":serialized:": "gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuEQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz/JmZmZmZmahZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"
"target_kl": null,
"lr_schedule": {
":type:": "<class 'function'>",
+
":serialized:": "gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuEQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz8zqSowVTJhhZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"
}
}
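The ppo-LunarLander-v2/ files changed in this commit (data, policy.pth, policy.optimizer.pth, system_info.txt) mirror the members Stable-Baselines3 writes into the saved .zip archive. A quick stdlib sketch to confirm that locally, assuming the archive sits in the working directory:

import zipfile

# List the members SB3 packed into the checkpoint archive.
with zipfile.ZipFile("ppo-LunarLander-v2.zip") as zf:
    for member in zf.namelist():
        print(member)  # expect data, policy.pth, policy.optimizer.pth, system_info.txt, ...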
ppo-LunarLander-v2/policy.optimizer.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:d07d48edef6d848c2b2c215ee03243dbc313c2d8894b3776feddd5c3d13fe2ec
+size 87978
ppo-LunarLander-v2/policy.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:bdd6cb623c698050a160e11d1238cff11d62c467ea06bbda3e2eafed6ab814c1
+size 43634
ppo-LunarLander-v2/system_info.txt
CHANGED
@@ -2,7 +2,7 @@
 - Python: 3.10.12
 - Stable-Baselines3: 2.0.0a5
 - PyTorch: 2.3.1+cu121
-- GPU Enabled: True
+- GPU Enabled: False
 - Numpy: 1.26.4
 - Cloudpickle: 2.2.1
 - Gymnasium: 0.28.1
replay.mp4
CHANGED
Binary files a/replay.mp4 and b/replay.mp4 differ
results.json
CHANGED
@@ -1 +1 @@
-{"mean_reward":
+{"mean_reward": 276.96300560000003, "std_reward": 17.59343489657891, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2024-08-11T04:31:49.740478"}
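results.json now records a deterministic 10-episode evaluation (mean_reward about 276.96, std_reward about 17.59), which is the value surfaced in the README's model-index. Below is a minimal sketch of how such numbers are typically produced with SB3's evaluate_policy; the Monitor wrapper and local filename are assumptions, not details taken from this commit.

import gymnasium as gym
from stable_baselines3 import PPO
from stable_baselines3.common.evaluation import evaluate_policy
from stable_baselines3.common.monitor import Monitor

model = PPO.load("ppo-LunarLander-v2.zip")       # checkpoint uploaded in this commit
eval_env = Monitor(gym.make("LunarLander-v2"))   # Monitor records episodic returns

mean_reward, std_reward = evaluate_policy(
    model, eval_env, n_eval_episodes=10, deterministic=True
)
print(f"mean_reward={mean_reward:.2f} +/- {std_reward:.2f}")  # cf. 276.96 +/- 17.59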