s14pe committed
Commit d534eb3
1 Parent(s): 7403b3f

Upload folder using huggingface_hub
README.md CHANGED
@@ -16,7 +16,7 @@ model-index:
  type: LunarLander-v2
  metrics:
  - type: mean_reward
- value: 223.35 +/- 80.59
+ value: 279.00 +/- 15.83
  name: mean_reward
  verified: false
  ---
config.json CHANGED
@@ -1 +1 @@
- {"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==", "__module__": "stable_baselines3.common.policies", "__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function ActorCriticPolicy.__init__ at 0x7da8e4a2caf0>", "_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x7da8e4a2cb80>", "reset_noise": "<function ActorCriticPolicy.reset_noise at 0x7da8e4a2cc10>", "_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x7da8e4a2cca0>", "_build": "<function ActorCriticPolicy._build at 0x7da8e4a2cd30>", "forward": "<function ActorCriticPolicy.forward at 0x7da8e4a2cdc0>", "extract_features": "<function ActorCriticPolicy.extract_features at 0x7da8e4a2ce50>", "_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x7da8e4a2cee0>", "_predict": "<function ActorCriticPolicy._predict at 0x7da8e4a2cf70>", "evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x7da8e4a2d000>", "get_distribution": "<function ActorCriticPolicy.get_distribution at 0x7da8e4a2d090>", "predict_values": "<function ActorCriticPolicy.predict_values at 0x7da8e4a2d120>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x7da8e49cd200>"}, "verbose": 1, "policy_kwargs": {}, "num_timesteps": 11600, "_total_timesteps": 1000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1709647256978322952, "learning_rate": 0.0003, "tensorboard_log": null, "_last_obs": {":type:": "<class 'numpy.ndarray'>", ":serialized:": 
"gAWVdQIAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYAAgAAAAAAAE2mKj608XI/edGmPtOt6L6hqEU+MilBPQAAAAAAAAAAZn2lPa6htLrugxW5m9V9tmJkYTrzwCw4AACAPwAAgD8wn1S+rv2RvLUO/bodYjC5PqoCPi9oGjoAAIA/AACAP5oeZz5ndbw+6D9UvaHInb6npak9EFMgvQAAAAAAAAAAmtIwvvRDrT9CrBa/ZbbbvuPLHL5cASe+AAAAAAAAAACai9Y9V/ZMPM4Llr7wwfq9VQghvWbPGb0AAAAAAAAAAOZlWz1F8rQ/1is+P0AbgL3yCMy8It4IPQAAAAAAAAAAACuyPXvKmbpQl7g622MFtlvB0roazNm5AAAAAAAAgD+NkBY+7Nvsu1rZAb6WlCo9uhtovQKcCj4AAIA/AACAP01VUz0UfJS6ohDKNIEdXTAYqK25jXHXswAAgD8AAIA/jTKnvQp6aDwbcEM+UdttvhIACT0yFjG9AAAAAAAAAADgOiM+1MGIvAhyATx+TZ26FunrvUzDgbsAAIA/AACAP8Bs1j3hjKy6rQuhspujTTB4nNU4bsewMwAAgD8AAIA/gL5PPpyBED0umOC3N+oit3kBpz6OgJ82AACAPwAAgD86uKQ+lPPFPh2gZL0ck2++j/bOPZFflLwAAAAAAAAAAKBrKD6UqIe8DSSRue0RR7ak2O29l/m1OAAAgD8AAIA/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksQSwiGlIwBQ5R0lFKULg=="}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVgwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSxCFlIwBQ5R0lFKULg=="}, "_last_original_obs": null, "_episode_num": 0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": -0.015808000000000044, "_stats_window_size": 100, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVPgMAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHQEYl2VVxS52MAWyUS8CMAXSUR0CiPBV/Ue+3dX2UKGgGR0BvwsiY9gWraAdL52gIR0CiPMTBRAKOdX2UKGgGR0BxdY0aZQYUaAdL6GgIR0CiPMm2LHdXdX2UKGgGR0BzDnUc4o7WaAdL6GgIR0CiPMmYKIBSdX2UKGgGR0Bx3/qGDcubaAdL6WgIR0CiPM39zfaYdX2UKGgGR0BwzCoZQ53laAdL9WgIR0CiPQHg5zYFdX2UKGgGR0Bwl5VaOgg6aAdL+mgIR0CiPReIuXeFdX2UKGgGR0BwYiBEroW6aAdNCQFoCEdAoj1T15B1LnV9lChoBkdAcQJoyKvV3GgHTQsBaAhHQKI9WfgaWHF1fZQoaAZHQHJiqn3ta6loB00OAWgIR0CiPWdYfW+XdX2UKGgGR0BwmDfqHGjsaAdNEwFoCEdAoj14+yJKrnV9lChoBkdAcnWcTakAP2gHTWEBaAhHQKI+lLQokRl1fZQoaAZHQG0rs7+1jRVoB01jAWgIR0CiPpvy08eTdX2UKGgGR0Bx+B6eGwiaaAdL7GgIR0CiP8MVLzwudX2UKGgGR0BvQm9g4OtoaAdL12gIR0CiQM1DjR2KdX2UKGgGR0BwUlJpWV/uaAdL/WgIR0CiQSbe/Ho6dX2UKGgGR0BttUB8x9G7aAdL6WgIR0CiQUEmY0EYdX2UKGgGR0BxAQona37UaAdNHgFoCEdAokFowoLG73V9lChoBkdAcwjSJCSid2gHS/9oCEdAokF2L3sXznV9lChoBkdAcUJsenyd4GgHTQcBaAhHQKJBn1nM+vB1fZQoaAZHQHH3GOZLIxRoB00lAWgIR0CiQcUEovzwdX2UKGgGR0BwqVNGmUGFaAdL9GgIR0CiQtS8an76dX2UKGgGR0BwMxT6zmfXaAdL+mgIR0CiQupmVZ9vdX2UKGgGR0BwGVOclPadaAdNAQFoCEdAokVN0knkUHV9lChoBkdAcZJNbTtsvmgHS9VoCEdAokWjgVGkOHV9lChoBkdAcQVypaRp12gHS9hoCEdAokY3yGzrvHVlLg=="}, "ep_success_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 310, "observation_space": {":type:": "<class 'gymnasium.spaces.box.Box'>", ":serialized:": 
"gAWVdgIAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWCAAAAAAAAAABAQEBAQEBAZRoCIwCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksIhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoESiWCAAAAAAAAAABAQEBAQEBAZRoFUsIhZRoGXSUUpSMBl9zaGFwZZRLCIWUjANsb3eUaBEoliAAAAAAAAAAAAC0wgAAtMIAAKDAAACgwNsPScAAAKDAAAAAgAAAAICUaAtLCIWUaBl0lFKUjARoaWdolGgRKJYgAAAAAAAAAAAAtEIAALRCAACgQAAAoEDbD0lAAACgQAAAgD8AAIA/lGgLSwiFlGgZdJRSlIwIbG93X3JlcHKUjFtbLTkwLiAgICAgICAgLTkwLiAgICAgICAgIC01LiAgICAgICAgIC01LiAgICAgICAgIC0zLjE0MTU5MjcgIC01LgogIC0wLiAgICAgICAgIC0wLiAgICAgICBdlIwJaGlnaF9yZXBylIxTWzkwLiAgICAgICAgOTAuICAgICAgICAgNS4gICAgICAgICA1LiAgICAgICAgIDMuMTQxNTkyNyAgNS4KICAxLiAgICAgICAgIDEuICAgICAgIF2UjApfbnBfcmFuZG9tlE51Yi4=", "dtype": "float32", "bounded_below": "[ True True True True True True True True]", "bounded_above": "[ True True True True True True True True]", "_shape": [8], "low": "[-90. -90. -5. -5. -3.1415927 -5.\n -0. -0. ]", "high": "[90. 90. 5. 5. 3.1415927 5.\n 1. 1. ]", "low_repr": "[-90. -90. -5. -5. -3.1415927 -5.\n -0. -0. ]", "high_repr": "[90. 90. 5. 5. 3.1415927 5.\n 1. 1. ]", "_np_random": null}, "action_space": {":type:": "<class 'gymnasium.spaces.discrete.Discrete'>", ":serialized:": "gAWV2wAAAAAAAACMGWd5bW5hc2l1bS5zcGFjZXMuZGlzY3JldGWUjAhEaXNjcmV0ZZSTlCmBlH2UKIwBbpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCaTiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIBAAAAAAAAACUhpRSlIwFc3RhcnSUaAhoDkMIAAAAAAAAAACUhpRSlIwGX3NoYXBllCmMBWR0eXBllGgOjApfbnBfcmFuZG9tlE51Yi4=", "n": "4", "start": "0", "_shape": [], "dtype": "int64", "_np_random": null}, "n_envs": 16, "n_steps": 2048, "gamma": 0.99, "gae_lambda": 0.95, "ent_coef": 0.0, "vf_coef": 0.5, "max_grad_norm": 0.5, "batch_size": 64, "n_epochs": 10, "clip_range": {":type:": "<class 'function'>", ":serialized:": "gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuEQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz/JmZmZmZmahZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "clip_range_vf": null, "normalize_advantage": true, "target_kl": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": 
"gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuEQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz8zqSowVTJhhZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "system_info": {"OS": "Linux-6.1.58+-x86_64-with-glibc2.35 # 1 SMP PREEMPT_DYNAMIC Sat Nov 18 15:31:17 UTC 2023", "Python": "3.10.12", "Stable-Baselines3": "2.0.0a5", "PyTorch": "2.1.0+cu121", "GPU Enabled": "True", "Numpy": "1.25.2", "Cloudpickle": "2.2.1", "Gymnasium": "0.28.1", "OpenAI Gym": "0.25.2"}}
 
+ {"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==", "__module__": "stable_baselines3.common.policies", "__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function ActorCriticPolicy.__init__ at 0x7d4a72346c20>", "_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x7d4a72346cb0>", "reset_noise": "<function ActorCriticPolicy.reset_noise at 0x7d4a72346d40>", "_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x7d4a72346dd0>", "_build": "<function ActorCriticPolicy._build at 0x7d4a72346e60>", "forward": "<function ActorCriticPolicy.forward at 0x7d4a72346ef0>", "extract_features": "<function ActorCriticPolicy.extract_features at 0x7d4a72346f80>", "_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x7d4a72347010>", "_predict": "<function ActorCriticPolicy._predict at 0x7d4a723470a0>", "evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x7d4a72347130>", "get_distribution": "<function ActorCriticPolicy.get_distribution at 0x7d4a723471c0>", "predict_values": "<function ActorCriticPolicy.predict_values at 0x7d4a72347250>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x7d4a722eda80>"}, "verbose": 1, "policy_kwargs": {}, "num_timesteps": 10027008, "_total_timesteps": 10000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1709710391571335951, "learning_rate": 0.0003, "tensorboard_log": null, "_last_obs": {":type:": "<class 'numpy.ndarray'>", ":serialized:": 
"gAWVdQQAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYABAAAAAAAAE3kPz177rG6kMJ7vT4B97ppESW6cxGEOwAAgD8AAIA/2oUrPmFqOj8488U90WP8vhvXIz7upRi9AAAAAAAAAAAz8hK9PRhWPPUS2b2PNme+cLHEvOIiB70AAAAAAAAAAEbQbr6D0Cs/hYt0PpLP4r57niS+Y0kiPgAAAAAAAAAAQOrcPUozSD9dKuQ9w2kLv1I0FD6iciK9AAAAAAAAAACal1+8xBf+PeXTzT0AC7e+ngbDPTKvg70AAAAAAAAAAI2AjL02BjC8qpZ6vE5psLvb7ou94PCRvAAAgD8AAIA/M4HzvHFZKDpI2nO54gmQMxY+Lry7YJc4AACAPwAAgD9mWOQ9fp5XP+1fvD0TCva+K5jyPd6VG7sAAAAAAAAAAICqHj3BDus9f+QGvrJSMb6fsoG9LPMnOgAAAAAAAAAAQO2JPUgZibpW70Azb99vLmMPCTulCLWzAACAPwAAgD+mAz4+/e4xP9t3aT5n/w+/NP+SPgqjkTwAAAAAAAAAAOZuyb1+soc/u7O9vZ3eBr8Zcxm+kwxVPQAAAAAAAAAAY6qQPv7ROj8B6iA8jvL/vlRqkD5DFsO9AAAAAAAAAACaOY+7exKXuoi3LTNRc+evhhp+uuZG0rMAAIA/AACAP8207Lt7UMk7TaBavlV1IL7l68m9/fBqPgAAgD8AAAAAM6P2vWjCp7zeqme8Dc6QvTCxnjxw0Dy9AACAPwAAgD9mAM0818sYPHY7QL66+R++hgBvvX+XpL0AAAAAAAAAAM0ci7qkwH25843cM79i/C7VKU464pG2swAAgD8AAIA/ABYgPDRn1j7DZWQ9V9/Zvu1VqT0TfGQ9AAAAAAAAAADtkDm+zA+BPgx/Gj6q76S+cmZgvMaTXD0AAAAAAAAAAMB6sj3Lpcw9lacQvgBqbb6x/4O8BzakvAAAAAAAAAAAgDd1PUWfyDxDBjS+Q2iAviyezbyRRyK8AAAAAAAAAADNKfq8zPcjPsiatjwEBZ2+OFimPYHLxboAAAAAAAAAADP//Lvh8I26NiS5NfEppDAKb9G6hfz9tAAAgD8AAIA/zfObvRnmaz7z5Jw+grDNvo4Dbz7bZ3U8AAAAAAAAAABmmn28rrWjuuEEqrezWp6y+s+xOUzXwzYAAIA/AACAP5oHVL0J6wA/XrYqPvdo9b4Xo109upz8PQAAAAAAAAAAZhaNu1KI9Lmagjs4qxBRMiI3pbsghF+3AACAPwAAgD/NVkg+x6nHPkMw970Sery+lGkAPofdxL0AAAAAAAAAAH0/nD4z6GE/1fR2PvogGL8Q/L0+yvwbvQAAAAAAAAAAZm4ivFiKVD9GBrI9verrvm9yPL2a+5w9AAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJmNJSJiIeUUpQoSwOMATyUTk5OSv////9K/////0sAdJRiSyBLCIaUjAFDlHSUUpQu"}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVkwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAlIwFbnVtcHmUjAVkdHlwZZSTlIwCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksghZSMAUOUdJRSlC4="}, "_last_original_obs": null, "_episode_num": 0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": -0.0027007999999999477, "_stats_window_size": 100, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": 
"gAWV6gsAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHQHI97fP5YYCMAWyUS+mMAXSUR0C5JXplz2eydX2UKGgGR0Byn+ISDh99aAdL5GgIR0C5JagUtZmqdX2UKGgGR0BwcxHI6r/9aAdLxWgIR0C5JaRYeT3ZdX2UKGgGR0Bxsm+GoJiRaAdLx2gIR0C5Jc4rrgO0dX2UKGgGR0ByBzhS9/SZaAdL32gIR0C5JeMnVoYfdX2UKGgGR0Bx91/4IrvtaAdL1WgIR0C5Je0ug6EKdX2UKGgGR0ByR6YfGMn7aAdL9WgIR0C5JfBxcVxkdX2UKGgGR0ByUFD0Dlo2aAdL4GgIR0C5Je01Mue0dX2UKGgGR0Bx9kiY9gWraAdL82gIR0C5JjBh2GIsdX2UKGgGR0By0tD+irT6aAdNBwFoCEdAuSZHaSLZSXV9lChoBkdAcxAiHZbpvGgHS8xoCEdAuSaEMG5c1XV9lChoBkdAcO9YtQKrrGgHS8poCEdAuScBI9TxXnV9lChoBkdAcjEpoK2KEWgHS+hoCEdAuScWIInjQ3V9lChoBkdAbzsYb83uNWgHS9doCEdAuScUfEGZ/nV9lChoBkdAcTvcMVk+YGgHS9FoCEdAuScryz5XVHV9lChoBkdAcUJBvrGBF2gHS9VoCEdAuSc5mPHT7XV9lChoBkdAcRb3dbgTAWgHS+loCEdAuSdXHmzSkXV9lChoBkdAb5gvDgqEvmgHS+toCEdAuSd4iFCb+nV9lChoBkdAcvSRhc7hemgHS/toCEdAuSeCu1WsBHV9lChoBkdAcu7qaPS2IGgHS89oCEdAuSeGZc9nsnV9lChoBkdAc7QCJ40Mw2gHS+doCEdAuSehTefqYHV9lChoBkdAb/hBGhEjPmgHS+FoCEdAuSewoqkM1HV9lChoBkdAciyjghr302gHS8xoCEdAuSfb7pFCs3V9lChoBkdAcnd2AoXsPmgHS9toCEdAuSfdje9BbHV9lChoBkdAcksuK4x1xWgHS9toCEdAuSgCekHlfnV9lChoBkdAcU5sBhhH9WgHS+NoCEdAuSgV2s7uD3V9lChoBkdAcItWCEpRXWgHS+VoCEdAuSg0JVsDXHV9lChoBkdAct7dmQKa5WgHS+loCEdAuSh6/j81oHV9lChoBkdAcoiOpbUwz2gHS9JoCEdAuSiT8rI5pHV9lChoBkdAcvnvFm4Aj2gHS9JoCEdAuSiilCTlk3V9lChoBkdAc6l9yLhrFmgHTQ0BaAhHQLkorRpUPxx1fZQoaAZHQG63mNrCWNZoB0vXaAhHQLko0iiZfD11fZQoaAZHQHITOPJaJRBoB0vlaAhHQLko5SlnAZd1fZQoaAZHQHJdnyEtdzJoB0vOaAhHQLko7UJfICF1fZQoaAZHQHLQPK+zt1JoB0vfaAhHQLko9DJEH+t1fZQoaAZHQG7crTYukDZoB0vVaAhHQLkpA/RE4Nt1fZQoaAZHQG6IQr+YMORoB0vbaAhHQLkpR8q4H5d1fZQoaAZHQHI6biVB2OhoB0vSaAhHQLkpZ7bL2Yh1fZQoaAZHQFEg4uK4x1xoB0uUaAhHQLkpcH5Jsft1fZQoaAZHQHB0g66reZZoB0vraAhHQLkplFOO8011fZQoaAZHQHF9N3KSxJNoB00EAWgIR0C5KY5iiItUdX2UKGgGR0BVdZx7zCk5aAdLumgIR0C5Kaw7LdN4dX2UKGgGR0BwuDPSlWOqaAdNDAFoCEdAuSm8fwI+n3V9lChoBkdAce5RaX8fm2gHTRMBaAhHQLkp0R5C4SZ1fZQoaAZHQHJ5fWxyGSJoB0vbaAhHQLkp9ZntfHB1fZQoaAZHQHHVQV0tAcFoB0vMaAhHQLkqNTRYzSF1fZQoaAZHQHOK48QqZtxoB0v1aAhHQLkqU/JvHcV1fZQoaAZHQHJo5DArQPZoB0vaaAhHQLkqV/GVAzJ1fZQoaAZHQHFhwaBI4ERoB0vTaAhHQLkqfxREWqN1fZQoaAZHQHJMwwK0D2doB00JAWgIR0C5KoWmLtNSdX2UKGgGR0BvM4SrYGt7aAdL3GgIR0C5KpAN9YwJdX2UKGgGR0BxPB2wFC9iaAdNBAFoCEdAuSqfOVxCIHV9lChoBkdAcYl65Xlr/WgHTQIBaAhHQLkqwFm4Ajp1fZQoaAZHQHESUN4JNTNoB0vOaAhHQLkq2awD/2l1fZQoaAZHQG1uiJXQtz1oB0vUaAhHQLkq2ZYPoV51fZQoaAZHQHIzOvECNjtoB0vsaAhHQLkrBOyVv/B1fZQoaAZHQHLCoBq9GqhoB0vGaAhHQLkrIY7JW/91fZQoaAZHQHLn4Iv8IiVoB0v2aAhHQLkrJQxesxR1fZQoaAZHQHAqAIQe3hJoB0vbaAhHQLkrJG2TgVJ1fZQoaAZHQHHeI3Ns3yZoB0vIaAhHQLkrPvZAY511fZQoaAZHQHFyMw+MZP5oB0vIaAhHQLkrTZmqYJF1fZQoaAZHQHEqygGr0atoB0vMaAhHQLkrk/BnBcl1fZQoaAZHQHFqeFtbcGloB0vZaAhHQLkrrZm7J4l1fZQoaAZHQHGCMMNMGotoB0vUaAhHQLkrtXBP9DR1fZQoaAZHQHF9x8x9G7VoB0vuaAhHQLkrzzXSSeR1fZQoaAZHQG8U+MQ2/BZoB0vYaAhHQLkr2JnQID51fZQoaAZHQHNUnjU/fO5oB0vmaAhHQLkr95yU9p11fZQoaAZHQHIO2VeKKpFoB0vMaAhHQLksEqQRwqB1fZQoaAZHQHHrwtrbg0loB0vjaAhHQLksQUZvUBp1fZQoaAZHQGNNj3dsSChoB03oA2gIR0C5LIIddVvNdX2UKGgGR0Bxi9qJuVHGaAdL42gIR0C5LITi0fHQdX2UKGgGR0Bx9Rsxfv4NaAdL3WgIR0C5LI9vjwQUdX2UKGgGR0BwL8scyWRjaAdL1GgIR0C5LJYLofSydX2UKGgGR0Byz2EXcgyNaAdL2mgIR0C5LJfDLr5ZdX2UKGgGR0BzDl5TqB3BaAdNAQFoCEdAuSzNg3Lmp3V9lChoBkdAcjpwNb1RL2gHS/loCEdAuSzTo6jnFHV9lChoBkdAcu57mdRR/GgHS85oCEdAuSzi1MM7VHV9lChoBkdAcYL/zJ6ppGgHS8xoCEdAuSz424uscXV9lChoBkdAcCrljVhCt2gHS+loCEdAuSz9S3solXV9lChoBkdAcRj7voePrGgHS8poCEdAuS0WRoysS3V9lChoBkdAc2OMjeKsMmgHS99oCEdAuS0v3WWhRXV9lChoBkdAcFMvV3EAHWgHS9loCEdAuS1JaSs8xXV9lChoBkdAcBACZWq95GgHS8doCEdAuS1HPv8ZUHV9lChoBkdAcnipzcRDkWgHS+BoCEdAuS1lfx+a0HV9lChoBkdAcNm31BdD6WgHS+doCEdAuS2IqpcX33V9lChoBkdAcV7nIQvpQmgHS9ZoCEdAuS2LMTviLnV9lChoBkdAcdQYIBzV+mgHS8hoCEdAuS2m7w
rlNnV9lChoBkdAcYhjDKoybmgHS8xoCEdAuS2zszEaVHV9lChoBkdAcHJLLIPsiWgHS9FoCEdAuS2/NNahYnV9lChoBkdAchmXJYDDCWgHS/loCEdAuS31u63AmHV9lChoBkdAc0wrEtNBW2gHS/1oCEdAuS4qwJPZZnV9lChoBkdAcjAvHtF8X2gHS+9oCEdAuS42xHG0eHV9lChoBkdAcNINFjNILGgHS9doCEdAuS5CL61stXV9lChoBkdAc+BIMjNY82gHS8doCEdAuS5JGnXNDHV9lChoBkdAcSA1baAWi2gHS/NoCEdAuS5Shxo7FXV9lChoBkdAcGQ0jkdWAGgHS99oCEdAuS5w2eg+QnV9lChoBkdAcfmOavzOHGgHS8hoCEdAuS6I7vG6w3V9lChoBkdAbqvVlPJq7GgHS9NoCEdAuS6Pw5NoJ3V9lChoBkdAbgJXnyNGVmgHS+poCEdAuS6X7tReknV9lChoBkdAcm72mYSg5GgHS+FoCEdAuS6dqASWaHVlLg=="}, "ep_success_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 612, "observation_space": {":type:": "<class 'gymnasium.spaces.box.Box'>", ":serialized:": "gAWVdgIAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWCAAAAAAAAAABAQEBAQEBAZRoCIwCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksIhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoESiWCAAAAAAAAAABAQEBAQEBAZRoFUsIhZRoGXSUUpSMBl9zaGFwZZRLCIWUjANsb3eUaBEoliAAAAAAAAAAAAC0wgAAtMIAAKDAAACgwNsPScAAAKDAAAAAgAAAAICUaAtLCIWUaBl0lFKUjARoaWdolGgRKJYgAAAAAAAAAAAAtEIAALRCAACgQAAAoEDbD0lAAACgQAAAgD8AAIA/lGgLSwiFlGgZdJRSlIwIbG93X3JlcHKUjFtbLTkwLiAgICAgICAgLTkwLiAgICAgICAgIC01LiAgICAgICAgIC01LiAgICAgICAgIC0zLjE0MTU5MjcgIC01LgogIC0wLiAgICAgICAgIC0wLiAgICAgICBdlIwJaGlnaF9yZXBylIxTWzkwLiAgICAgICAgOTAuICAgICAgICAgNS4gICAgICAgICA1LiAgICAgICAgIDMuMTQxNTkyNyAgNS4KICAxLiAgICAgICAgIDEuICAgICAgIF2UjApfbnBfcmFuZG9tlE51Yi4=", "dtype": "float32", "bounded_below": "[ True True True True True True True True]", "bounded_above": "[ True True True True True True True True]", "_shape": [8], "low": "[-90. -90. -5. -5. -3.1415927 -5.\n -0. -0. ]", "high": "[90. 90. 5. 5. 3.1415927 5.\n 1. 1. ]", "low_repr": "[-90. -90. -5. -5. -3.1415927 -5.\n -0. -0. ]", "high_repr": "[90. 90. 5. 5. 3.1415927 5.\n 1. 1. 
]", "_np_random": null}, "action_space": {":type:": "<class 'gymnasium.spaces.discrete.Discrete'>", ":serialized:": "gAWV2wAAAAAAAACMGWd5bW5hc2l1bS5zcGFjZXMuZGlzY3JldGWUjAhEaXNjcmV0ZZSTlCmBlH2UKIwBbpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCaTiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIBAAAAAAAAACUhpRSlIwFc3RhcnSUaAhoDkMIAAAAAAAAAACUhpRSlIwGX3NoYXBllCmMBWR0eXBllGgOjApfbnBfcmFuZG9tlE51Yi4=", "n": "4", "start": "0", "_shape": [], "dtype": "int64", "_np_random": null}, "n_envs": 32, "n_steps": 1024, "gamma": 0.999, "gae_lambda": 0.98, "ent_coef": 0.01, "vf_coef": 0.5, "max_grad_norm": 0.5, "batch_size": 128, "n_epochs": 2, "clip_range": {":type:": "<class 'function'>", ":serialized:": "gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuEQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz/JmZmZmZmahZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "clip_range_vf": null, "normalize_advantage": true, "target_kl": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuEQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz8zqSowVTJhhZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "system_info": {"OS": "Linux-6.1.58+-x86_64-with-glibc2.35 # 1 SMP PREEMPT_DYNAMIC Sat Nov 18 15:31:17 UTC 2023", "Python": "3.10.12", "Stable-Baselines3": "2.0.0a5", "PyTorch": "2.1.0+cu121", "GPU Enabled": "False", "Numpy": "1.25.2", "Cloudpickle": "2.2.1", "Gymnasium": "0.28.1", "OpenAI Gym": "0.25.2"}}
ppo-LunarLander-v2.zip CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:cc49323d2872efcc7c4368272b806c496b4fc8a0d79af4533b88609252c8c823
- size 145005
+ oid sha256:9710adef7a2ddd1783511540c0be15d511d99415ca649b3f412d162ead1d18f2
+ size 148161
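ppo-LunarLander-v2.zip is the SB3 checkpoint itself, stored via Git LFS, so only the pointer (oid and size) changes in the diff. A minimal sketch of fetching and loading it with the huggingface_sb3 helper; the repo_id below is an assumption inferred from the uploader name and file naming, not something stated in this commit.

```python
# Sketch: download the checkpoint from the Hub and load it with SB3.
from huggingface_sb3 import load_from_hub
from stable_baselines3 import PPO

checkpoint = load_from_hub(
    repo_id="s14pe/ppo-LunarLander-v2",   # assumed repo id, adjust as needed
    filename="ppo-LunarLander-v2.zip",
)
model = PPO.load(checkpoint)
print(model.policy)
```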
ppo-LunarLander-v2/data CHANGED
@@ -4,54 +4,54 @@
4
  ":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==",
5
  "__module__": "stable_baselines3.common.policies",
6
  "__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
7
- "__init__": "<function ActorCriticPolicy.__init__ at 0x7da8e4a2caf0>",
8
- "_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x7da8e4a2cb80>",
9
- "reset_noise": "<function ActorCriticPolicy.reset_noise at 0x7da8e4a2cc10>",
10
- "_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x7da8e4a2cca0>",
11
- "_build": "<function ActorCriticPolicy._build at 0x7da8e4a2cd30>",
12
- "forward": "<function ActorCriticPolicy.forward at 0x7da8e4a2cdc0>",
13
- "extract_features": "<function ActorCriticPolicy.extract_features at 0x7da8e4a2ce50>",
14
- "_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x7da8e4a2cee0>",
15
- "_predict": "<function ActorCriticPolicy._predict at 0x7da8e4a2cf70>",
16
- "evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x7da8e4a2d000>",
17
- "get_distribution": "<function ActorCriticPolicy.get_distribution at 0x7da8e4a2d090>",
18
- "predict_values": "<function ActorCriticPolicy.predict_values at 0x7da8e4a2d120>",
19
  "__abstractmethods__": "frozenset()",
20
- "_abc_impl": "<_abc._abc_data object at 0x7da8e49cd200>"
21
  },
22
  "verbose": 1,
23
  "policy_kwargs": {},
24
- "num_timesteps": 11600,
25
- "_total_timesteps": 1000000,
26
  "_num_timesteps_at_start": 0,
27
  "seed": null,
28
  "action_noise": null,
29
- "start_time": 1709647256978322952,
30
  "learning_rate": 0.0003,
31
  "tensorboard_log": null,
32
  "_last_obs": {
33
  ":type:": "<class 'numpy.ndarray'>",
34
- ":serialized:": "gAWVdQIAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYAAgAAAAAAAE2mKj608XI/edGmPtOt6L6hqEU+MilBPQAAAAAAAAAAZn2lPa6htLrugxW5m9V9tmJkYTrzwCw4AACAPwAAgD8wn1S+rv2RvLUO/bodYjC5PqoCPi9oGjoAAIA/AACAP5oeZz5ndbw+6D9UvaHInb6npak9EFMgvQAAAAAAAAAAmtIwvvRDrT9CrBa/ZbbbvuPLHL5cASe+AAAAAAAAAACai9Y9V/ZMPM4Llr7wwfq9VQghvWbPGb0AAAAAAAAAAOZlWz1F8rQ/1is+P0AbgL3yCMy8It4IPQAAAAAAAAAAACuyPXvKmbpQl7g622MFtlvB0roazNm5AAAAAAAAgD+NkBY+7Nvsu1rZAb6WlCo9uhtovQKcCj4AAIA/AACAP01VUz0UfJS6ohDKNIEdXTAYqK25jXHXswAAgD8AAIA/jTKnvQp6aDwbcEM+UdttvhIACT0yFjG9AAAAAAAAAADgOiM+1MGIvAhyATx+TZ26FunrvUzDgbsAAIA/AACAP8Bs1j3hjKy6rQuhspujTTB4nNU4bsewMwAAgD8AAIA/gL5PPpyBED0umOC3N+oit3kBpz6OgJ82AACAPwAAgD86uKQ+lPPFPh2gZL0ck2++j/bOPZFflLwAAAAAAAAAAKBrKD6UqIe8DSSRue0RR7ak2O29l/m1OAAAgD8AAIA/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksQSwiGlIwBQ5R0lFKULg=="
35
  },
36
  "_last_episode_starts": {
37
  ":type:": "<class 'numpy.ndarray'>",
38
- ":serialized:": "gAWVgwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSxCFlIwBQ5R0lFKULg=="
39
  },
40
  "_last_original_obs": null,
41
  "_episode_num": 0,
42
  "use_sde": false,
43
  "sde_sample_freq": -1,
44
- "_current_progress_remaining": -0.015808000000000044,
45
  "_stats_window_size": 100,
46
  "ep_info_buffer": {
47
  ":type:": "<class 'collections.deque'>",
48
- ":serialized:": "gAWVPgMAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHQEYl2VVxS52MAWyUS8CMAXSUR0CiPBV/Ue+3dX2UKGgGR0BvwsiY9gWraAdL52gIR0CiPMTBRAKOdX2UKGgGR0BxdY0aZQYUaAdL6GgIR0CiPMm2LHdXdX2UKGgGR0BzDnUc4o7WaAdL6GgIR0CiPMmYKIBSdX2UKGgGR0Bx3/qGDcubaAdL6WgIR0CiPM39zfaYdX2UKGgGR0BwzCoZQ53laAdL9WgIR0CiPQHg5zYFdX2UKGgGR0Bwl5VaOgg6aAdL+mgIR0CiPReIuXeFdX2UKGgGR0BwYiBEroW6aAdNCQFoCEdAoj1T15B1LnV9lChoBkdAcQJoyKvV3GgHTQsBaAhHQKI9WfgaWHF1fZQoaAZHQHJiqn3ta6loB00OAWgIR0CiPWdYfW+XdX2UKGgGR0BwmDfqHGjsaAdNEwFoCEdAoj14+yJKrnV9lChoBkdAcnWcTakAP2gHTWEBaAhHQKI+lLQokRl1fZQoaAZHQG0rs7+1jRVoB01jAWgIR0CiPpvy08eTdX2UKGgGR0Bx+B6eGwiaaAdL7GgIR0CiP8MVLzwudX2UKGgGR0BvQm9g4OtoaAdL12gIR0CiQM1DjR2KdX2UKGgGR0BwUlJpWV/uaAdL/WgIR0CiQSbe/Ho6dX2UKGgGR0BttUB8x9G7aAdL6WgIR0CiQUEmY0EYdX2UKGgGR0BxAQona37UaAdNHgFoCEdAokFowoLG73V9lChoBkdAcwjSJCSid2gHS/9oCEdAokF2L3sXznV9lChoBkdAcUJsenyd4GgHTQcBaAhHQKJBn1nM+vB1fZQoaAZHQHH3GOZLIxRoB00lAWgIR0CiQcUEovzwdX2UKGgGR0BwqVNGmUGFaAdL9GgIR0CiQtS8an76dX2UKGgGR0BwMxT6zmfXaAdL+mgIR0CiQupmVZ9vdX2UKGgGR0BwGVOclPadaAdNAQFoCEdAokVN0knkUHV9lChoBkdAcZJNbTtsvmgHS9VoCEdAokWjgVGkOHV9lChoBkdAcQVypaRp12gHS9hoCEdAokY3yGzrvHVlLg=="
49
  },
50
  "ep_success_buffer": {
51
  ":type:": "<class 'collections.deque'>",
52
  ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
53
  },
54
- "_n_updates": 310,
55
  "observation_space": {
56
  ":type:": "<class 'gymnasium.spaces.box.Box'>",
57
  ":serialized:": "gAWVdgIAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWCAAAAAAAAAABAQEBAQEBAZRoCIwCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksIhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoESiWCAAAAAAAAAABAQEBAQEBAZRoFUsIhZRoGXSUUpSMBl9zaGFwZZRLCIWUjANsb3eUaBEoliAAAAAAAAAAAAC0wgAAtMIAAKDAAACgwNsPScAAAKDAAAAAgAAAAICUaAtLCIWUaBl0lFKUjARoaWdolGgRKJYgAAAAAAAAAAAAtEIAALRCAACgQAAAoEDbD0lAAACgQAAAgD8AAIA/lGgLSwiFlGgZdJRSlIwIbG93X3JlcHKUjFtbLTkwLiAgICAgICAgLTkwLiAgICAgICAgIC01LiAgICAgICAgIC01LiAgICAgICAgIC0zLjE0MTU5MjcgIC01LgogIC0wLiAgICAgICAgIC0wLiAgICAgICBdlIwJaGlnaF9yZXBylIxTWzkwLiAgICAgICAgOTAuICAgICAgICAgNS4gICAgICAgICA1LiAgICAgICAgIDMuMTQxNTkyNyAgNS4KICAxLiAgICAgICAgIDEuICAgICAgIF2UjApfbnBfcmFuZG9tlE51Yi4=",
@@ -76,15 +76,15 @@
  "dtype": "int64",
  "_np_random": null
  },
- "n_envs": 16,
- "n_steps": 2048,
- "gamma": 0.99,
- "gae_lambda": 0.95,
- "ent_coef": 0.0,
  "vf_coef": 0.5,
  "max_grad_norm": 0.5,
- "batch_size": 64,
- "n_epochs": 10,
  "clip_range": {
  ":type:": "<class 'function'>",
  ":serialized:": "gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuEQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz/JmZmZmZmahZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"
 
4
  ":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==",
5
  "__module__": "stable_baselines3.common.policies",
6
  "__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
7
+ "__init__": "<function ActorCriticPolicy.__init__ at 0x7d4a72346c20>",
8
+ "_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x7d4a72346cb0>",
9
+ "reset_noise": "<function ActorCriticPolicy.reset_noise at 0x7d4a72346d40>",
10
+ "_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x7d4a72346dd0>",
11
+ "_build": "<function ActorCriticPolicy._build at 0x7d4a72346e60>",
12
+ "forward": "<function ActorCriticPolicy.forward at 0x7d4a72346ef0>",
13
+ "extract_features": "<function ActorCriticPolicy.extract_features at 0x7d4a72346f80>",
14
+ "_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x7d4a72347010>",
15
+ "_predict": "<function ActorCriticPolicy._predict at 0x7d4a723470a0>",
16
+ "evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x7d4a72347130>",
17
+ "get_distribution": "<function ActorCriticPolicy.get_distribution at 0x7d4a723471c0>",
18
+ "predict_values": "<function ActorCriticPolicy.predict_values at 0x7d4a72347250>",
19
  "__abstractmethods__": "frozenset()",
20
+ "_abc_impl": "<_abc._abc_data object at 0x7d4a722eda80>"
21
  },
22
  "verbose": 1,
23
  "policy_kwargs": {},
24
+ "num_timesteps": 10027008,
25
+ "_total_timesteps": 10000000,
26
  "_num_timesteps_at_start": 0,
27
  "seed": null,
28
  "action_noise": null,
29
+ "start_time": 1709710391571335951,
30
  "learning_rate": 0.0003,
31
  "tensorboard_log": null,
32
  "_last_obs": {
33
  ":type:": "<class 'numpy.ndarray'>",
34
+ ":serialized:": "gAWVdQQAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYABAAAAAAAAE3kPz177rG6kMJ7vT4B97ppESW6cxGEOwAAgD8AAIA/2oUrPmFqOj8488U90WP8vhvXIz7upRi9AAAAAAAAAAAz8hK9PRhWPPUS2b2PNme+cLHEvOIiB70AAAAAAAAAAEbQbr6D0Cs/hYt0PpLP4r57niS+Y0kiPgAAAAAAAAAAQOrcPUozSD9dKuQ9w2kLv1I0FD6iciK9AAAAAAAAAACal1+8xBf+PeXTzT0AC7e+ngbDPTKvg70AAAAAAAAAAI2AjL02BjC8qpZ6vE5psLvb7ou94PCRvAAAgD8AAIA/M4HzvHFZKDpI2nO54gmQMxY+Lry7YJc4AACAPwAAgD9mWOQ9fp5XP+1fvD0TCva+K5jyPd6VG7sAAAAAAAAAAICqHj3BDus9f+QGvrJSMb6fsoG9LPMnOgAAAAAAAAAAQO2JPUgZibpW70Azb99vLmMPCTulCLWzAACAPwAAgD+mAz4+/e4xP9t3aT5n/w+/NP+SPgqjkTwAAAAAAAAAAOZuyb1+soc/u7O9vZ3eBr8Zcxm+kwxVPQAAAAAAAAAAY6qQPv7ROj8B6iA8jvL/vlRqkD5DFsO9AAAAAAAAAACaOY+7exKXuoi3LTNRc+evhhp+uuZG0rMAAIA/AACAP8207Lt7UMk7TaBavlV1IL7l68m9/fBqPgAAgD8AAAAAM6P2vWjCp7zeqme8Dc6QvTCxnjxw0Dy9AACAPwAAgD9mAM0818sYPHY7QL66+R++hgBvvX+XpL0AAAAAAAAAAM0ci7qkwH25843cM79i/C7VKU464pG2swAAgD8AAIA/ABYgPDRn1j7DZWQ9V9/Zvu1VqT0TfGQ9AAAAAAAAAADtkDm+zA+BPgx/Gj6q76S+cmZgvMaTXD0AAAAAAAAAAMB6sj3Lpcw9lacQvgBqbb6x/4O8BzakvAAAAAAAAAAAgDd1PUWfyDxDBjS+Q2iAviyezbyRRyK8AAAAAAAAAADNKfq8zPcjPsiatjwEBZ2+OFimPYHLxboAAAAAAAAAADP//Lvh8I26NiS5NfEppDAKb9G6hfz9tAAAgD8AAIA/zfObvRnmaz7z5Jw+grDNvo4Dbz7bZ3U8AAAAAAAAAABmmn28rrWjuuEEqrezWp6y+s+xOUzXwzYAAIA/AACAP5oHVL0J6wA/XrYqPvdo9b4Xo109upz8PQAAAAAAAAAAZhaNu1KI9Lmagjs4qxBRMiI3pbsghF+3AACAPwAAgD/NVkg+x6nHPkMw970Sery+lGkAPofdxL0AAAAAAAAAAH0/nD4z6GE/1fR2PvogGL8Q/L0+yvwbvQAAAAAAAAAAZm4ivFiKVD9GBrI9verrvm9yPL2a+5w9AAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJmNJSJiIeUUpQoSwOMATyUTk5OSv////9K/////0sAdJRiSyBLCIaUjAFDlHSUUpQu"
35
  },
36
  "_last_episode_starts": {
37
  ":type:": "<class 'numpy.ndarray'>",
38
+ ":serialized:": "gAWVkwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAlIwFbnVtcHmUjAVkdHlwZZSTlIwCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksghZSMAUOUdJRSlC4="
39
  },
40
  "_last_original_obs": null,
41
  "_episode_num": 0,
42
  "use_sde": false,
43
  "sde_sample_freq": -1,
44
+ "_current_progress_remaining": -0.0027007999999999477,
45
  "_stats_window_size": 100,
46
  "ep_info_buffer": {
47
  ":type:": "<class 'collections.deque'>",
48
+ ":serialized:": "gAWV6gsAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHQHI97fP5YYCMAWyUS+mMAXSUR0C5JXplz2eydX2UKGgGR0Byn+ISDh99aAdL5GgIR0C5JagUtZmqdX2UKGgGR0BwcxHI6r/9aAdLxWgIR0C5JaRYeT3ZdX2UKGgGR0Bxsm+GoJiRaAdLx2gIR0C5Jc4rrgO0dX2UKGgGR0ByBzhS9/SZaAdL32gIR0C5JeMnVoYfdX2UKGgGR0Bx91/4IrvtaAdL1WgIR0C5Je0ug6EKdX2UKGgGR0ByR6YfGMn7aAdL9WgIR0C5JfBxcVxkdX2UKGgGR0ByUFD0Dlo2aAdL4GgIR0C5Je01Mue0dX2UKGgGR0Bx9kiY9gWraAdL82gIR0C5JjBh2GIsdX2UKGgGR0By0tD+irT6aAdNBwFoCEdAuSZHaSLZSXV9lChoBkdAcxAiHZbpvGgHS8xoCEdAuSaEMG5c1XV9lChoBkdAcO9YtQKrrGgHS8poCEdAuScBI9TxXnV9lChoBkdAcjEpoK2KEWgHS+hoCEdAuScWIInjQ3V9lChoBkdAbzsYb83uNWgHS9doCEdAuScUfEGZ/nV9lChoBkdAcTvcMVk+YGgHS9FoCEdAuScryz5XVHV9lChoBkdAcUJBvrGBF2gHS9VoCEdAuSc5mPHT7XV9lChoBkdAcRb3dbgTAWgHS+loCEdAuSdXHmzSkXV9lChoBkdAb5gvDgqEvmgHS+toCEdAuSd4iFCb+nV9lChoBkdAcvSRhc7hemgHS/toCEdAuSeCu1WsBHV9lChoBkdAcu7qaPS2IGgHS89oCEdAuSeGZc9nsnV9lChoBkdAc7QCJ40Mw2gHS+doCEdAuSehTefqYHV9lChoBkdAb/hBGhEjPmgHS+FoCEdAuSewoqkM1HV9lChoBkdAciyjghr302gHS8xoCEdAuSfb7pFCs3V9lChoBkdAcnd2AoXsPmgHS9toCEdAuSfdje9BbHV9lChoBkdAcksuK4x1xWgHS9toCEdAuSgCekHlfnV9lChoBkdAcU5sBhhH9WgHS+NoCEdAuSgV2s7uD3V9lChoBkdAcItWCEpRXWgHS+VoCEdAuSg0JVsDXHV9lChoBkdAct7dmQKa5WgHS+loCEdAuSh6/j81oHV9lChoBkdAcoiOpbUwz2gHS9JoCEdAuSiT8rI5pHV9lChoBkdAcvnvFm4Aj2gHS9JoCEdAuSiilCTlk3V9lChoBkdAc6l9yLhrFmgHTQ0BaAhHQLkorRpUPxx1fZQoaAZHQG63mNrCWNZoB0vXaAhHQLko0iiZfD11fZQoaAZHQHITOPJaJRBoB0vlaAhHQLko5SlnAZd1fZQoaAZHQHJdnyEtdzJoB0vOaAhHQLko7UJfICF1fZQoaAZHQHLQPK+zt1JoB0vfaAhHQLko9DJEH+t1fZQoaAZHQG7crTYukDZoB0vVaAhHQLkpA/RE4Nt1fZQoaAZHQG6IQr+YMORoB0vbaAhHQLkpR8q4H5d1fZQoaAZHQHI6biVB2OhoB0vSaAhHQLkpZ7bL2Yh1fZQoaAZHQFEg4uK4x1xoB0uUaAhHQLkpcH5Jsft1fZQoaAZHQHB0g66reZZoB0vraAhHQLkplFOO8011fZQoaAZHQHF9N3KSxJNoB00EAWgIR0C5KY5iiItUdX2UKGgGR0BVdZx7zCk5aAdLumgIR0C5Kaw7LdN4dX2UKGgGR0BwuDPSlWOqaAdNDAFoCEdAuSm8fwI+n3V9lChoBkdAce5RaX8fm2gHTRMBaAhHQLkp0R5C4SZ1fZQoaAZHQHJ5fWxyGSJoB0vbaAhHQLkp9ZntfHB1fZQoaAZHQHHVQV0tAcFoB0vMaAhHQLkqNTRYzSF1fZQoaAZHQHOK48QqZtxoB0v1aAhHQLkqU/JvHcV1fZQoaAZHQHJo5DArQPZoB0vaaAhHQLkqV/GVAzJ1fZQoaAZHQHFhwaBI4ERoB0vTaAhHQLkqfxREWqN1fZQoaAZHQHJMwwK0D2doB00JAWgIR0C5KoWmLtNSdX2UKGgGR0BvM4SrYGt7aAdL3GgIR0C5KpAN9YwJdX2UKGgGR0BxPB2wFC9iaAdNBAFoCEdAuSqfOVxCIHV9lChoBkdAcYl65Xlr/WgHTQIBaAhHQLkqwFm4Ajp1fZQoaAZHQHESUN4JNTNoB0vOaAhHQLkq2awD/2l1fZQoaAZHQG1uiJXQtz1oB0vUaAhHQLkq2ZYPoV51fZQoaAZHQHIzOvECNjtoB0vsaAhHQLkrBOyVv/B1fZQoaAZHQHLCoBq9GqhoB0vGaAhHQLkrIY7JW/91fZQoaAZHQHLn4Iv8IiVoB0v2aAhHQLkrJQxesxR1fZQoaAZHQHAqAIQe3hJoB0vbaAhHQLkrJG2TgVJ1fZQoaAZHQHHeI3Ns3yZoB0vIaAhHQLkrPvZAY511fZQoaAZHQHFyMw+MZP5oB0vIaAhHQLkrTZmqYJF1fZQoaAZHQHEqygGr0atoB0vMaAhHQLkrk/BnBcl1fZQoaAZHQHFqeFtbcGloB0vZaAhHQLkrrZm7J4l1fZQoaAZHQHGCMMNMGotoB0vUaAhHQLkrtXBP9DR1fZQoaAZHQHF9x8x9G7VoB0vuaAhHQLkrzzXSSeR1fZQoaAZHQG8U+MQ2/BZoB0vYaAhHQLkr2JnQID51fZQoaAZHQHNUnjU/fO5oB0vmaAhHQLkr95yU9p11fZQoaAZHQHIO2VeKKpFoB0vMaAhHQLksEqQRwqB1fZQoaAZHQHHrwtrbg0loB0vjaAhHQLksQUZvUBp1fZQoaAZHQGNNj3dsSChoB03oA2gIR0C5LIIddVvNdX2UKGgGR0Bxi9qJuVHGaAdL42gIR0C5LITi0fHQdX2UKGgGR0Bx9Rsxfv4NaAdL3WgIR0C5LI9vjwQUdX2UKGgGR0BwL8scyWRjaAdL1GgIR0C5LJYLofSydX2UKGgGR0Byz2EXcgyNaAdL2mgIR0C5LJfDLr5ZdX2UKGgGR0BzDl5TqB3BaAdNAQFoCEdAuSzNg3Lmp3V9lChoBkdAcjpwNb1RL2gHS/loCEdAuSzTo6jnFHV9lChoBkdAcu57mdRR/GgHS85oCEdAuSzi1MM7VHV9lChoBkdAcYL/zJ6ppGgHS8xoCEdAuSz424uscXV9lChoBkdAcCrljVhCt2gHS+loCEdAuSz9S3solXV9lChoBkdAcRj7voePrGgHS8poCEdAuS0WRoysS3V9lChoBkdAc2OMjeKsMmgHS99oCEdAuS0v3WWhRXV9lChoBkdAcFMvV3EAHWgHS9loCEdAuS1JaSs8xXV9lChoBkdAcBACZWq95GgHS8doCEdAuS1HPv8ZUHV9lChoBkdAcnipzcRDkWgHS+BoCEdAuS1lfx+a0HV9lChoBkdAcNm31BdD6WgHS+doCEdAuS2IqpcX33V9lChoBkdAcV7nIQvpQmgHS9ZoCEdAuS2LMTviLnV9lChoBkdAcdQYIBzV
+mgHS8hoCEdAuS2m7wrlNnV9lChoBkdAcYhjDKoybmgHS8xoCEdAuS2zszEaVHV9lChoBkdAcHJLLIPsiWgHS9FoCEdAuS2/NNahYnV9lChoBkdAchmXJYDDCWgHS/loCEdAuS31u63AmHV9lChoBkdAc0wrEtNBW2gHS/1oCEdAuS4qwJPZZnV9lChoBkdAcjAvHtF8X2gHS+9oCEdAuS42xHG0eHV9lChoBkdAcNINFjNILGgHS9doCEdAuS5CL61stXV9lChoBkdAc+BIMjNY82gHS8doCEdAuS5JGnXNDHV9lChoBkdAcSA1baAWi2gHS/NoCEdAuS5Shxo7FXV9lChoBkdAcGQ0jkdWAGgHS99oCEdAuS5w2eg+QnV9lChoBkdAcfmOavzOHGgHS8hoCEdAuS6I7vG6w3V9lChoBkdAbqvVlPJq7GgHS9NoCEdAuS6Pw5NoJ3V9lChoBkdAbgJXnyNGVmgHS+poCEdAuS6X7tReknV9lChoBkdAcm72mYSg5GgHS+FoCEdAuS6dqASWaHVlLg=="
49
  },
50
  "ep_success_buffer": {
51
  ":type:": "<class 'collections.deque'>",
52
  ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
53
  },
54
+ "_n_updates": 612,
55
  "observation_space": {
56
  ":type:": "<class 'gymnasium.spaces.box.Box'>",
57
  ":serialized:": "gAWVdgIAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWCAAAAAAAAAABAQEBAQEBAZRoCIwCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksIhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoESiWCAAAAAAAAAABAQEBAQEBAZRoFUsIhZRoGXSUUpSMBl9zaGFwZZRLCIWUjANsb3eUaBEoliAAAAAAAAAAAAC0wgAAtMIAAKDAAACgwNsPScAAAKDAAAAAgAAAAICUaAtLCIWUaBl0lFKUjARoaWdolGgRKJYgAAAAAAAAAAAAtEIAALRCAACgQAAAoEDbD0lAAACgQAAAgD8AAIA/lGgLSwiFlGgZdJRSlIwIbG93X3JlcHKUjFtbLTkwLiAgICAgICAgLTkwLiAgICAgICAgIC01LiAgICAgICAgIC01LiAgICAgICAgIC0zLjE0MTU5MjcgIC01LgogIC0wLiAgICAgICAgIC0wLiAgICAgICBdlIwJaGlnaF9yZXBylIxTWzkwLiAgICAgICAgOTAuICAgICAgICAgNS4gICAgICAgICA1LiAgICAgICAgIDMuMTQxNTkyNyAgNS4KICAxLiAgICAgICAgIDEuICAgICAgIF2UjApfbnBfcmFuZG9tlE51Yi4=",
 
  "dtype": "int64",
  "_np_random": null
  },
+ "n_envs": 32,
+ "n_steps": 1024,
+ "gamma": 0.999,
+ "gae_lambda": 0.98,
+ "ent_coef": 0.01,
  "vf_coef": 0.5,
  "max_grad_norm": 0.5,
+ "batch_size": 128,
+ "n_epochs": 2,
  "clip_range": {
  ":type:": "<class 'function'>",
  ":serialized:": "gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuEQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz/JmZmZmZmahZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"
ppo-LunarLander-v2/policy.optimizer.pth CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:e80d46bf9b69ff12f459b1f2e61c4841eba5e735cf9c9b59e41f28ef055ab3f6
- size 88362
+ oid sha256:d252b2ad2ba316d6cd6b3ee22e29aa55036d82d754d3cf10ab78da9e750491e1
+ size 87978
ppo-LunarLander-v2/policy.pth CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:87062ca59d0cda3f06cab51ba4be75560185e1c2d4ee736046da7d41027629c2
- size 43762
+ oid sha256:346e8a61edfceed4e48bfc1fe68431a15cddd0dfe1a448984e332704de9c124a
+ size 43634
ppo-LunarLander-v2/system_info.txt CHANGED
@@ -2,7 +2,7 @@
  - Python: 3.10.12
  - Stable-Baselines3: 2.0.0a5
  - PyTorch: 2.1.0+cu121
- - GPU Enabled: True
+ - GPU Enabled: False
  - Numpy: 1.25.2
  - Cloudpickle: 2.2.1
  - Gymnasium: 0.28.1
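system_info.txt records the environment the checkpoint was produced in; the new run was trained without a GPU, hence "GPU Enabled: False". For reference, a small sketch of how the same information can be queried from SB3 itself (this is not necessarily how the file in this repo was generated):

```python
# Sketch: print the same environment details that system_info.txt lists
# (OS, Python, Stable-Baselines3, PyTorch, GPU Enabled, Numpy, ...).
from stable_baselines3.common.utils import get_system_info

env_info, env_info_str = get_system_info(print_info=False)
print(env_info_str)
```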
replay.mp4 CHANGED
Binary files a/replay.mp4 and b/replay.mp4 differ
 
results.json CHANGED
@@ -1 +1 @@
- {"mean_reward": 223.34998009999998, "std_reward": 80.58648939532951, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2024-03-05T14:14:54.612048"}
 
+ {"mean_reward": 279.00388269999996, "std_reward": 15.825199841561911, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2024-03-06T09:23:31.386105"}