Initial commit
- README.md +1 -1
- a2c-PandaReachDense-v2.zip +2 -2
- a2c-PandaReachDense-v2/data +10 -10
- a2c-PandaReachDense-v2/policy.optimizer.pth +2 -2
- a2c-PandaReachDense-v2/policy.pth +2 -2
- a2c-PandaReachDense-v2/system_info.txt +2 -2
- config.json +1 -1
- replay.mp4 +0 -0
- results.json +1 -1
- vec_normalize.pkl +1 -1
README.md
CHANGED
@@ -16,7 +16,7 @@ model-index:
       type: PandaReachDense-v2
     metrics:
     - type: mean_reward
-      value: -3.
+      value: -3.78 +/- 0.82
       name: mean_reward
       verified: false
 ---
a2c-PandaReachDense-v2.zip
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:81f5dea135eef54b88486818628c7038fde718bf7bb3054f36b2cc10a88b3b37
+size 107808
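The .zip, .pth and .pkl entries in this commit are Git LFS pointer files: the repository itself stores only the version / oid sha256 / size triplet, and the binary payload lives in LFS storage. A minimal verification sketch in Python, with hypothetical local paths, that checks a downloaded artifact against the oid and size recorded in its pointer:

```python
# Sketch: verify a downloaded LFS object against its pointer file.
# Both paths are hypothetical; point them at wherever the files were fetched.
import hashlib
from pathlib import Path

def parse_pointer(pointer_path: str) -> dict:
    """Parse a Git LFS pointer file into a dict of its 'key value' lines."""
    fields = {}
    for line in Path(pointer_path).read_text().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

def matches_pointer(pointer_path: str, blob_path: str) -> bool:
    """True if the blob's sha256 digest and byte size match the pointer."""
    fields = parse_pointer(pointer_path)
    expected_oid = fields["oid"].removeprefix("sha256:")
    expected_size = int(fields["size"])
    data = Path(blob_path).read_bytes()
    return hashlib.sha256(data).hexdigest() == expected_oid and len(data) == expected_size

print(matches_pointer("a2c-PandaReachDense-v2.zip.pointer", "a2c-PandaReachDense-v2.zip"))
```

The same check applies to policy.pth, policy.optimizer.pth and vec_normalize.pkl further down.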
a2c-PandaReachDense-v2/data
CHANGED
|
@@ -4,9 +4,9 @@
|
|
| 4 |
":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=",
|
| 5 |
"__module__": "stable_baselines3.common.policies",
|
| 6 |
"__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
|
| 7 |
-
"__init__": "<function MultiInputActorCriticPolicy.__init__ at
|
| 8 |
"__abstractmethods__": "frozenset()",
|
| 9 |
-
"_abc_impl": "<_abc._abc_data object at
|
| 10 |
},
|
| 11 |
"verbose": 1,
|
| 12 |
"policy_kwargs": {
|
|
@@ -24,7 +24,7 @@
|
|
| 24 |
"_num_timesteps_at_start": 0,
|
| 25 |
"seed": null,
|
| 26 |
"action_noise": null,
|
| 27 |
-
"start_time":
|
| 28 |
"learning_rate": 0.0007,
|
| 29 |
"tensorboard_log": null,
|
| 30 |
"lr_schedule": {
|
|
@@ -33,10 +33,10 @@
|
|
| 33 |
},
|
| 34 |
"_last_obs": {
|
| 35 |
":type:": "<class 'collections.OrderedDict'>",
|
| 36 |
-
":serialized:": "
|
| 37 |
-
"achieved_goal": "[[0.
|
| 38 |
-
"desired_goal": "[[
|
| 39 |
-
"observation": "[[
|
| 40 |
},
|
| 41 |
"_last_episode_starts": {
|
| 42 |
":type:": "<class 'numpy.ndarray'>",
|
|
@@ -44,9 +44,9 @@
|
|
| 44 |
},
|
| 45 |
"_last_original_obs": {
|
| 46 |
":type:": "<class 'collections.OrderedDict'>",
|
| 47 |
-
":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////
|
| 48 |
"achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]",
|
| 49 |
-
"desired_goal": "[[-0.
|
| 50 |
"observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"
|
| 51 |
},
|
| 52 |
"_episode_num": 0,
|
|
@@ -56,7 +56,7 @@
|
|
| 56 |
"_stats_window_size": 100,
|
| 57 |
"ep_info_buffer": {
|
| 58 |
":type:": "<class 'collections.deque'>",
|
| 59 |
-
":serialized:": "gAWVHRAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////
|
| 60 |
},
|
| 61 |
"ep_success_buffer": {
|
| 62 |
":type:": "<class 'collections.deque'>",
|
|
|
|
| 4 |
":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=",
|
| 5 |
"__module__": "stable_baselines3.common.policies",
|
| 6 |
"__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
|
| 7 |
+
"__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x7f528cd53d00>",
|
| 8 |
"__abstractmethods__": "frozenset()",
|
| 9 |
+
"_abc_impl": "<_abc._abc_data object at 0x7f528cd58540>"
|
| 10 |
},
|
| 11 |
"verbose": 1,
|
| 12 |
"policy_kwargs": {
|
|
|
|
| 24 |
"_num_timesteps_at_start": 0,
|
| 25 |
"seed": null,
|
| 26 |
"action_noise": null,
|
| 27 |
+
"start_time": 1686289531784048596,
|
| 28 |
"learning_rate": 0.0007,
|
| 29 |
"tensorboard_log": null,
|
| 30 |
"lr_schedule": {
|
|
|
|
| 33 |
},
|
| 34 |
"_last_obs": {
|
| 35 |
":type:": "<class 'collections.OrderedDict'>",
|
| 36 |
+
":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAAG2C8PnDejz3HpSE/G2C8PnDejz3HpSE/G2C8PnDejz3HpSE/G2C8PnDejz3HpSE/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAA6wlEvwkRWD6hQ7U8hWuXv6+zBz5G36a/sqKMP+oOsD+P+ik/ox3OPpXmhL/2b329lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAAAbYLw+cN6PPcelIT/zDXK8+NKbugP0ATwbYLw+cN6PPcelIT/zDXK8+NKbugP0ATwbYLw+cN6PPcelIT/zDXK8+NKbugP0ATwbYLw+cN6PPcelIT/zDXK8+NKbugP0ATyUaA5LBEsGhpRoEnSUUpR1Lg==",
|
| 37 |
+
"achieved_goal": "[[0.36792073 0.07024848 0.6314358 ]\n [0.36792073 0.07024848 0.6314358 ]\n [0.36792073 0.07024848 0.6314358 ]\n [0.36792073 0.07024848 0.6314358 ]]",
|
| 38 |
+
"desired_goal": "[[-0.76577634 0.21100248 0.02212697]\n [-1.1829687 0.13252138 -1.3036888 ]\n [ 1.0987151 1.3754551 0.6639795 ]\n [ 0.40256986 -1.0382868 -0.06187435]]",
|
| 39 |
+
"observation": "[[ 0.36792073 0.07024848 0.6314358 -0.01477383 -0.00118884 0.00793171]\n [ 0.36792073 0.07024848 0.6314358 -0.01477383 -0.00118884 0.00793171]\n [ 0.36792073 0.07024848 0.6314358 -0.01477383 -0.00118884 0.00793171]\n [ 0.36792073 0.07024848 0.6314358 -0.01477383 -0.00118884 0.00793171]]"
|
| 40 |
},
|
| 41 |
"_last_episode_starts": {
|
| 42 |
":type:": "<class 'numpy.ndarray'>",
|
|
|
|
| 44 |
},
|
| 45 |
"_last_original_obs": {
|
| 46 |
":type:": "<class 'collections.OrderedDict'>",
|
| 47 |
+
":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAKr84vPWvDj4O4oo+YDWZvYJG5L3BBmM+Zz6IvU6xx70ndJI9HUTIvTALvj3dyzE+lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAACUaA5LBEsGhpRoEnSUUpR1Lg==",
|
| 48 |
"achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]",
|
| 49 |
+
"desired_goal": "[[-0.01127605 0.1393431 0.2712559 ]\n [-0.07480884 -0.11146261 0.22170545]\n [-0.06652527 -0.09750615 0.07151061]\n [-0.09778617 0.09279478 0.17362924]]",
|
| 50 |
"observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"
|
| 51 |
},
|
| 52 |
"_episode_num": 0,
|
|
|
|
| 56 |
"_stats_window_size": 100,
|
| 57 |
"ep_info_buffer": {
|
| 58 |
":type:": "<class 'collections.deque'>",
|
| 59 |
+
":serialized:": "gAWVHRAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIL6UuGcfoDsCUhpRSlIwBbJRLMowBdJRHQKkSvmOEM9d1fZQoaAZoCWgPQwhuisdFtdgQwJSGlFKUaBVLMmgWR0CpEhCuMdcTdX2UKGgGaAloD0MID5iHTPmwEMCUhpRSlGgVSzJoFkdAqRFyP6sQunV9lChoBmgJaA9DCDGzz2OUxw3AlIaUUpRoFUsyaBZHQKkQ+T8pCrt1fZQoaAZoCWgPQwio5JzYQ7sJwJSGlFKUaBVLMmgWR0CpE7bl7tzCdX2UKGgGaAloD0MI46qy74qACsCUhpRSlGgVSzJoFkdAqRMJCOWBz3V9lChoBmgJaA9DCJeo3hrYigHAlIaUUpRoFUsyaBZHQKkSapgCwKV1fZQoaAZoCWgPQwijWkQUk5cHwJSGlFKUaBVLMmgWR0CpEfFnh86WdX2UKGgGaAloD0MIkIZT5uYLEcCUhpRSlGgVSzJoFkdAqRSwIv8IiXV9lChoBmgJaA9DCObpXFFK6BLAlIaUUpRoFUsyaBZHQKkUAlC1JDp1fZQoaAZoCWgPQwhsJAnCFZAJwJSGlFKUaBVLMmgWR0CpE2Pu5SWJdX2UKGgGaAloD0MItrkxPWHJCMCUhpRSlGgVSzJoFkdAqRLq22G7BnV9lChoBmgJaA9DCLZmKy/5vwfAlIaUUpRoFUsyaBZHQKkVsYqoZQ51fZQoaAZoCWgPQwiUF5mAXwMLwJSGlFKUaBVLMmgWR0CpFQOeJ53UdX2UKGgGaAloD0MIQQx07QuoCMCUhpRSlGgVSzJoFkdAqRRlYW+GoXV9lChoBmgJaA9DCACquHGL2QvAlIaUUpRoFUsyaBZHQKkT7EQ5FPV1fZQoaAZoCWgPQwhC6KBLONQSwJSGlFKUaBVLMmgWR0CpFqjSgGr0dX2UKGgGaAloD0MI/RAbLJzkC8CUhpRSlGgVSzJoFkdAqRX7J6po9XV9lChoBmgJaA9DCLIrLSP1HgzAlIaUUpRoFUsyaBZHQKkVXNKyv9t1fZQoaAZoCWgPQwiy9KEL6nsGwJSGlFKUaBVLMmgWR0CpFOO/1xsEdX2UKGgGaAloD0MI88e0No0NEMCUhpRSlGgVSzJoFkdAqReu5e7cwnV9lChoBmgJaA9DCLJkjuVdVQ3AlIaUUpRoFUsyaBZHQKkXARA8jiZ1fZQoaAZoCWgPQwjY8zXLZXMSwJSGlFKUaBVLMmgWR0CpFmKo60Y1dX2UKGgGaAloD0MI1/fhICEqD8CUhpRSlGgVSzJoFkdAqRXpjlPrOnV9lChoBmgJaA9DCE4rhUAuEQXAlIaUUpRoFUsyaBZHQKkYqtU4rBl1fZQoaAZoCWgPQwjHoX4XtmYIwJSGlFKUaBVLMmgWR0CpF/0fozN2dX2UKGgGaAloD0MI0ETY8PQKDsCUhpRSlGgVSzJoFkdAqRdewFC9iHV9lChoBmgJaA9DCDzB/uvcJBTAlIaUUpRoFUsyaBZHQKkW5aOgg5l1fZQoaAZoCWgPQwiFCDiEKpUFwJSGlFKUaBVLMmgWR0CpGabF0gbIdX2UKGgGaAloD0MIuOnPfqSIBcCUhpRSlGgVSzJoFkdAqRj44jrzG3V9lChoBmgJaA9DCK4SLA5nngbAlIaUUpRoFUsyaBZHQKkYWoYNy5t1fZQoaAZoCWgPQwjwMsNGWY8SwJSGlFKUaBVLMmgWR0CpF+GxD9fkdX2UKGgGaAloD0MINrBVgsXhEMCUhpRSlGgVSzJoFkdAqRqh0uDjBHV9lChoBmgJaA9DCAOV8e8zbgrAlIaUUpRoFUsyaBZHQKkZ8/cFhXt1fZQoaAZoCWgPQwhbttYXCY0JwJSGlFKUaBVLMmgWR0CpGVWNWEK3dX2UKGgGaAloD0MItK88SE+RD8CUhpRSlGgVSzJoFkdAqRjcYO2AoXV9lChoBmgJaA9DCAjnU8cqBQ/AlIaUUpRoFUsyaBZHQKkbnKZlWfd1fZQoaAZoCWgPQwgPlxx3SjcQwJSGlFKUaBVLMmgWR0CpGu82zfJndX2UKGgGaAloD0MI56kOuRnOBMCUhpRSlGgVSzJoFkdAqRpQ0sOG03V9lChoBmgJaA9DCBVzEHS0SgnAlIaUUpRoFUsyaBZHQKkZ19xZMcp1fZQoaAZoCWgPQwiBJOzbSQQGwJSGlFKUaBVLMmgWR0CpHJC9Zid8dX2UKGgGaAloD0MIb2b0o+G0EcCUhpRSlGgVSzJoFkdAqRvi0rsjV3V9lChoBmgJaA9DCEC/79+8KBPAlIaUUpRoFUsyaBZHQKkbRJeVs1t1fZQoaAZoCWgPQwivtIzUe4oFwJSGlFKUaBVLMmgWR0CpGst7rs0IdX2UKGgGaAloD0MIqkNuhhvwDMCUhpRSlGgVSzJoFkdAqR2FX5nDi3V9lChoBmgJaA9DCE4pr5XQvRHAlIaUUpRoFUsyaBZHQKkc13jdYXB1fZQoaAZoCWgPQwiEDOTZ5Rv+v5SGlFKUaBVLMmgWR0CpHDkCeVcEdX2UKGgGaAloD0MIxHdi1ouhDcCUhpRSlGgVSzJoFkdAqRu/49HMEHV9lChoBmgJaA9DCPJbdLLUOg/AlIaUUpRoFUsyaBZHQKkegnRb8m91fZQoaAZoCWgPQwgJw4AlV3EGwJSGlFKUaBVLMmgWR0CpHdSHmA9WdX2UKGgGaAloD0MIldOeknMCEcCUhpRSlGgVSzJoFkdAqR02kP+XJHV9lChoBmgJaA9DCCxGXWvvMwfAlIaUUpRoFUsyaBZHQKkcvWz4UN91fZQoaAZoCWgPQwhoBvGBHX8IwJSGlFKUaBVLMmgWR0CpH4S/TLGJdX2UKGgGaAloD0MIiljEsMN4DsCUhpRSlGgVSzJoFkdAqR7W+23KCHV9lChoBmgJaA9DCJnTZTGx2QnAlIaUUpRoFUsyaBZHQKkeOJ6Y3Nt1fZQoaAZoCWgPQwhHVn4ZjEESwJSGlFKUaBVLMmgWR0CpHb+kHlfadX2UKGgGaAloD0MInYGRlzWRFcCUhpRSlGgVSzJoFkdAqSCD9AHE/HV9lChoBmgJaA9DCEm9p3La8wbAlIaUUpRoFUsyaBZHQKkf1hAGB4F1fZQoaAZoCWgPQwh4DI/9LJYIwJSGlFKUaBVLMmgWR0CpHzeqBErodX2UKGgGaAloD0MIldOeknMiDcCUhpRSlGgVSzJoFkdAqR6+e18b73V9lChoBmgJaA9DCKOSOgFN5BLAlIaUUpRoFUsyaBZHQKkiE57PY4B1fZQoaAZoCWgPQwico46OqxEQwJSGlFKUaBVLMmgWR0CpIWbMHKOldX2UKGgGaAloD0MI9NxCVyLQD8CUhpRSlGgVSzJoFkdAqSDJMJx//n
V9lChoBmgJaA9DCHWw/s9hrhDAlIaUUpRoFUsyaBZHQKkgUP8Q7Ld1fZQoaAZoCWgPQwjequtQTYkFwJSGlFKUaBVLMmgWR0CpI7/EfkmydX2UKGgGaAloD0MI9Z7KaU+5EMCUhpRSlGgVSzJoFkdAqSMSjafzz3V9lChoBmgJaA9DCImXp3NFyQ7AlIaUUpRoFUsyaBZHQKkidL6DXe51fZQoaAZoCWgPQwgtJctJKJ0GwJSGlFKUaBVLMmgWR0CpIfxeLNwBdX2UKGgGaAloD0MI4X1VLlSOEcCUhpRSlGgVSzJoFkdAqSV34mCyyHV9lChoBmgJaA9DCASsVbsmRBPAlIaUUpRoFUsyaBZHQKkkyxGDtgN1fZQoaAZoCWgPQwj2lnK+2OsQwJSGlFKUaBVLMmgWR0CpJC2ac7QtdX2UKGgGaAloD0MIrFj8prDyDMCUhpRSlGgVSzJoFkdAqSO2SOinHnV9lChoBmgJaA9DCF3cRgN4KwjAlIaUUpRoFUsyaBZHQKknRmKZUkx1fZQoaAZoCWgPQwgD7KNTV34JwJSGlFKUaBVLMmgWR0CpJpluWKMvdX2UKGgGaAloD0MImGpmLQXUEsCUhpRSlGgVSzJoFkdAqSX8J2MbWHV9lChoBmgJaA9DCNECtK1mPQXAlIaUUpRoFUsyaBZHQKklg/IsAed1fZQoaAZoCWgPQwjU7lcBvvsIwJSGlFKUaBVLMmgWR0CpKQAU1yeadX2UKGgGaAloD0MIfzMxXYhVCsCUhpRSlGgVSzJoFkdAqShTLbHp8nV9lChoBmgJaA9DCKyRXWkZeRLAlIaUUpRoFUsyaBZHQKkntZ5iVjZ1fZQoaAZoCWgPQwjaU3JO7EEGwJSGlFKUaBVLMmgWR0CpJz158jRldX2UKGgGaAloD0MIG/M64pDNB8CUhpRSlGgVSzJoFkdAqSq4RNATqXV9lChoBmgJaA9DCM9r7BLVaxDAlIaUUpRoFUsyaBZHQKkqDBSk0rN1fZQoaAZoCWgPQwj0qWOV0pMNwJSGlFKUaBVLMmgWR0CpKW7iZOSGdX2UKGgGaAloD0MINSVZh6PrFMCUhpRSlGgVSzJoFkdAqSj3Dk2gnXV9lChoBmgJaA9DCJVIopdRjA/AlIaUUpRoFUsyaBZHQKksRlGPPs11fZQoaAZoCWgPQwgnM95Wes0BwJSGlFKUaBVLMmgWR0CpK5h2W6bwdX2UKGgGaAloD0MIIa6cvTO6B8CUhpRSlGgVSzJoFkdAqSr6Dyvs7nV9lChoBmgJaA9DCOC9o8aEGA7AlIaUUpRoFUsyaBZHQKkqgURFqi51fZQoaAZoCWgPQwicNXhflQsQwJSGlFKUaBVLMmgWR0CpLUF/x2B8dX2UKGgGaAloD0MI/yPTodMTEcCUhpRSlGgVSzJoFkdAqSyTo2XLNnV9lChoBmgJaA9DCIrHRbWIGBHAlIaUUpRoFUsyaBZHQKkr9ar3j+91fZQoaAZoCWgPQwgaGeQuwpQIwJSGlFKUaBVLMmgWR0CpK3z987ZGdX2UKGgGaAloD0MI9raZCvEoC8CUhpRSlGgVSzJoFkdAqS46AjIJaHV9lChoBmgJaA9DCOPD7GXbaQvAlIaUUpRoFUsyaBZHQKktjDG96C11fZQoaAZoCWgPQwirzf+rjrwTwJSGlFKUaBVLMmgWR0CpLO3LV4HHdX2UKGgGaAloD0MI8iN+xRqOBsCUhpRSlGgVSzJoFkdAqSx04m1IAnV9lChoBmgJaA9DCIF7nj9tRBDAlIaUUpRoFUsyaBZHQKkvOtK7I1d1fZQoaAZoCWgPQwjtSWBzDo4RwJSGlFKUaBVLMmgWR0CpLozz3AVPdX2UKGgGaAloD0MI/gqZK4NqC8CUhpRSlGgVSzJoFkdAqS3uhdt2tHV9lChoBmgJaA9DCPIjfsUa7g/AlIaUUpRoFUsyaBZHQKktdV5KODJ1ZS4="
|
| 60 |
},
|
| 61 |
"ep_success_buffer": {
|
| 62 |
":type:": "<class 'collections.deque'>",
|
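The data entry is the JSON metadata stored inside the Stable-Baselines3 zip archive; the hunks above fill in values that the left-hand (initial) side of the diff truncated, such as the __init__ / _abc_impl memory addresses, start_time, and the _last_obs / _last_original_obs arrays. A hedged sketch of reading a few of these attributes back, assuming stable-baselines3 1.8.0 and a gym version able to unpickle the saved spaces, with the zip downloaded to a hypothetical local path:

```python
# Sketch: load the saved archive and inspect attributes from the `data` entry.
# Assumes stable-baselines3 1.8.0; the local file name is hypothetical.
from stable_baselines3 import A2C

model = A2C.load("a2c-PandaReachDense-v2.zip")

print(model.learning_rate)         # 0.0007, as recorded above
print(model.num_timesteps)         # 1000000 total timesteps
print(model.n_steps, model.gamma)  # 5, 0.99
print(model.observation_space)     # Dict(achieved_goal, desired_goal, observation)
print(model.action_space)          # 3-dimensional Box in [-1, 1]
```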
a2c-PandaReachDense-v2/policy.optimizer.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:43837dbfce0d45e8d1d929fe62f65f8939d739e887d56b6c4a666f79566e25bd
+size 44606
a2c-PandaReachDense-v2/policy.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:1e0e6dfec3a218ec47b3dc003732b65460a800695422e42f2b9362b40e0fa69f
+size 45886
a2c-PandaReachDense-v2/system_info.txt
CHANGED
@@ -1,7 +1,7 @@
 - OS: Linux-5.15.107+-x86_64-with-glibc2.31 # 1 SMP Sat Apr 29 09:15:28 UTC 2023
-- Python: 3.10.
+- Python: 3.10.12
 - Stable-Baselines3: 1.8.0
 - PyTorch: 2.0.1+cu118
-- GPU Enabled:
+- GPU Enabled: False
 - Numpy: 1.22.4
 - Gym: 0.21.0
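system_info.txt records the environment the model was trained and saved with; this commit fills in the Python patch version (3.10.12) and the GPU flag (False). Stable-Baselines3 exposes the same report programmatically, so a short sketch (assuming stable-baselines3 1.8.0) to regenerate that listing locally is:

```python
# Sketch: print the same kind of report that system_info.txt stores.
# get_system_info returns a (dict, formatted string) pair in SB3 1.8.0.
from stable_baselines3.common.utils import get_system_info

info_dict, info_str = get_system_info(print_info=False)
print(info_str)  # OS, Python, Stable-Baselines3, PyTorch, GPU Enabled, Numpy, Gym
```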
config.json
CHANGED
|
@@ -1 +1 @@
|
|
| 1 |
-
{"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=", "__module__": "stable_baselines3.common.policies", "__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x7f7e80ab5ea0>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x7f7e80abba40>"}, "verbose": 1, "policy_kwargs": {":type:": "<class 'dict'>", ":serialized:": "gAWVgQAAAAAAAAB9lCiMD29wdGltaXplcl9jbGFzc5SME3RvcmNoLm9wdGltLnJtc3Byb3CUjAdSTVNwcm9wlJOUjBBvcHRpbWl6ZXJfa3dhcmdzlH2UKIwFYWxwaGGURz/vrhR64UeujANlcHOURz7k+LWI42jxjAx3ZWlnaHRfZGVjYXmUSwB1dS4=", "optimizer_class": "<class 'torch.optim.rmsprop.RMSprop'>", "optimizer_kwargs": {"alpha": 0.99, "eps": 1e-05, "weight_decay": 0}}, "num_timesteps": 1000000, "_total_timesteps": 1000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1686228647566223568, "learning_rate": 0.0007, "tensorboard_log": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": 
"gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuCQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz9G8AaNuLrHhZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "_last_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA3JTVPo1eVjsdyBs/3JTVPo1eVjsdyBs/3JTVPo1eVjsdyBs/3JTVPo1eVjsdyBs/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAA3R4qP+U8CL+vcJQ/nzhmvOqOrT4PQr++DZhTv4AfNz8xZ1I/Ibz0Pqdo1L/NXhM/lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADclNU+jV5WOx3IGz+87j+7iCq0t/1aF7vclNU+jV5WOx3IGz+87j+7iCq0t/1aF7vclNU+jV5WOx3IGz+87j+7iCq0t/1aF7vclNU+jV5WOx3IGz+87j+7iCq0t/1aF7uUaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[0.41715133 0.00327102 0.60852224]\n [0.41715133 0.00327102 0.60852224]\n [0.41715133 0.00327102 0.60852224]\n [0.41715133 0.00327102 0.60852224]]", "desired_goal": "[[ 0.66453344 -0.5321792 1.1596888 ]\n [-0.01405159 0.33898097 -0.37355086]\n [-0.82653886 0.7153244 0.8218871 ]\n [ 0.4779978 -1.6594437 0.5756653 ]]", "observation": "[[ 4.1715133e-01 3.2710165e-03 6.0852224e-01 -2.9286584e-03\n -2.1477477e-05 -2.3095005e-03]\n [ 4.1715133e-01 3.2710165e-03 6.0852224e-01 -2.9286584e-03\n -2.1477477e-05 -2.3095005e-03]\n [ 4.1715133e-01 3.2710165e-03 6.0852224e-01 -2.9286584e-03\n -2.1477477e-05 -2.3095005e-03]\n [ 4.1715133e-01 3.2710165e-03 6.0852224e-01 -2.9286584e-03\n -2.1477477e-05 -2.3095005e-03]]"}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVdwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYEAAAAAAAAAAEBAQGUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKULg=="}, "_last_original_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAb3QjvR36Eb77DLw9lqwXPl1AHj17vW0+oMUJPkabxLwe1cY8+mSTvRW9oj3sp2o+lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAACUaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 
1.9740014e-01]]", "desired_goal": "[[-0.03990596 -0.14255567 0.09182163]\n [ 0.1481193 0.0386356 0.23216812]\n [ 0.13454294 -0.02399982 0.02427154]\n [-0.07196994 0.07946221 0.2291562 ]]", "observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"}, "_episode_num": 0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": 0.0, "_stats_window_size": 100, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVHRAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIOul942sPGsCUhpRSlIwBbJRLMowBdJRHQKjCqghbGFV1fZQoaAZoCWgPQwi/ub963IcTwJSGlFKUaBVLMmgWR0Cowm/foA4odX2UKGgGaAloD0MI8rbSa7MxDMCUhpRSlGgVSzJoFkdAqMI1TFVDKHV9lChoBmgJaA9DCEPhs3VwIBPAlIaUUpRoFUsyaBZHQKjB9gMMI/t1fZQoaAZoCWgPQwicGJKTiTsJwJSGlFKUaBVLMmgWR0Cow6P9LpRodX2UKGgGaAloD0MI41XWNsVDDsCUhpRSlGgVSzJoFkdAqMNqLKmsNnV9lChoBmgJaA9DCO3w12SNugfAlIaUUpRoFUsyaBZHQKjDL7Lt/nZ1fZQoaAZoCWgPQwjDKAge394DwJSGlFKUaBVLMmgWR0CowvBCMPz4dX2UKGgGaAloD0MIZDp0et6NCMCUhpRSlGgVSzJoFkdAqMSVsrNGE3V9lChoBmgJaA9DCAsMWd3qGQrAlIaUUpRoFUsyaBZHQKjEW3EyckN1fZQoaAZoCWgPQwi7fOvDeoMEwJSGlFKUaBVLMmgWR0CoxCDbJwKjdX2UKGgGaAloD0MI1EhL5e2oCsCUhpRSlGgVSzJoFkdAqMPhdD6WPnV9lChoBmgJaA9DCMstrYbEXQbAlIaUUpRoFUsyaBZHQKjFjmjCYTl1fZQoaAZoCWgPQwgGZK93f4wSwJSGlFKUaBVLMmgWR0CoxVRzaK1pdX2UKGgGaAloD0MIilsFMdCVGcCUhpRSlGgVSzJoFkdAqMUaDAaegHV9lChoBmgJaA9DCOwwJv29tA7AlIaUUpRoFUsyaBZHQKjE2qIacZt1fZQoaAZoCWgPQwjZImk3+pgEwJSGlFKUaBVLMmgWR0Coxnu4PPLQdX2UKGgGaAloD0MIowOSsG/HCcCUhpRSlGgVSzJoFkdAqMZBiw0O3HV9lChoBmgJaA9DCLh4eM+BRRTAlIaUUpRoFUsyaBZHQKjGBylvZRN1fZQoaAZoCWgPQwh9QKAzaZMPwJSGlFKUaBVLMmgWR0Coxce/gzgudX2UKGgGaAloD0MIZk0s8BWdD8CUhpRSlGgVSzJoFkdAqMd0ZR8+inV9lChoBmgJaA9DCI4G8BZIUA7AlIaUUpRoFUsyaBZHQKjHOi6g/Tt1fZQoaAZoCWgPQwh39SoyOsASwJSGlFKUaBVLMmgWR0Coxv/NA1NydX2UKGgGaAloD0MIT3gJTn0gC8CUhpRSlGgVSzJoFkdAqMbAZIg/1XV9lChoBmgJaA9DCGjLuRRXtRTAlIaUUpRoFUsyaBZHQKjIZgeii7F1fZQoaAZoCWgPQwgi3jr/dln8v5SGlFKUaBVLMmgWR0CoyCv4dp7DdX2UKGgGaAloD0MI0EVDxqN0A8CUhpRSlGgVSzJoFkdAqMfxjpcHGHV9lChoBmgJaA9DCM2tEFZjiQPAlIaUUpRoFUsyaBZHQKjHsiX6ZYx1fZQoaAZoCWgPQwhQOLu1TJYRwJSGlFKUaBVLMmgWR0CoyWYZVGTcdX2UKGgGaAloD0MIRL+2fvpPBsCUhpRSlGgVSzJoFkdAqMksKZ2IPHV9lChoBmgJaA9DCID0TZoGBQPAlIaUUpRoFUsyaBZHQKjI8dcSoOx1fZQoaAZoCWgPQwj9L9eiBWgVwJSGlFKUaBVLMmgWR0CoyLJoTPB0dX2UKGgGaAloD0MIvt798V7lEsCUhpRSlGgVSzJoFkdAqMpP4Irvs3V9lChoBmgJaA9DCJShKqbSDwrAlIaUUpRoFUsyaBZHQKjKFbt7a7F1fZQoaAZoCWgPQwhhVFInoJkQwJSGlFKUaBVLMmgWR0CoydtkvsZ6dX2UKGgGaAloD0MI5rD7juHRB8CUhpRSlGgVSzJoFkdAqMmcSIxgzHV9lChoBmgJaA9DCFpj0AmhAxrAlIaUUpRoFUsyaBZHQKjLSu2Zy+91fZQoaAZoCWgPQwiQaAJFLHITwJSGlFKUaBVLMmgWR0CoyxC+UQkHdX2UKGgGaAloD0MIZF3cRgN4D8CUhpRSlGgVSzJoFkdAqMrWQbMot3V9lChoBmgJaA9DCKNXA5SGOgzAlIaUUpRoFUsyaBZHQKjKlu5SWJJ1fZQoaAZoCWgPQwgcsoF0sWkawJSGlFKUaBVLMmgWR0CozLYFJQLvdX2UKGgGaAloD0MIEtvdA3S/DMCUhpRSlGgVSzJoFkdAqMx8g8r7O3V9lChoBmgJaA9DCJo+O+C6AgjAlIaUUpRoFUsyaBZHQKjMQr7wazh1fZQoaAZoCWgPQwh6i4f3HCgWwJSGlFKUaBVLMmgWR0CozARO1v2odX2UKGgGaAloD0MITifZ6nL6EcCUhpRSlGgVSzJoFkdAqM49ytFKCnV9lChoBmgJaA9DCFNdwMsMGxvAlIaUUpRoFUsyaBZHQKjOBErGza91fZQoaAZoCWgPQwjYRGYucNkMwJSGlFKUaBVLMmgWR0CozcqGDcubdX2UKGgGaAloD0MI+IvZklXxB8CUhpRSlGgVSzJoFkdAqM2MM3IdVHV9lChoBmgJaA9DCF8KD5pdpxPAlIaUUpRoFUs
yaBZHQKjPyJgsshB1fZQoaAZoCWgPQwiMaaZ7nTQTwJSGlFKUaBVLMmgWR0Coz48feUILdX2UKGgGaAloD0MIMWE0K9vHBsCUhpRSlGgVSzJoFkdAqM9VOoHcDnV9lChoBmgJaA9DCDBHj9/bFArAlIaUUpRoFUsyaBZHQKjPFm/336B1fZQoaAZoCWgPQwih+DHmroUDwJSGlFKUaBVLMmgWR0Co0Xj6vaDgdX2UKGgGaAloD0MI0c3+QLl9EMCUhpRSlGgVSzJoFkdAqNE/4h2W6nV9lChoBmgJaA9DCMrBbAIMSwTAlIaUUpRoFUsyaBZHQKjRBiVB2Oh1fZQoaAZoCWgPQwjgnXx6bGsBwJSGlFKUaBVLMmgWR0Co0MdyksSTdX2UKGgGaAloD0MIQBNhw9PLGsCUhpRSlGgVSzJoFkdAqNMjV6NVBHV9lChoBmgJaA9DCIwS9Bd6ZAbAlIaUUpRoFUsyaBZHQKjS6hs67ul1fZQoaAZoCWgPQwhY5q26DlUKwJSGlFKUaBVLMmgWR0Co0rAzP8htdX2UKGgGaAloD0MIzjY3pids/r+UhpRSlGgVSzJoFkdAqNJx/iHZb3V9lChoBmgJaA9DCIdT5uYbsRPAlIaUUpRoFUsyaBZHQKjU1Zr56+p1fZQoaAZoCWgPQwguyQG7muwSwJSGlFKUaBVLMmgWR0Co1JwZXMhYdX2UKGgGaAloD0MIMISc9/9xEsCUhpRSlGgVSzJoFkdAqNRij59E1HV9lChoBmgJaA9DCFcJFocz/wTAlIaUUpRoFUsyaBZHQKjUJFXq7iB1fZQoaAZoCWgPQwiu82+X/foMwJSGlFKUaBVLMmgWR0Co1ogY51eTdX2UKGgGaAloD0MI8fW1LjXC/b+UhpRSlGgVSzJoFkdAqNZOoR7JGXV9lChoBmgJaA9DCAxzgjY5HAPAlIaUUpRoFUsyaBZHQKjWFN7jT8Z1fZQoaAZoCWgPQwg+QWK7e2APwJSGlFKUaBVLMmgWR0Co1dZWilBQdX2UKGgGaAloD0MI+YGrPIHQDsCUhpRSlGgVSzJoFkdAqNggN5MURHV9lChoBmgJaA9DCJ7wEpz64APAlIaUUpRoFUsyaBZHQKjX5e1KGtZ1fZQoaAZoCWgPQwjymeyfp7ERwJSGlFKUaBVLMmgWR0Co16t2ki2VdX2UKGgGaAloD0MIrYpwk1HFDsCUhpRSlGgVSzJoFkdAqNdsHMUypXV9lChoBmgJaA9DCCwMkdPX0wzAlIaUUpRoFUsyaBZHQKjZBMlkYoB1fZQoaAZoCWgPQwjb/SrAd3sHwJSGlFKUaBVLMmgWR0Co2MqNAC4jdX2UKGgGaAloD0MIH9lcNc+RBcCUhpRSlGgVSzJoFkdAqNiQOlO45XV9lChoBmgJaA9DCLWK/tDM8x7AlIaUUpRoFUsyaBZHQKjYUOSW7e51fZQoaAZoCWgPQwhrLcxCOycPwJSGlFKUaBVLMmgWR0Co2ff/vOQhdX2UKGgGaAloD0MINzP60XAqEsCUhpRSlGgVSzJoFkdAqNm92gWadHV9lChoBmgJaA9DCPzjvWplIgnAlIaUUpRoFUsyaBZHQKjZg1LrX191fZQoaAZoCWgPQwgnT1lN1xMKwJSGlFKUaBVLMmgWR0Co2UQEpy6udX2UKGgGaAloD0MIi6VIvhLIC8CUhpRSlGgVSzJoFkdAqNruSIP9UHV9lChoBmgJaA9DCGN+bmjKrgnAlIaUUpRoFUsyaBZHQKjatBt1p0x1fZQoaAZoCWgPQwhm+E83UIARwJSGlFKUaBVLMmgWR0Co2nmbTc7AdX2UKGgGaAloD0MIX3zRHi+0E8CUhpRSlGgVSzJoFkdAqNo6dBjWkXV9lChoBmgJaA9DCHaKVYMwZxDAlIaUUpRoFUsyaBZHQKjb42ZRbbF1fZQoaAZoCWgPQwifIoeIm8MQwJSGlFKUaBVLMmgWR0Co26lC1JDmdX2UKGgGaAloD0MIhXtl3qprCcCUhpRSlGgVSzJoFkdAqNtuwRoRI3V9lChoBmgJaA9DCMNIL2r3KwPAlIaUUpRoFUsyaBZHQKjbL1U2kzp1fZQoaAZoCWgPQwjysiYW+GoHwJSGlFKUaBVLMmgWR0Co3Nhltj0+dX2UKGgGaAloD0MIbLBwkuaPAsCUhpRSlGgVSzJoFkdAqNyebCrLhnV9lChoBmgJaA9DCFVtN8E3/RfAlIaUUpRoFUsyaBZHQKjcY+GoJiR1fZQoaAZoCWgPQwiZR/5g4FkVwJSGlFKUaBVLMmgWR0Co3CSWiUPhdX2UKGgGaAloD0MIILdfPllBEsCUhpRSlGgVSzJoFkdAqN3KSX+l03V9lChoBmgJaA9DCD5anDHMqQLAlIaUUpRoFUsyaBZHQKjdkCih37l1fZQoaAZoCWgPQwhhF0UPfIwfwJSGlFKUaBVLMmgWR0Co3VW9cry2dX2UKGgGaAloD0MIOKPmq+SjBMCUhpRSlGgVSzJoFkdAqN0WbqhUR3V9lChoBmgJaA9DCORmuAGfHx7AlIaUUpRoFUsyaBZHQKjevPLxI8R1fZQoaAZoCWgPQwjKTj+oi+QQwJSGlFKUaBVLMmgWR0Co3oKnm7rcdX2UKGgGaAloD0MICcOAJVexCMCUhpRSlGgVSzJoFkdAqN5IeLehwnV9lChoBmgJaA9DCFJgAUwZCBPAlIaUUpRoFUsyaBZHQKjeCSQo1DV1ZS4="}, "ep_success_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 50000, "n_steps": 5, "gamma": 0.99, "gae_lambda": 1.0, "ent_coef": 0.0, "vf_coef": 0.5, "max_grad_norm": 0.5, "normalize_advantage": false, "observation_space": {":type:": "<class 'gym.spaces.dict.Dict'>", ":serialized:": 
"gAWVWAMAAAAAAACMD2d5bS5zcGFjZXMuZGljdJSMBERpY3SUk5QpgZR9lCiMBnNwYWNlc5SMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwOZ3ltLnNwYWNlcy5ib3iUjANCb3iUk5QpgZR9lCiMBWR0eXBllIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYowGX3NoYXBllEsDhZSMA2xvd5SMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFksDhZSMAUOUdJRSlIwEaGlnaJRoHiiWDAAAAAAAAAAAACBBAAAgQQAAIEGUaBZLA4WUaCF0lFKUjA1ib3VuZGVkX2JlbG93lGgeKJYDAAAAAAAAAAEBAZRoE4wCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksDhZRoIXSUUpSMDWJvdW5kZWRfYWJvdmWUaB4olgMAAAAAAAAAAQEBlGgtSwOFlGghdJRSlIwKX25wX3JhbmRvbZROdWKMDGRlc2lyZWRfZ29hbJRoDSmBlH2UKGgQaBZoGUsDhZRoG2geKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFksDhZRoIXSUUpRoJGgeKJYMAAAAAAAAAAAAIEEAACBBAAAgQZRoFksDhZRoIXSUUpRoKWgeKJYDAAAAAAAAAAEBAZRoLUsDhZRoIXSUUpRoM2geKJYDAAAAAAAAAAEBAZRoLUsDhZRoIXSUUpRoOE51YowLb2JzZXJ2YXRpb26UaA0pgZR9lChoEGgWaBlLBoWUaBtoHiiWGAAAAAAAAAAAACDBAAAgwQAAIMEAACDBAAAgwQAAIMGUaBZLBoWUaCF0lFKUaCRoHiiWGAAAAAAAAAAAACBBAAAgQQAAIEEAACBBAAAgQQAAIEGUaBZLBoWUaCF0lFKUaCloHiiWBgAAAAAAAAABAQEBAQGUaC1LBoWUaCF0lFKUaDNoHiiWBgAAAAAAAAABAQEBAQGUaC1LBoWUaCF0lFKUaDhOdWJ1aBlOaBBOaDhOdWIu", "spaces": "OrderedDict([('achieved_goal', Box([-10. -10. -10.], [10. 10. 10.], (3,), float32)), ('desired_goal', Box([-10. -10. -10.], [10. 10. 10.], (3,), float32)), ('observation', Box([-10. -10. -10. -10. -10. -10.], [10. 10. 10. 10. 10. 10.], (6,), float32))])", "_shape": null, "dtype": null, "_np_random": null}, "action_space": {":type:": "<class 'gym.spaces.box.Box'>", ":serialized:": "gAWVcwEAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLA4WUjANsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWDAAAAAAAAAAAAIC/AACAvwAAgL+UaAtLA4WUjAFDlHSUUpSMBGhpZ2iUaBMolgwAAAAAAAAAAACAPwAAgD8AAIA/lGgLSwOFlGgWdJRSlIwNYm91bmRlZF9iZWxvd5RoEyiWAwAAAAAAAAABAQGUaAiMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLA4WUaBZ0lFKUjA1ib3VuZGVkX2Fib3ZllGgTKJYDAAAAAAAAAAEBAZRoIksDhZRoFnSUUpSMCl9ucF9yYW5kb22UTnViLg==", "dtype": "float32", "_shape": [3], "low": "[-1. -1. -1.]", "high": "[1. 1. 1.]", "bounded_below": "[ True True True]", "bounded_above": "[ True True True]", "_np_random": null}, "n_envs": 4, "system_info": {"OS": "Linux-5.15.107+-x86_64-with-glibc2.31 # 1 SMP Sat Apr 29 09:15:28 UTC 2023", "Python": "3.10.11", "Stable-Baselines3": "1.8.0", "PyTorch": "2.0.1+cu118", "GPU Enabled": "True", "Numpy": "1.22.4", "Gym": "0.21.0"}}
|
|
|
|
| 1 |
+
{"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=", "__module__": "stable_baselines3.common.policies", "__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x7f528cd53d00>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x7f528cd58540>"}, "verbose": 1, "policy_kwargs": {":type:": "<class 'dict'>", ":serialized:": "gAWVgQAAAAAAAAB9lCiMD29wdGltaXplcl9jbGFzc5SME3RvcmNoLm9wdGltLnJtc3Byb3CUjAdSTVNwcm9wlJOUjBBvcHRpbWl6ZXJfa3dhcmdzlH2UKIwFYWxwaGGURz/vrhR64UeujANlcHOURz7k+LWI42jxjAx3ZWlnaHRfZGVjYXmUSwB1dS4=", "optimizer_class": "<class 'torch.optim.rmsprop.RMSprop'>", "optimizer_kwargs": {"alpha": 0.99, "eps": 1e-05, "weight_decay": 0}}, "num_timesteps": 1000000, "_total_timesteps": 1000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1686289531784048596, "learning_rate": 0.0007, "tensorboard_log": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": 
"gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuCQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz9G8AaNuLrHhZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "_last_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAAG2C8PnDejz3HpSE/G2C8PnDejz3HpSE/G2C8PnDejz3HpSE/G2C8PnDejz3HpSE/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAA6wlEvwkRWD6hQ7U8hWuXv6+zBz5G36a/sqKMP+oOsD+P+ik/ox3OPpXmhL/2b329lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAAAbYLw+cN6PPcelIT/zDXK8+NKbugP0ATwbYLw+cN6PPcelIT/zDXK8+NKbugP0ATwbYLw+cN6PPcelIT/zDXK8+NKbugP0ATwbYLw+cN6PPcelIT/zDXK8+NKbugP0ATyUaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[0.36792073 0.07024848 0.6314358 ]\n [0.36792073 0.07024848 0.6314358 ]\n [0.36792073 0.07024848 0.6314358 ]\n [0.36792073 0.07024848 0.6314358 ]]", "desired_goal": "[[-0.76577634 0.21100248 0.02212697]\n [-1.1829687 0.13252138 -1.3036888 ]\n [ 1.0987151 1.3754551 0.6639795 ]\n [ 0.40256986 -1.0382868 -0.06187435]]", "observation": "[[ 0.36792073 0.07024848 0.6314358 -0.01477383 -0.00118884 0.00793171]\n [ 0.36792073 0.07024848 0.6314358 -0.01477383 -0.00118884 0.00793171]\n [ 0.36792073 0.07024848 0.6314358 -0.01477383 -0.00118884 0.00793171]\n [ 0.36792073 0.07024848 0.6314358 -0.01477383 -0.00118884 0.00793171]]"}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVdwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYEAAAAAAAAAAEBAQGUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKULg=="}, "_last_original_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAKr84vPWvDj4O4oo+YDWZvYJG5L3BBmM+Zz6IvU6xx70ndJI9HUTIvTALvj3dyzE+lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAACUaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]", "desired_goal": "[[-0.01127605 0.1393431 0.2712559 ]\n [-0.07480884 -0.11146261 
0.22170545]\n [-0.06652527 -0.09750615 0.07151061]\n [-0.09778617 0.09279478 0.17362924]]", "observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"}, "_episode_num": 0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": 0.0, "_stats_window_size": 100, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVHRAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIL6UuGcfoDsCUhpRSlIwBbJRLMowBdJRHQKkSvmOEM9d1fZQoaAZoCWgPQwhuisdFtdgQwJSGlFKUaBVLMmgWR0CpEhCuMdcTdX2UKGgGaAloD0MID5iHTPmwEMCUhpRSlGgVSzJoFkdAqRFyP6sQunV9lChoBmgJaA9DCDGzz2OUxw3AlIaUUpRoFUsyaBZHQKkQ+T8pCrt1fZQoaAZoCWgPQwio5JzYQ7sJwJSGlFKUaBVLMmgWR0CpE7bl7tzCdX2UKGgGaAloD0MI46qy74qACsCUhpRSlGgVSzJoFkdAqRMJCOWBz3V9lChoBmgJaA9DCJeo3hrYigHAlIaUUpRoFUsyaBZHQKkSapgCwKV1fZQoaAZoCWgPQwijWkQUk5cHwJSGlFKUaBVLMmgWR0CpEfFnh86WdX2UKGgGaAloD0MIkIZT5uYLEcCUhpRSlGgVSzJoFkdAqRSwIv8IiXV9lChoBmgJaA9DCObpXFFK6BLAlIaUUpRoFUsyaBZHQKkUAlC1JDp1fZQoaAZoCWgPQwhsJAnCFZAJwJSGlFKUaBVLMmgWR0CpE2Pu5SWJdX2UKGgGaAloD0MItrkxPWHJCMCUhpRSlGgVSzJoFkdAqRLq22G7BnV9lChoBmgJaA9DCLZmKy/5vwfAlIaUUpRoFUsyaBZHQKkVsYqoZQ51fZQoaAZoCWgPQwiUF5mAXwMLwJSGlFKUaBVLMmgWR0CpFQOeJ53UdX2UKGgGaAloD0MIQQx07QuoCMCUhpRSlGgVSzJoFkdAqRRlYW+GoXV9lChoBmgJaA9DCACquHGL2QvAlIaUUpRoFUsyaBZHQKkT7EQ5FPV1fZQoaAZoCWgPQwhC6KBLONQSwJSGlFKUaBVLMmgWR0CpFqjSgGr0dX2UKGgGaAloD0MI/RAbLJzkC8CUhpRSlGgVSzJoFkdAqRX7J6po9XV9lChoBmgJaA9DCLIrLSP1HgzAlIaUUpRoFUsyaBZHQKkVXNKyv9t1fZQoaAZoCWgPQwiy9KEL6nsGwJSGlFKUaBVLMmgWR0CpFOO/1xsEdX2UKGgGaAloD0MI88e0No0NEMCUhpRSlGgVSzJoFkdAqReu5e7cwnV9lChoBmgJaA9DCLJkjuVdVQ3AlIaUUpRoFUsyaBZHQKkXARA8jiZ1fZQoaAZoCWgPQwjY8zXLZXMSwJSGlFKUaBVLMmgWR0CpFmKo60Y1dX2UKGgGaAloD0MI1/fhICEqD8CUhpRSlGgVSzJoFkdAqRXpjlPrOnV9lChoBmgJaA9DCE4rhUAuEQXAlIaUUpRoFUsyaBZHQKkYqtU4rBl1fZQoaAZoCWgPQwjHoX4XtmYIwJSGlFKUaBVLMmgWR0CpF/0fozN2dX2UKGgGaAloD0MI0ETY8PQKDsCUhpRSlGgVSzJoFkdAqRdewFC9iHV9lChoBmgJaA9DCDzB/uvcJBTAlIaUUpRoFUsyaBZHQKkW5aOgg5l1fZQoaAZoCWgPQwiFCDiEKpUFwJSGlFKUaBVLMmgWR0CpGabF0gbIdX2UKGgGaAloD0MIuOnPfqSIBcCUhpRSlGgVSzJoFkdAqRj44jrzG3V9lChoBmgJaA9DCK4SLA5nngbAlIaUUpRoFUsyaBZHQKkYWoYNy5t1fZQoaAZoCWgPQwjwMsNGWY8SwJSGlFKUaBVLMmgWR0CpF+GxD9fkdX2UKGgGaAloD0MINrBVgsXhEMCUhpRSlGgVSzJoFkdAqRqh0uDjBHV9lChoBmgJaA9DCAOV8e8zbgrAlIaUUpRoFUsyaBZHQKkZ8/cFhXt1fZQoaAZoCWgPQwhbttYXCY0JwJSGlFKUaBVLMmgWR0CpGVWNWEK3dX2UKGgGaAloD0MItK88SE+RD8CUhpRSlGgVSzJoFkdAqRjcYO2AoXV9lChoBmgJaA9DCAjnU8cqBQ/AlIaUUpRoFUsyaBZHQKkbnKZlWfd1fZQoaAZoCWgPQwgPlxx3SjcQwJSGlFKUaBVLMmgWR0CpGu82zfJndX2UKGgGaAloD0MI56kOuRnOBMCUhpRSlGgVSzJoFkdAqRpQ0sOG03V9lChoBmgJaA9DCBVzEHS0SgnAlIaUUpRoFUsyaBZHQKkZ19xZMcp1fZQoaAZoCWgPQwiBJOzbSQQGwJSGlFKUaBVLMmgWR0CpHJC9Zid8dX2UKGgGaAloD0MIb2b0o+G0EcCUhpRSlGgVSzJoFkdAqRvi0rsjV3V9lChoBmgJaA9DCEC/79+8KBPAlIaUUpRoFUsyaBZHQKkbRJeVs1t1fZQoaAZoCWgPQwivtIzUe4oFwJSGlFKUaBVLMmgWR0CpGst7rs0IdX2UKGgGaAloD0MIqkNuhhvwDMCUhpRSlGgVSzJoFkdAqR2FX5nDi3V9lChoBmgJaA9DCE4pr5XQvRHAlIaUUpRoFUsyaBZHQKkc13jdYXB1fZQoaAZoCWgPQwiEDOTZ5Rv+v5SGlFKUaBVLMmgWR0CpHDkCeVcEdX2UKGgGaAloD0MIxHdi1ouhDcCUhpRSlGgVSzJoFkdAqRu/49HMEHV9lChoBmgJaA9DCPJbdLLUOg/AlIaUUpRoFUsyaBZHQKkegnRb8m91fZQoaAZoCWgPQwgJw4AlV3EGwJSGlFKUaBVLMmgWR0CpHdSHmA9WdX2UKGgGaAloD0MIldOeknMCEcCU
hpRSlGgVSzJoFkdAqR02kP+XJHV9lChoBmgJaA9DCCxGXWvvMwfAlIaUUpRoFUsyaBZHQKkcvWz4UN91fZQoaAZoCWgPQwhoBvGBHX8IwJSGlFKUaBVLMmgWR0CpH4S/TLGJdX2UKGgGaAloD0MIiljEsMN4DsCUhpRSlGgVSzJoFkdAqR7W+23KCHV9lChoBmgJaA9DCJnTZTGx2QnAlIaUUpRoFUsyaBZHQKkeOJ6Y3Nt1fZQoaAZoCWgPQwhHVn4ZjEESwJSGlFKUaBVLMmgWR0CpHb+kHlfadX2UKGgGaAloD0MInYGRlzWRFcCUhpRSlGgVSzJoFkdAqSCD9AHE/HV9lChoBmgJaA9DCEm9p3La8wbAlIaUUpRoFUsyaBZHQKkf1hAGB4F1fZQoaAZoCWgPQwh4DI/9LJYIwJSGlFKUaBVLMmgWR0CpHzeqBErodX2UKGgGaAloD0MIldOeknMiDcCUhpRSlGgVSzJoFkdAqR6+e18b73V9lChoBmgJaA9DCKOSOgFN5BLAlIaUUpRoFUsyaBZHQKkiE57PY4B1fZQoaAZoCWgPQwico46OqxEQwJSGlFKUaBVLMmgWR0CpIWbMHKOldX2UKGgGaAloD0MI9NxCVyLQD8CUhpRSlGgVSzJoFkdAqSDJMJx//nV9lChoBmgJaA9DCHWw/s9hrhDAlIaUUpRoFUsyaBZHQKkgUP8Q7Ld1fZQoaAZoCWgPQwjequtQTYkFwJSGlFKUaBVLMmgWR0CpI7/EfkmydX2UKGgGaAloD0MI9Z7KaU+5EMCUhpRSlGgVSzJoFkdAqSMSjafzz3V9lChoBmgJaA9DCImXp3NFyQ7AlIaUUpRoFUsyaBZHQKkidL6DXe51fZQoaAZoCWgPQwgtJctJKJ0GwJSGlFKUaBVLMmgWR0CpIfxeLNwBdX2UKGgGaAloD0MI4X1VLlSOEcCUhpRSlGgVSzJoFkdAqSV34mCyyHV9lChoBmgJaA9DCASsVbsmRBPAlIaUUpRoFUsyaBZHQKkkyxGDtgN1fZQoaAZoCWgPQwj2lnK+2OsQwJSGlFKUaBVLMmgWR0CpJC2ac7QtdX2UKGgGaAloD0MIrFj8prDyDMCUhpRSlGgVSzJoFkdAqSO2SOinHnV9lChoBmgJaA9DCF3cRgN4KwjAlIaUUpRoFUsyaBZHQKknRmKZUkx1fZQoaAZoCWgPQwgD7KNTV34JwJSGlFKUaBVLMmgWR0CpJpluWKMvdX2UKGgGaAloD0MImGpmLQXUEsCUhpRSlGgVSzJoFkdAqSX8J2MbWHV9lChoBmgJaA9DCNECtK1mPQXAlIaUUpRoFUsyaBZHQKklg/IsAed1fZQoaAZoCWgPQwjU7lcBvvsIwJSGlFKUaBVLMmgWR0CpKQAU1yeadX2UKGgGaAloD0MIfzMxXYhVCsCUhpRSlGgVSzJoFkdAqShTLbHp8nV9lChoBmgJaA9DCKyRXWkZeRLAlIaUUpRoFUsyaBZHQKkntZ5iVjZ1fZQoaAZoCWgPQwjaU3JO7EEGwJSGlFKUaBVLMmgWR0CpJz158jRldX2UKGgGaAloD0MIG/M64pDNB8CUhpRSlGgVSzJoFkdAqSq4RNATqXV9lChoBmgJaA9DCM9r7BLVaxDAlIaUUpRoFUsyaBZHQKkqDBSk0rN1fZQoaAZoCWgPQwj0qWOV0pMNwJSGlFKUaBVLMmgWR0CpKW7iZOSGdX2UKGgGaAloD0MINSVZh6PrFMCUhpRSlGgVSzJoFkdAqSj3Dk2gnXV9lChoBmgJaA9DCJVIopdRjA/AlIaUUpRoFUsyaBZHQKksRlGPPs11fZQoaAZoCWgPQwgnM95Wes0BwJSGlFKUaBVLMmgWR0CpK5h2W6bwdX2UKGgGaAloD0MIIa6cvTO6B8CUhpRSlGgVSzJoFkdAqSr6Dyvs7nV9lChoBmgJaA9DCOC9o8aEGA7AlIaUUpRoFUsyaBZHQKkqgURFqi51fZQoaAZoCWgPQwicNXhflQsQwJSGlFKUaBVLMmgWR0CpLUF/x2B8dX2UKGgGaAloD0MI/yPTodMTEcCUhpRSlGgVSzJoFkdAqSyTo2XLNnV9lChoBmgJaA9DCIrHRbWIGBHAlIaUUpRoFUsyaBZHQKkr9ar3j+91fZQoaAZoCWgPQwgaGeQuwpQIwJSGlFKUaBVLMmgWR0CpK3z987ZGdX2UKGgGaAloD0MI9raZCvEoC8CUhpRSlGgVSzJoFkdAqS46AjIJaHV9lChoBmgJaA9DCOPD7GXbaQvAlIaUUpRoFUsyaBZHQKktjDG96C11fZQoaAZoCWgPQwirzf+rjrwTwJSGlFKUaBVLMmgWR0CpLO3LV4HHdX2UKGgGaAloD0MI8iN+xRqOBsCUhpRSlGgVSzJoFkdAqSx04m1IAnV9lChoBmgJaA9DCIF7nj9tRBDAlIaUUpRoFUsyaBZHQKkvOtK7I1d1fZQoaAZoCWgPQwjtSWBzDo4RwJSGlFKUaBVLMmgWR0CpLozz3AVPdX2UKGgGaAloD0MI/gqZK4NqC8CUhpRSlGgVSzJoFkdAqS3uhdt2tHV9lChoBmgJaA9DCPIjfsUa7g/AlIaUUpRoFUsyaBZHQKktdV5KODJ1ZS4="}, "ep_success_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 50000, "n_steps": 5, "gamma": 0.99, "gae_lambda": 1.0, "ent_coef": 0.0, "vf_coef": 0.5, "max_grad_norm": 0.5, "normalize_advantage": false, "observation_space": {":type:": "<class 'gym.spaces.dict.Dict'>", ":serialized:": 
"gAWVWAMAAAAAAACMD2d5bS5zcGFjZXMuZGljdJSMBERpY3SUk5QpgZR9lCiMBnNwYWNlc5SMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwOZ3ltLnNwYWNlcy5ib3iUjANCb3iUk5QpgZR9lCiMBWR0eXBllIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYowGX3NoYXBllEsDhZSMA2xvd5SMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFksDhZSMAUOUdJRSlIwEaGlnaJRoHiiWDAAAAAAAAAAAACBBAAAgQQAAIEGUaBZLA4WUaCF0lFKUjA1ib3VuZGVkX2JlbG93lGgeKJYDAAAAAAAAAAEBAZRoE4wCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksDhZRoIXSUUpSMDWJvdW5kZWRfYWJvdmWUaB4olgMAAAAAAAAAAQEBlGgtSwOFlGghdJRSlIwKX25wX3JhbmRvbZROdWKMDGRlc2lyZWRfZ29hbJRoDSmBlH2UKGgQaBZoGUsDhZRoG2geKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFksDhZRoIXSUUpRoJGgeKJYMAAAAAAAAAAAAIEEAACBBAAAgQZRoFksDhZRoIXSUUpRoKWgeKJYDAAAAAAAAAAEBAZRoLUsDhZRoIXSUUpRoM2geKJYDAAAAAAAAAAEBAZRoLUsDhZRoIXSUUpRoOE51YowLb2JzZXJ2YXRpb26UaA0pgZR9lChoEGgWaBlLBoWUaBtoHiiWGAAAAAAAAAAAACDBAAAgwQAAIMEAACDBAAAgwQAAIMGUaBZLBoWUaCF0lFKUaCRoHiiWGAAAAAAAAAAAACBBAAAgQQAAIEEAACBBAAAgQQAAIEGUaBZLBoWUaCF0lFKUaCloHiiWBgAAAAAAAAABAQEBAQGUaC1LBoWUaCF0lFKUaDNoHiiWBgAAAAAAAAABAQEBAQGUaC1LBoWUaCF0lFKUaDhOdWJ1aBlOaBBOaDhOdWIu", "spaces": "OrderedDict([('achieved_goal', Box([-10. -10. -10.], [10. 10. 10.], (3,), float32)), ('desired_goal', Box([-10. -10. -10.], [10. 10. 10.], (3,), float32)), ('observation', Box([-10. -10. -10. -10. -10. -10.], [10. 10. 10. 10. 10. 10.], (6,), float32))])", "_shape": null, "dtype": null, "_np_random": null}, "action_space": {":type:": "<class 'gym.spaces.box.Box'>", ":serialized:": "gAWVcwEAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLA4WUjANsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWDAAAAAAAAAAAAIC/AACAvwAAgL+UaAtLA4WUjAFDlHSUUpSMBGhpZ2iUaBMolgwAAAAAAAAAAACAPwAAgD8AAIA/lGgLSwOFlGgWdJRSlIwNYm91bmRlZF9iZWxvd5RoEyiWAwAAAAAAAAABAQGUaAiMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLA4WUaBZ0lFKUjA1ib3VuZGVkX2Fib3ZllGgTKJYDAAAAAAAAAAEBAZRoIksDhZRoFnSUUpSMCl9ucF9yYW5kb22UTnViLg==", "dtype": "float32", "_shape": [3], "low": "[-1. -1. -1.]", "high": "[1. 1. 1.]", "bounded_below": "[ True True True]", "bounded_above": "[ True True True]", "_np_random": null}, "n_envs": 4, "system_info": {"OS": "Linux-5.15.107+-x86_64-with-glibc2.31 # 1 SMP Sat Apr 29 09:15:28 UTC 2023", "Python": "3.10.12", "Stable-Baselines3": "1.8.0", "PyTorch": "2.0.1+cu118", "GPU Enabled": "False", "Numpy": "1.22.4", "Gym": "0.21.0"}}
|
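Between the serialized blobs, config.json spells out the full A2C hyperparameter set: learning_rate 0.0007, n_steps 5, gamma 0.99, gae_lambda 1.0, ent_coef 0.0, vf_coef 0.5, max_grad_norm 0.5, normalize_advantage false, the default RMSprop optimizer (alpha 0.99, eps 1e-05), n_envs 4 and 1,000,000 timesteps. A hedged sketch of a comparable training run built from those values (not the author's original script); it assumes gym 0.21, panda_gym for PandaReachDense-v2 and stable-baselines3 1.8.0, and it infers the VecNormalize wrapper from the vec_normalize.pkl file below:

```python
# Sketch of a comparable training run using the hyperparameters in config.json.
# Assumptions: panda_gym registers PandaReachDense-v2; SB3 1.8.0 / gym 0.21.
import panda_gym  # noqa: F401  (registers the Panda tasks)
from stable_baselines3 import A2C
from stable_baselines3.common.env_util import make_vec_env
from stable_baselines3.common.vec_env import VecNormalize

env = make_vec_env("PandaReachDense-v2", n_envs=4)         # n_envs: 4
env = VecNormalize(env, norm_obs=True, norm_reward=True)   # assumed, see vec_normalize.pkl

model = A2C(
    "MultiInputPolicy",  # MultiInputActorCriticPolicy, as in policy_class
    env,
    learning_rate=0.0007,
    n_steps=5,
    gamma=0.99,
    gae_lambda=1.0,
    ent_coef=0.0,
    vf_coef=0.5,
    max_grad_norm=0.5,
    normalize_advantage=False,
    verbose=1,
)
model.learn(total_timesteps=1_000_000)
model.save("a2c-PandaReachDense-v2")  # produces the zip archive above
env.save("vec_normalize.pkl")         # saves the normalization statistics
```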
replay.mp4
CHANGED
Binary files a/replay.mp4 and b/replay.mp4 differ
results.json
CHANGED
@@ -1 +1 @@
-{"mean_reward": -3.
+{"mean_reward": -3.7849887076299638, "std_reward": 0.8238678315817847, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2023-06-09T06:46:41.276426"}
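results.json is the evaluation summary that also feeds the README metric: a mean reward of -3.78 +/- 0.82 over 10 deterministic episodes. A hedged sketch of reproducing that kind of number, assuming the model zip and vec_normalize.pkl have been downloaded locally and panda_gym provides PandaReachDense-v2:

```python
# Sketch: recompute a mean_reward / std_reward pair like the one in results.json.
# File names are the repository's; local availability is assumed.
import panda_gym  # noqa: F401  (registers the Panda tasks)
from stable_baselines3 import A2C
from stable_baselines3.common.env_util import make_vec_env
from stable_baselines3.common.evaluation import evaluate_policy
from stable_baselines3.common.vec_env import VecNormalize

eval_env = make_vec_env("PandaReachDense-v2", n_envs=1)
eval_env = VecNormalize.load("vec_normalize.pkl", eval_env)
eval_env.training = False     # freeze the normalization statistics
eval_env.norm_reward = False  # report raw rewards

model = A2C.load("a2c-PandaReachDense-v2.zip")
mean_reward, std_reward = evaluate_policy(
    model, eval_env, n_eval_episodes=10, deterministic=True
)
print(f"mean_reward={mean_reward:.2f} +/- {std_reward:.2f}")
```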
vec_normalize.pkl
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:cf047c3035f8ff42e5014af3e43daf68662b2ac7979dfc81a37459c1ae38c1ec
 size 2387