sleepytaco committed
Commit 528e378
1 Parent(s): 12762ef

Initial commit

README.md CHANGED
@@ -16,7 +16,7 @@ model-index:
  type: PandaReachDense-v2
  metrics:
  - type: mean_reward
- value: -4.14 +/- 1.20
+ value: -2.09 +/- 0.40
  name: mean_reward
  verified: false
 ---
a2c-PandaReachDense-v2.zip CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:08b1e105512830945b07c2b6a8930246597bd7a41017d9d30a5dba894e2bcce5
-size 102530
+oid sha256:23bae4f77bbba98ab8db4df27e9dde4c54a79c0bdada35940f1205d1e69850e5
+size 108028
a2c-PandaReachDense-v2/data CHANGED
@@ -4,9 +4,9 @@
  ":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=",
  "__module__": "stable_baselines3.common.policies",
  "__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
- "__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x7f23e2402280>",
  "__abstractmethods__": "frozenset()",
- "_abc_impl": "<_abc._abc_data object at 0x7f23e23fdf00>"
  },
  "verbose": 1,
  "policy_kwargs": {
@@ -41,12 +41,12 @@
  "_np_random": null
  },
  "n_envs": 4,
- "num_timesteps": 0,
- "_total_timesteps": 1000000,
  "_num_timesteps_at_start": 0,
  "seed": null,
  "action_noise": null,
- "start_time": 1680109445969967231,
  "learning_rate": 0.0007,
  "tensorboard_log": null,
  "lr_schedule": {
@@ -55,10 +55,10 @@
  },
  "_last_obs": {
  ":type:": "<class 'collections.OrderedDict'>",
- ":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAAucvQPpar2TwgoAo/ucvQPpar2TwgoAo/ucvQPpar2TwgoAo/ucvQPpar2TwgoAo/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAxvrNv//Rm7/qEZ2/0frcPzpY2b4cuAg/hzwGv5M95D7efzW+EAsGv/zNlT/VWIq/lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAAC5y9A+lqvZPCCgCj/eWyM8jp0nO779Bzu5y9A+lqvZPCCgCj/eWyM8jp0nO779Bzu5y9A+lqvZPCCgCj/eWyM8jp0nO779Bzu5y9A+lqvZPCCgCj/eWyM8jp0nO779BzuUaA5LBEsGhpRoEnSUUpR1Lg==",
- "achieved_goal": "[[0.40780428 0.02657108 0.5415058 ]\n [0.40780428 0.02657108 0.5415058 ]\n [0.40780428 0.02657108 0.5415058 ]\n [0.40780428 0.02657108 0.5415058 ]]",
- "desired_goal": "[[-1.6092155 -1.2173461 -1.2271092 ]\n [ 1.7264043 -0.42450124 0.5340593 ]\n [-0.5243611 0.44578227 -0.17724559]\n [-0.5236063 1.1703486 -1.0808359 ]]",
- "observation": "[[0.40780428 0.02657108 0.5415058 0.00997063 0.00255761 0.00207506]\n [0.40780428 0.02657108 0.5415058 0.00997063 0.00255761 0.00207506]\n [0.40780428 0.02657108 0.5415058 0.00997063 0.00255761 0.00207506]\n [0.40780428 0.02657108 0.5415058 0.00997063 0.00255761 0.00207506]]"
  },
  "_last_episode_starts": {
  ":type:": "<class 'numpy.ndarray'>",
@@ -66,9 +66,9 @@
  },
  "_last_original_obs": {
  ":type:": "<class 'collections.OrderedDict'>",
- ":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAYK3BvexjzL1hUFY+am/GuodO5L184Ic+h1BUPa6KAD1pB5c7o/BNvGUPrL0vra09lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAACUaA5LBEsGhpRoEnSUUpR1Lg==",
  "achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]",
- "desired_goal": "[[-0.09456897 -0.09979996 0.209291 ]\n [-0.00151394 -0.1114779 0.26538455]\n [ 0.05183461 0.03138226 0.00460904]\n [-0.01256958 -0.08401374 0.08480298]]",
  "observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"
  },
  "_episode_num": 0,
@@ -77,13 +77,13 @@
  "_current_progress_remaining": 0.0,
  "ep_info_buffer": {
  ":type:": "<class 'collections.deque'>",
- ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
  },
  "ep_success_buffer": {
  ":type:": "<class 'collections.deque'>",
  ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
  },
- "_n_updates": 50000,
  "n_steps": 5,
  "gamma": 0.99,
  "gae_lambda": 1.0,
 
  ":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=",
  "__module__": "stable_baselines3.common.policies",
  "__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
+ "__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x7ff778876700>",
  "__abstractmethods__": "frozenset()",
+ "_abc_impl": "<_abc._abc_data object at 0x7ff778877480>"
  },
  "verbose": 1,
  "policy_kwargs": {
 
  "_np_random": null
  },
  "n_envs": 4,
+ "num_timesteps": 1500000,
+ "_total_timesteps": 1500000,
  "_num_timesteps_at_start": 0,
  "seed": null,
  "action_noise": null,
+ "start_time": 1680124603703928597,
  "learning_rate": 0.0007,
  "tensorboard_log": null,
  "lr_schedule": {
 
  },
  "_last_obs": {
  ":type:": "<class 'collections.OrderedDict'>",
+ ":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA5KLYPlHnI73yfQA/5KLYPlHnI73yfQA/5KLYPlHnI73yfQA/5KLYPlHnI73yfQA/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAiQC3v9Nerb2yLEa/3Cuov7qFzD8Tfty/fuXbP38jDD6BfPk+JpXVvzQcez7s0bY/lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADkotg+UecjvfJ9AD+SyzI8nnryu/gJaDzkotg+UecjvfJ9AD+SyzI8nnryu/gJaDzkotg+UecjvfJ9AD+SyzI8nnryu/gJaDzkotg+UecjvfJ9AD+SyzI8nnryu/gJaDyUaA5LBEsGhpRoEnSUUpR1Lg==",
+ "achieved_goal": "[[ 0.42311776 -0.04001552 0.5019218 ]\n [ 0.42311776 -0.04001552 0.5019218 ]\n [ 0.42311776 -0.04001552 0.5019218 ]\n [ 0.42311776 -0.04001552 0.5019218 ]]",
+ "desired_goal": "[[-1.4297038 -0.08465352 -0.7741195 ]\n [-1.3138385 1.597831 -1.7225975 ]\n [ 1.717941 0.13685416 0.487278 ]\n [-1.6686141 0.24522477 1.4282813 ]]",
+ "observation": "[[ 0.42311776 -0.04001552 0.5019218 0.01091279 -0.00739987 0.01416253]\n [ 0.42311776 -0.04001552 0.5019218 0.01091279 -0.00739987 0.01416253]\n [ 0.42311776 -0.04001552 0.5019218 0.01091279 -0.00739987 0.01416253]\n [ 0.42311776 -0.04001552 0.5019218 0.01091279 -0.00739987 0.01416253]]"
  },
  "_last_episode_starts": {
  ":type:": "<class 'numpy.ndarray'>",
 
  },
  "_last_original_obs": {
  ":type:": "<class 'collections.OrderedDict'>",
+ ":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAKHgWvtNUOr2LpUU+YRCdvRvjB71sZwQ+zt0FPpiyqDzNUxw+SbBTPdbQZT0gPEI+lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAACUaA5LBEsGhpRoEnSUUpR1Lg==",
  "achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]",
+ "desired_goal": "[[-0.14694273 -0.04549105 0.19301431]\n [-0.0766914 -0.03317557 0.12930077]\n [ 0.13072893 0.02059297 0.15266342]\n [ 0.05168179 0.05610736 0.18968248]]",
  "observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"
  },
  "_episode_num": 0,
 
  "_current_progress_remaining": 0.0,
  "ep_info_buffer": {
  ":type:": "<class 'collections.deque'>",
+ ":serialized:": "gAWVHRAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIlnhA2ZTr+r+UhpRSlIwBbJRLMowBdJRHQLNt2VTaTOh1fZQoaAZoCWgPQwgJ/reSHbsGwJSGlFKUaBVLMmgWR0Czba06DGtIdX2UKGgGaAloD0MIJO1GH/MhBcCUhpRSlGgVSzJoFkdAs22AYXO4X3V9lChoBmgJaA9DCPLrh9hgwQjAlIaUUpRoFUsyaBZHQLNtUhJAdGR1fZQoaAZoCWgPQwgIsMivH+IDwJSGlFKUaBVLMmgWR0Czblu+mFajdX2UKGgGaAloD0MIvXDnwkiv9b+UhpRSlGgVSzJoFkdAs24voyKvV3V9lChoBmgJaA9DCPaYSGk2rwjAlIaUUpRoFUsyaBZHQLNuAuIAOrh1fZQoaAZoCWgPQwgz+tFwylz8v5SGlFKUaBVLMmgWR0CzbdS3w1BMdX2UKGgGaAloD0MIy59vC5Zq7L+UhpRSlGgVSzJoFkdAs27+goPTX3V9lChoBmgJaA9DCLsPQGoTZ/S/lIaUUpRoFUsyaBZHQLNu0mu1WsB1fZQoaAZoCWgPQwhgdeRIZ8ACwJSGlFKUaBVLMmgWR0CzbqW5+YtydX2UKGgGaAloD0MIAvOQKR8C+L+UhpRSlGgVSzJoFkdAs253g/C66XV9lChoBmgJaA9DCF8pyxDHevS/lIaUUpRoFUsyaBZHQLNvj4N7SiN1fZQoaAZoCWgPQwgSonxBC+kEwJSGlFKUaBVLMmgWR0Czb2NkvsZ6dX2UKGgGaAloD0MIwTi4dMz5AMCUhpRSlGgVSzJoFkdAs282hf0Eo3V9lChoBmgJaA9DCH15AfbRqfS/lIaUUpRoFUsyaBZHQLNvCFdLQHB1fZQoaAZoCWgPQwjFqkGY2334v5SGlFKUaBVLMmgWR0CzcFs45tFbdX2UKGgGaAloD0MIK061FmYREcCUhpRSlGgVSzJoFkdAs3AvtBv733V9lChoBmgJaA9DCJ3ZrtAHawvAlIaUUpRoFUsyaBZHQLNwA0SAYpF1fZQoaAZoCWgPQwjcuMX83BABwJSGlFKUaBVLMmgWR0Czb9ViKBNFdX2UKGgGaAloD0MIsoLfhhiv9L+UhpRSlGgVSzJoFkdAs3E3SLIgeXV9lChoBmgJaA9DCFHZsKayCAHAlIaUUpRoFUsyaBZHQLNxC11W8yx1fZQoaAZoCWgPQwjo3O16aQoJwJSGlFKUaBVLMmgWR0CzcN7yUcGUdX2UKGgGaAloD0MIuRrZlZZREcCUhpRSlGgVSzJoFkdAs3CxSZSeiHV9lChoBmgJaA9DCEsi+yDLAhPAlIaUUpRoFUsyaBZHQLNyGundfsx1fZQoaAZoCWgPQwjVJeMYyZ7xv5SGlFKUaBVLMmgWR0Czce8WO6uodX2UKGgGaAloD0MIBJFFmnjH97+UhpRSlGgVSzJoFkdAs3HC7L+xW3V9lChoBmgJaA9DCL7Z5sb0BP2/lIaUUpRoFUsyaBZHQLNxlP/JeVt1fZQoaAZoCWgPQwholZnS+pv2v5SGlFKUaBVLMmgWR0CzcyjundftdX2UKGgGaAloD0MIUfcBSG1iB8CUhpRSlGgVSzJoFkdAs3L9wT/Q0HV9lChoBmgJaA9DCHtJY7SOqgzAlIaUUpRoFUsyaBZHQLNy0UornT11fZQoaAZoCWgPQwhxdJXurlMJwJSGlFKUaBVLMmgWR0CzcqRQSBbwdX2UKGgGaAloD0MIMXiY9s19/L+UhpRSlGgVSzJoFkdAs3Qajua4MHV9lChoBmgJaA9DCNFY+zvbgwPAlIaUUpRoFUsyaBZHQLNz7vwVj7R1fZQoaAZoCWgPQwgNwtzu5Z4CwJSGlFKUaBVLMmgWR0Czc8KjBVMmdX2UKGgGaAloD0MIPBIvT+fKC8CUhpRSlGgVSzJoFkdAs3OU1O0sv3V9lChoBmgJaA9DCE4NNJ9z9/m/lIaUUpRoFUsyaBZHQLN1GQla8pV1fZQoaAZoCWgPQwi1xqATQgf/v5SGlFKUaBVLMmgWR0CzdO3G8274dX2UKGgGaAloD0MI5iSUvhCy87+UhpRSlGgVSzJoFkdAs3TBYq5LAnV9lChoBmgJaA9DCDzYYrfPKhHAlIaUUpRoFUsyaBZHQLN0k9CeEqV1fZQoaAZoCWgPQwgBNbVsrW8HwJSGlFKUaBVLMmgWR0CzdeAswtaqdX2UKGgGaAloD0MISIjyBS3k/r+UhpRSlGgVSzJoFkdAs3W0C/47BHV9lChoBmgJaA9DCHjuPVxyjBPAlIaUUpRoFUsyaBZHQLN1h0vXbud1fZQoaAZoCWgPQwhgyyvX2wYGwJSGlFKUaBVLMmgWR0CzdVkW/JvHdX2UKGgGaAloD0MINZpcjIEVAMCUhpRSlGgVSzJoFkdAs3ZqUliSaHV9lChoBmgJaA9DCLOWAtL+RwnAlIaUUpRoFUsyaBZHQLN2Pj2zv7Z1fZQoaAZoCWgPQwh7wac5eRH9v5SGlFKUaBVLMmgWR0CzdhFzySV4dX2UKGgGaAloD0MI97GC34a4AcCUhpRSlGgVSzJoFkdAs3XjRgJC0HV9lChoBmgJaA9DCE9cjlcgevG/lIaUUpRoFUsyaBZHQLN28UKiPAB1fZQoaAZoCWgPQwgPK9zykdTzv5SGlFKUaBVLMmgWR0CzdsVDv3JxdX2UKGgGaAloD0MI8pcW9Uku97+UhpRSlGgVSzJoFkdAs3aYi1RceXV9lChoBmgJaA9DCMBatWtCWu6/lIaUUpRoFUsyaBZHQLN2an889wF1fZQoaAZoCWgPQwjb2y3JAfsMwJSGlFKUaBVLMmgWR0Czd4LXQMQVdX2UKGgGaAloD0MIbhgFwePb8b+UhpRSlGgVSzJoFkdAs3dW1kUbk3V9lChoBmgJaA9DCKkyjLtBNPO/lIaUUpRoFUsyaBZHQLN3Khw2l2x1fZQoaAZoCWgPQwjvb9BefRwAwJSGlFKUaBVLMmgWR0Czdvv5DZ13dX2UKGgGaAloD0MIFTYDXJDt/L+UhpRSlGgVSzJoFkdAs3gieyzHCHV9lChoBmgJaA9DCIo+H2XEhfi/lIaUUpRoFUsyaBZHQLN39l6qsEJ1fZQoaAZoCWgPQwgrTrUWZkEAwJSGlFKUaBVLMmgWR0Czd8moR7JGdX2UKGgGaAloD0MIh6QWSibHAMCUhpRSlGgVSzJoFkdAs3ebhBJI2HV9lChoBmgJaA9DCJBq2O+J5RDAlIaUUpRoFUsyaBZHQLN4t3t8eCF1fZQoaAZoCWgPQwhQcLGiBlPuv5SGlFKUaBVLMmgWR0CzeItl7MPjdX2UKGgGaAloD0MI9YB5yJSvB8CUhpRSlGgVSzJoFkdAs3hes0YT
CnV9lChoBmgJaA9DCHLe/8cJcwPAlIaUUpRoFUsyaBZHQLN4MH4XXRR1fZQoaAZoCWgPQwiVKeYg6IgLwJSGlFKUaBVLMmgWR0CzeUCIDYAbdX2UKGgGaAloD0MIgVoMHqb9/r+UhpRSlGgVSzJoFkdAs3kUVLzwt3V9lChoBmgJaA9DCEG62LRSqBLAlIaUUpRoFUsyaBZHQLN455PuXu51fZQoaAZoCWgPQwg4gem0biMAwJSGlFKUaBVLMmgWR0CzeLlOfukUdX2UKGgGaAloD0MIaY1BJ4TuB8CUhpRSlGgVSzJoFkdAs3nNZIQOF3V9lChoBmgJaA9DCPnX8sr1dvm/lIaUUpRoFUsyaBZHQLN5oVJcxCZ1fZQoaAZoCWgPQwj0biwoDEr1v5SGlFKUaBVLMmgWR0CzeXS0OVgQdX2UKGgGaAloD0MIF2L1RxhG97+UhpRSlGgVSzJoFkdAs3lGlsP8RHV9lChoBmgJaA9DCF/ObFfoowrAlIaUUpRoFUsyaBZHQLN6baPjn3d1fZQoaAZoCWgPQwj8jXbc8Lvxv5SGlFKUaBVLMmgWR0CzekGovSMMdX2UKGgGaAloD0MIa+9TVWhgAcCUhpRSlGgVSzJoFkdAs3oUz1schnV9lChoBmgJaA9DCHAk0GBTp/e/lIaUUpRoFUsyaBZHQLN55q//Nqx1fZQoaAZoCWgPQwicxCCwckgBwJSGlFKUaBVLMmgWR0CzewT6zmfXdX2UKGgGaAloD0MIdXRcjexK/r+UhpRSlGgVSzJoFkdAs3rY+kgwGnV9lChoBmgJaA9DCFRx4xbzswjAlIaUUpRoFUsyaBZHQLN6rDxb0OF1fZQoaAZoCWgPQwifjscMVEYDwJSGlFKUaBVLMmgWR0Czen4pYs/ZdX2UKGgGaAloD0MIBTbn4JmQAcCUhpRSlGgVSzJoFkdAs3urE0iyIHV9lChoBmgJaA9DCHh95qxP+fW/lIaUUpRoFUsyaBZHQLN7fxNIsiB1fZQoaAZoCWgPQwj5npEIjSDwv5SGlFKUaBVLMmgWR0Cze1LUG3WndX2UKGgGaAloD0MIgo/BilMNBMCUhpRSlGgVSzJoFkdAs3skt4A0bnV9lChoBmgJaA9DCJBpbRrbCw7AlIaUUpRoFUsyaBZHQLN8TyKekHl1fZQoaAZoCWgPQwgDmZ1F73QLwJSGlFKUaBVLMmgWR0CzfCM7QswtdX2UKGgGaAloD0MIezGUE+0KAcCUhpRSlGgVSzJoFkdAs3v2rU9ZBHV9lChoBmgJaA9DCDXrjO+LywLAlIaUUpRoFUsyaBZHQLN7yMewLVp1fZQoaAZoCWgPQwi45SMp6UEGwJSGlFKUaBVLMmgWR0CzfNyf16E8dX2UKGgGaAloD0MIo+pXOh8+B8CUhpRSlGgVSzJoFkdAs3ywdFOO83V9lChoBmgJaA9DCP1NKETA4QnAlIaUUpRoFUsyaBZHQLN8g40Mw111fZQoaAZoCWgPQwgBa9WuCekDwJSGlFKUaBVLMmgWR0CzfFVGgBcSdX2UKGgGaAloD0MI/0C5bd+j9r+UhpRSlGgVSzJoFkdAs31fl90A93V9lChoBmgJaA9DCF/v/nivGvu/lIaUUpRoFUsyaBZHQLN9M2JBPbh1fZQoaAZoCWgPQwhxHHi13NkEwJSGlFKUaBVLMmgWR0CzfQZ40Mw2dX2UKGgGaAloD0MI8b2/QXvVCcCUhpRSlGgVSzJoFkdAs3zYK5TZQHV9lChoBmgJaA9DCI4hADj2TAHAlIaUUpRoFUsyaBZHQLN953UhFE11fZQoaAZoCWgPQwg+CAH5Eqrtv5SGlFKUaBVLMmgWR0CzfbtPDYRNdX2UKGgGaAloD0MInx9GCI/29L+UhpRSlGgVSzJoFkdAs32OekHlfnV9lChoBmgJaA9DCKaaWUsBKQPAlIaUUpRoFUsyaBZHQLN9YEjPfKp1ZS4="
  },
  "ep_success_buffer": {
  ":type:": "<class 'collections.deque'>",
  ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
  },
+ "_n_updates": 75000,
  "n_steps": 5,
  "gamma": 0.99,
  "gae_lambda": 1.0,
a2c-PandaReachDense-v2/policy.optimizer.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:1bbc0bba5711f1733cb374109162f82ebc8deca81f5b2cf58b2004332c490171
+oid sha256:4f4d0de2a851b448564668da8eb4af030227100158f90fb4388ead37f1aba2b5
 size 44734
a2c-PandaReachDense-v2/policy.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f802dc9cb085ad0f7451f60ef6c395bb774309340d2075e1d5be7b6ed1e5c5e8
+oid sha256:0f0e749698aa07bdd09212778b5d26c48848bd9fec248bde612ece2dac95c8d1
 size 46014
config.json CHANGED
@@ -1 +1 @@
- {"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=", "__module__": "stable_baselines3.common.policies", "__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x7f23e2402280>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x7f23e23fdf00>"}, "verbose": 1, "policy_kwargs": {":type:": "<class 'dict'>", ":serialized:": "gAWVgQAAAAAAAAB9lCiMD29wdGltaXplcl9jbGFzc5SME3RvcmNoLm9wdGltLnJtc3Byb3CUjAdSTVNwcm9wlJOUjBBvcHRpbWl6ZXJfa3dhcmdzlH2UKIwFYWxwaGGURz/vrhR64UeujANlcHOURz7k+LWI42jxjAx3ZWlnaHRfZGVjYXmUSwB1dS4=", "optimizer_class": "<class 'torch.optim.rmsprop.RMSprop'>", "optimizer_kwargs": {"alpha": 0.99, "eps": 1e-05, "weight_decay": 0}}, "observation_space": {":type:": "<class 'gym.spaces.dict.Dict'>", ":serialized:": 
"gAWVUgMAAAAAAACMD2d5bS5zcGFjZXMuZGljdJSMBERpY3SUk5QpgZR9lCiMBnNwYWNlc5SMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwOZ3ltLnNwYWNlcy5ib3iUjANCb3iUk5QpgZR9lCiMBWR0eXBllIwFbnVtcHmUaBCTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYowGX3NoYXBllEsDhZSMA2xvd5SMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFUsDhZSMAUOUdJRSlIwEaGlnaJRoHSiWDAAAAAAAAAAAACBBAAAgQQAAIEGUaBVLA4WUaCB0lFKUjA1ib3VuZGVkX2JlbG93lGgdKJYDAAAAAAAAAAEBAZRoEowCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksDhZRoIHSUUpSMDWJvdW5kZWRfYWJvdmWUaB0olgMAAAAAAAAAAQEBlGgsSwOFlGggdJRSlIwKX25wX3JhbmRvbZROdWKMDGRlc2lyZWRfZ29hbJRoDSmBlH2UKGgQaBVoGEsDhZRoGmgdKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFUsDhZRoIHSUUpRoI2gdKJYMAAAAAAAAAAAAIEEAACBBAAAgQZRoFUsDhZRoIHSUUpRoKGgdKJYDAAAAAAAAAAEBAZRoLEsDhZRoIHSUUpRoMmgdKJYDAAAAAAAAAAEBAZRoLEsDhZRoIHSUUpRoN051YowLb2JzZXJ2YXRpb26UaA0pgZR9lChoEGgVaBhLBoWUaBpoHSiWGAAAAAAAAAAAACDBAAAgwQAAIMEAACDBAAAgwQAAIMGUaBVLBoWUaCB0lFKUaCNoHSiWGAAAAAAAAAAAACBBAAAgQQAAIEEAACBBAAAgQQAAIEGUaBVLBoWUaCB0lFKUaChoHSiWBgAAAAAAAAABAQEBAQGUaCxLBoWUaCB0lFKUaDJoHSiWBgAAAAAAAAABAQEBAQGUaCxLBoWUaCB0lFKUaDdOdWJ1aBhOaBBOaDdOdWIu", "spaces": "OrderedDict([('achieved_goal', Box([-10. -10. -10.], [10. 10. 10.], (3,), float32)), ('desired_goal', Box([-10. -10. -10.], [10. 10. 10.], (3,), float32)), ('observation', Box([-10. -10. -10. -10. -10. -10.], [10. 10. 10. 10. 10. 10.], (6,), float32))])", "_shape": null, "dtype": null, "_np_random": null}, "action_space": {":type:": "<class 'gym.spaces.box.Box'>", ":serialized:": "gAWVbQEAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLA4WUjANsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWDAAAAAAAAAAAAIC/AACAvwAAgL+UaApLA4WUjAFDlHSUUpSMBGhpZ2iUaBIolgwAAAAAAAAAAACAPwAAgD8AAIA/lGgKSwOFlGgVdJRSlIwNYm91bmRlZF9iZWxvd5RoEiiWAwAAAAAAAAABAQGUaAeMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLA4WUaBV0lFKUjA1ib3VuZGVkX2Fib3ZllGgSKJYDAAAAAAAAAAEBAZRoIUsDhZRoFXSUUpSMCl9ucF9yYW5kb22UTnViLg==", "dtype": "float32", "_shape": [3], "low": "[-1. -1. -1.]", "high": "[1. 1. 
1.]", "bounded_below": "[ True True True]", "bounded_above": "[ True True True]", "_np_random": null}, "n_envs": 4, "num_timesteps": 0, "_total_timesteps": 1000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1680109445969967231, "learning_rate": 0.0007, "tensorboard_log": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVwwIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOS9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZSMBGZ1bmOUS4JDAgABlIwDdmFslIWUKXSUUpR9lCiMC19fcGFja2FnZV9flIwYc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9ulIwIX19uYW1lX1+UjB5zdGFibGVfYmFzZWxpbmVzMy5jb21tb24udXRpbHOUjAhfX2ZpbGVfX5SMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOS9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZR1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgffZR9lChoFmgNjAxfX3F1YWxuYW1lX1+UjBljb25zdGFudF9mbi48bG9jYWxzPi5mdW5jlIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5RoF4wHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5RoAIwKX21ha2VfY2VsbJSTlEc/RvAGjbi6x4WUUpSFlIwXX2Nsb3VkcGlja2xlX3N1Ym1vZHVsZXOUXZSMC19fZ2xvYmFsc19flH2UdYaUhlIwLg=="}, "_last_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAAucvQPpar2TwgoAo/ucvQPpar2TwgoAo/ucvQPpar2TwgoAo/ucvQPpar2TwgoAo/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAxvrNv//Rm7/qEZ2/0frcPzpY2b4cuAg/hzwGv5M95D7efzW+EAsGv/zNlT/VWIq/lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAAC5y9A+lqvZPCCgCj/eWyM8jp0nO779Bzu5y9A+lqvZPCCgCj/eWyM8jp0nO779Bzu5y9A+lqvZPCCgCj/eWyM8jp0nO779Bzu5y9A+lqvZPCCgCj/eWyM8jp0nO779BzuUaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[0.40780428 0.02657108 0.5415058 ]\n [0.40780428 0.02657108 0.5415058 ]\n [0.40780428 0.02657108 0.5415058 ]\n [0.40780428 0.02657108 0.5415058 ]]", "desired_goal": "[[-1.6092155 -1.2173461 -1.2271092 ]\n [ 1.7264043 -0.42450124 0.5340593 ]\n [-0.5243611 0.44578227 -0.17724559]\n [-0.5236063 1.1703486 -1.0808359 ]]", "observation": "[[0.40780428 0.02657108 0.5415058 0.00997063 0.00255761 0.00207506]\n [0.40780428 0.02657108 0.5415058 0.00997063 0.00255761 0.00207506]\n [0.40780428 0.02657108 0.5415058 0.00997063 0.00255761 0.00207506]\n [0.40780428 0.02657108 0.5415058 0.00997063 0.00255761 0.00207506]]"}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVdwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYEAAAAAAAAAAEBAQGUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKULg=="}, "_last_original_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": 
"gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAYK3BvexjzL1hUFY+am/GuodO5L184Ic+h1BUPa6KAD1pB5c7o/BNvGUPrL0vra09lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAACUaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]", "desired_goal": "[[-0.09456897 -0.09979996 0.209291 ]\n [-0.00151394 -0.1114779 0.26538455]\n [ 0.05183461 0.03138226 0.00460904]\n [-0.01256958 -0.08401374 0.08480298]]", "observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"}, "_episode_num": 0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": 0.0, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "ep_success_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 50000, "n_steps": 5, "gamma": 0.99, "gae_lambda": 1.0, "ent_coef": 0.0, "vf_coef": 0.5, "max_grad_norm": 0.5, "normalize_advantage": false, "system_info": {"OS": "Linux-5.10.147+-x86_64-with-glibc2.31 # 1 SMP Sat Dec 10 16:00:40 UTC 2022", "Python": "3.9.16", "Stable-Baselines3": "1.7.0", "PyTorch": "1.13.1+cu116", "GPU Enabled": "True", "Numpy": "1.22.4", "Gym": "0.21.0"}}
 
+ {"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=", "__module__": "stable_baselines3.common.policies", "__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x7ff778876700>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x7ff778877480>"}, "verbose": 1, "policy_kwargs": {":type:": "<class 'dict'>", ":serialized:": "gAWVgQAAAAAAAAB9lCiMD29wdGltaXplcl9jbGFzc5SME3RvcmNoLm9wdGltLnJtc3Byb3CUjAdSTVNwcm9wlJOUjBBvcHRpbWl6ZXJfa3dhcmdzlH2UKIwFYWxwaGGURz/vrhR64UeujANlcHOURz7k+LWI42jxjAx3ZWlnaHRfZGVjYXmUSwB1dS4=", "optimizer_class": "<class 'torch.optim.rmsprop.RMSprop'>", "optimizer_kwargs": {"alpha": 0.99, "eps": 1e-05, "weight_decay": 0}}, "observation_space": {":type:": "<class 'gym.spaces.dict.Dict'>", ":serialized:": 
"gAWVUgMAAAAAAACMD2d5bS5zcGFjZXMuZGljdJSMBERpY3SUk5QpgZR9lCiMBnNwYWNlc5SMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwOZ3ltLnNwYWNlcy5ib3iUjANCb3iUk5QpgZR9lCiMBWR0eXBllIwFbnVtcHmUaBCTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYowGX3NoYXBllEsDhZSMA2xvd5SMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFUsDhZSMAUOUdJRSlIwEaGlnaJRoHSiWDAAAAAAAAAAAACBBAAAgQQAAIEGUaBVLA4WUaCB0lFKUjA1ib3VuZGVkX2JlbG93lGgdKJYDAAAAAAAAAAEBAZRoEowCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksDhZRoIHSUUpSMDWJvdW5kZWRfYWJvdmWUaB0olgMAAAAAAAAAAQEBlGgsSwOFlGggdJRSlIwKX25wX3JhbmRvbZROdWKMDGRlc2lyZWRfZ29hbJRoDSmBlH2UKGgQaBVoGEsDhZRoGmgdKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFUsDhZRoIHSUUpRoI2gdKJYMAAAAAAAAAAAAIEEAACBBAAAgQZRoFUsDhZRoIHSUUpRoKGgdKJYDAAAAAAAAAAEBAZRoLEsDhZRoIHSUUpRoMmgdKJYDAAAAAAAAAAEBAZRoLEsDhZRoIHSUUpRoN051YowLb2JzZXJ2YXRpb26UaA0pgZR9lChoEGgVaBhLBoWUaBpoHSiWGAAAAAAAAAAAACDBAAAgwQAAIMEAACDBAAAgwQAAIMGUaBVLBoWUaCB0lFKUaCNoHSiWGAAAAAAAAAAAACBBAAAgQQAAIEEAACBBAAAgQQAAIEGUaBVLBoWUaCB0lFKUaChoHSiWBgAAAAAAAAABAQEBAQGUaCxLBoWUaCB0lFKUaDJoHSiWBgAAAAAAAAABAQEBAQGUaCxLBoWUaCB0lFKUaDdOdWJ1aBhOaBBOaDdOdWIu", "spaces": "OrderedDict([('achieved_goal', Box([-10. -10. -10.], [10. 10. 10.], (3,), float32)), ('desired_goal', Box([-10. -10. -10.], [10. 10. 10.], (3,), float32)), ('observation', Box([-10. -10. -10. -10. -10. -10.], [10. 10. 10. 10. 10. 10.], (6,), float32))])", "_shape": null, "dtype": null, "_np_random": null}, "action_space": {":type:": "<class 'gym.spaces.box.Box'>", ":serialized:": "gAWVbQEAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLA4WUjANsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWDAAAAAAAAAAAAIC/AACAvwAAgL+UaApLA4WUjAFDlHSUUpSMBGhpZ2iUaBIolgwAAAAAAAAAAACAPwAAgD8AAIA/lGgKSwOFlGgVdJRSlIwNYm91bmRlZF9iZWxvd5RoEiiWAwAAAAAAAAABAQGUaAeMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLA4WUaBV0lFKUjA1ib3VuZGVkX2Fib3ZllGgSKJYDAAAAAAAAAAEBAZRoIUsDhZRoFXSUUpSMCl9ucF9yYW5kb22UTnViLg==", "dtype": "float32", "_shape": [3], "low": "[-1. -1. -1.]", "high": "[1. 1. 
1.]", "bounded_below": "[ True True True]", "bounded_above": "[ True True True]", "_np_random": null}, "n_envs": 4, "num_timesteps": 1500000, "_total_timesteps": 1500000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1680124603703928597, "learning_rate": 0.0007, "tensorboard_log": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVwwIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOS9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZSMBGZ1bmOUS4JDAgABlIwDdmFslIWUKXSUUpR9lCiMC19fcGFja2FnZV9flIwYc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9ulIwIX19uYW1lX1+UjB5zdGFibGVfYmFzZWxpbmVzMy5jb21tb24udXRpbHOUjAhfX2ZpbGVfX5SMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOS9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZR1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgffZR9lChoFmgNjAxfX3F1YWxuYW1lX1+UjBljb25zdGFudF9mbi48bG9jYWxzPi5mdW5jlIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5RoF4wHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5RoAIwKX21ha2VfY2VsbJSTlEc/RvAGjbi6x4WUUpSFlIwXX2Nsb3VkcGlja2xlX3N1Ym1vZHVsZXOUXZSMC19fZ2xvYmFsc19flH2UdYaUhlIwLg=="}, "_last_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA5KLYPlHnI73yfQA/5KLYPlHnI73yfQA/5KLYPlHnI73yfQA/5KLYPlHnI73yfQA/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAiQC3v9Nerb2yLEa/3Cuov7qFzD8Tfty/fuXbP38jDD6BfPk+JpXVvzQcez7s0bY/lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADkotg+UecjvfJ9AD+SyzI8nnryu/gJaDzkotg+UecjvfJ9AD+SyzI8nnryu/gJaDzkotg+UecjvfJ9AD+SyzI8nnryu/gJaDzkotg+UecjvfJ9AD+SyzI8nnryu/gJaDyUaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[ 0.42311776 -0.04001552 0.5019218 ]\n [ 0.42311776 -0.04001552 0.5019218 ]\n [ 0.42311776 -0.04001552 0.5019218 ]\n [ 0.42311776 -0.04001552 0.5019218 ]]", "desired_goal": "[[-1.4297038 -0.08465352 -0.7741195 ]\n [-1.3138385 1.597831 -1.7225975 ]\n [ 1.717941 0.13685416 0.487278 ]\n [-1.6686141 0.24522477 1.4282813 ]]", "observation": "[[ 0.42311776 -0.04001552 0.5019218 0.01091279 -0.00739987 0.01416253]\n [ 0.42311776 -0.04001552 0.5019218 0.01091279 -0.00739987 0.01416253]\n [ 0.42311776 -0.04001552 0.5019218 0.01091279 -0.00739987 0.01416253]\n [ 0.42311776 -0.04001552 0.5019218 0.01091279 -0.00739987 0.01416253]]"}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVdwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYEAAAAAAAAAAEBAQGUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKULg=="}, "_last_original_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": 
"gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAKHgWvtNUOr2LpUU+YRCdvRvjB71sZwQ+zt0FPpiyqDzNUxw+SbBTPdbQZT0gPEI+lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAACUaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]", "desired_goal": "[[-0.14694273 -0.04549105 0.19301431]\n [-0.0766914 -0.03317557 0.12930077]\n [ 0.13072893 0.02059297 0.15266342]\n [ 0.05168179 0.05610736 0.18968248]]", "observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"}, "_episode_num": 0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": 0.0, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVHRAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIlnhA2ZTr+r+UhpRSlIwBbJRLMowBdJRHQLNt2VTaTOh1fZQoaAZoCWgPQwgJ/reSHbsGwJSGlFKUaBVLMmgWR0Czba06DGtIdX2UKGgGaAloD0MIJO1GH/MhBcCUhpRSlGgVSzJoFkdAs22AYXO4X3V9lChoBmgJaA9DCPLrh9hgwQjAlIaUUpRoFUsyaBZHQLNtUhJAdGR1fZQoaAZoCWgPQwgIsMivH+IDwJSGlFKUaBVLMmgWR0Czblu+mFajdX2UKGgGaAloD0MIvXDnwkiv9b+UhpRSlGgVSzJoFkdAs24voyKvV3V9lChoBmgJaA9DCPaYSGk2rwjAlIaUUpRoFUsyaBZHQLNuAuIAOrh1fZQoaAZoCWgPQwgz+tFwylz8v5SGlFKUaBVLMmgWR0CzbdS3w1BMdX2UKGgGaAloD0MIy59vC5Zq7L+UhpRSlGgVSzJoFkdAs27+goPTX3V9lChoBmgJaA9DCLsPQGoTZ/S/lIaUUpRoFUsyaBZHQLNu0mu1WsB1fZQoaAZoCWgPQwhgdeRIZ8ACwJSGlFKUaBVLMmgWR0CzbqW5+YtydX2UKGgGaAloD0MIAvOQKR8C+L+UhpRSlGgVSzJoFkdAs253g/C66XV9lChoBmgJaA9DCF8pyxDHevS/lIaUUpRoFUsyaBZHQLNvj4N7SiN1fZQoaAZoCWgPQwgSonxBC+kEwJSGlFKUaBVLMmgWR0Czb2NkvsZ6dX2UKGgGaAloD0MIwTi4dMz5AMCUhpRSlGgVSzJoFkdAs282hf0Eo3V9lChoBmgJaA9DCH15AfbRqfS/lIaUUpRoFUsyaBZHQLNvCFdLQHB1fZQoaAZoCWgPQwjFqkGY2334v5SGlFKUaBVLMmgWR0CzcFs45tFbdX2UKGgGaAloD0MIK061FmYREcCUhpRSlGgVSzJoFkdAs3AvtBv733V9lChoBmgJaA9DCJ3ZrtAHawvAlIaUUpRoFUsyaBZHQLNwA0SAYpF1fZQoaAZoCWgPQwjcuMX83BABwJSGlFKUaBVLMmgWR0Czb9ViKBNFdX2UKGgGaAloD0MIsoLfhhiv9L+UhpRSlGgVSzJoFkdAs3E3SLIgeXV9lChoBmgJaA9DCFHZsKayCAHAlIaUUpRoFUsyaBZHQLNxC11W8yx1fZQoaAZoCWgPQwjo3O16aQoJwJSGlFKUaBVLMmgWR0CzcN7yUcGUdX2UKGgGaAloD0MIuRrZlZZREcCUhpRSlGgVSzJoFkdAs3CxSZSeiHV9lChoBmgJaA9DCEsi+yDLAhPAlIaUUpRoFUsyaBZHQLNyGundfsx1fZQoaAZoCWgPQwjVJeMYyZ7xv5SGlFKUaBVLMmgWR0Czce8WO6uodX2UKGgGaAloD0MIBJFFmnjH97+UhpRSlGgVSzJoFkdAs3HC7L+xW3V9lChoBmgJaA9DCL7Z5sb0BP2/lIaUUpRoFUsyaBZHQLNxlP/JeVt1fZQoaAZoCWgPQwholZnS+pv2v5SGlFKUaBVLMmgWR0CzcyjundftdX2UKGgGaAloD0MIUfcBSG1iB8CUhpRSlGgVSzJoFkdAs3L9wT/Q0HV9lChoBmgJaA9DCHtJY7SOqgzAlIaUUpRoFUsyaBZHQLNy0UornT11fZQoaAZoCWgPQwhxdJXurlMJwJSGlFKUaBVLMmgWR0CzcqRQSBbwdX2UKGgGaAloD0MIMXiY9s19/L+UhpRSlGgVSzJoFkdAs3Qajua4MHV9lChoBmgJaA9DCNFY+zvbgwPAlIaUUpRoFUsyaBZHQLNz7vwVj7R1fZQ
oaAZoCWgPQwgNwtzu5Z4CwJSGlFKUaBVLMmgWR0Czc8KjBVMmdX2UKGgGaAloD0MIPBIvT+fKC8CUhpRSlGgVSzJoFkdAs3OU1O0sv3V9lChoBmgJaA9DCE4NNJ9z9/m/lIaUUpRoFUsyaBZHQLN1GQla8pV1fZQoaAZoCWgPQwi1xqATQgf/v5SGlFKUaBVLMmgWR0CzdO3G8274dX2UKGgGaAloD0MI5iSUvhCy87+UhpRSlGgVSzJoFkdAs3TBYq5LAnV9lChoBmgJaA9DCDzYYrfPKhHAlIaUUpRoFUsyaBZHQLN0k9CeEqV1fZQoaAZoCWgPQwgBNbVsrW8HwJSGlFKUaBVLMmgWR0CzdeAswtaqdX2UKGgGaAloD0MISIjyBS3k/r+UhpRSlGgVSzJoFkdAs3W0C/47BHV9lChoBmgJaA9DCHjuPVxyjBPAlIaUUpRoFUsyaBZHQLN1h0vXbud1fZQoaAZoCWgPQwhgyyvX2wYGwJSGlFKUaBVLMmgWR0CzdVkW/JvHdX2UKGgGaAloD0MINZpcjIEVAMCUhpRSlGgVSzJoFkdAs3ZqUliSaHV9lChoBmgJaA9DCLOWAtL+RwnAlIaUUpRoFUsyaBZHQLN2Pj2zv7Z1fZQoaAZoCWgPQwh7wac5eRH9v5SGlFKUaBVLMmgWR0CzdhFzySV4dX2UKGgGaAloD0MI97GC34a4AcCUhpRSlGgVSzJoFkdAs3XjRgJC0HV9lChoBmgJaA9DCE9cjlcgevG/lIaUUpRoFUsyaBZHQLN28UKiPAB1fZQoaAZoCWgPQwgPK9zykdTzv5SGlFKUaBVLMmgWR0CzdsVDv3JxdX2UKGgGaAloD0MI8pcW9Uku97+UhpRSlGgVSzJoFkdAs3aYi1RceXV9lChoBmgJaA9DCMBatWtCWu6/lIaUUpRoFUsyaBZHQLN2an889wF1fZQoaAZoCWgPQwjb2y3JAfsMwJSGlFKUaBVLMmgWR0Czd4LXQMQVdX2UKGgGaAloD0MIbhgFwePb8b+UhpRSlGgVSzJoFkdAs3dW1kUbk3V9lChoBmgJaA9DCKkyjLtBNPO/lIaUUpRoFUsyaBZHQLN3Khw2l2x1fZQoaAZoCWgPQwjvb9BefRwAwJSGlFKUaBVLMmgWR0Czdvv5DZ13dX2UKGgGaAloD0MIFTYDXJDt/L+UhpRSlGgVSzJoFkdAs3gieyzHCHV9lChoBmgJaA9DCIo+H2XEhfi/lIaUUpRoFUsyaBZHQLN39l6qsEJ1fZQoaAZoCWgPQwgrTrUWZkEAwJSGlFKUaBVLMmgWR0Czd8moR7JGdX2UKGgGaAloD0MIh6QWSibHAMCUhpRSlGgVSzJoFkdAs3ebhBJI2HV9lChoBmgJaA9DCJBq2O+J5RDAlIaUUpRoFUsyaBZHQLN4t3t8eCF1fZQoaAZoCWgPQwhQcLGiBlPuv5SGlFKUaBVLMmgWR0CzeItl7MPjdX2UKGgGaAloD0MI9YB5yJSvB8CUhpRSlGgVSzJoFkdAs3hes0YTCnV9lChoBmgJaA9DCHLe/8cJcwPAlIaUUpRoFUsyaBZHQLN4MH4XXRR1fZQoaAZoCWgPQwiVKeYg6IgLwJSGlFKUaBVLMmgWR0CzeUCIDYAbdX2UKGgGaAloD0MIgVoMHqb9/r+UhpRSlGgVSzJoFkdAs3kUVLzwt3V9lChoBmgJaA9DCEG62LRSqBLAlIaUUpRoFUsyaBZHQLN455PuXu51fZQoaAZoCWgPQwg4gem0biMAwJSGlFKUaBVLMmgWR0CzeLlOfukUdX2UKGgGaAloD0MIaY1BJ4TuB8CUhpRSlGgVSzJoFkdAs3nNZIQOF3V9lChoBmgJaA9DCPnX8sr1dvm/lIaUUpRoFUsyaBZHQLN5oVJcxCZ1fZQoaAZoCWgPQwj0biwoDEr1v5SGlFKUaBVLMmgWR0CzeXS0OVgQdX2UKGgGaAloD0MIF2L1RxhG97+UhpRSlGgVSzJoFkdAs3lGlsP8RHV9lChoBmgJaA9DCF/ObFfoowrAlIaUUpRoFUsyaBZHQLN6baPjn3d1fZQoaAZoCWgPQwj8jXbc8Lvxv5SGlFKUaBVLMmgWR0CzekGovSMMdX2UKGgGaAloD0MIa+9TVWhgAcCUhpRSlGgVSzJoFkdAs3oUz1schnV9lChoBmgJaA9DCHAk0GBTp/e/lIaUUpRoFUsyaBZHQLN55q//Nqx1fZQoaAZoCWgPQwicxCCwckgBwJSGlFKUaBVLMmgWR0CzewT6zmfXdX2UKGgGaAloD0MIdXRcjexK/r+UhpRSlGgVSzJoFkdAs3rY+kgwGnV9lChoBmgJaA9DCFRx4xbzswjAlIaUUpRoFUsyaBZHQLN6rDxb0OF1fZQoaAZoCWgPQwifjscMVEYDwJSGlFKUaBVLMmgWR0Czen4pYs/ZdX2UKGgGaAloD0MIBTbn4JmQAcCUhpRSlGgVSzJoFkdAs3urE0iyIHV9lChoBmgJaA9DCHh95qxP+fW/lIaUUpRoFUsyaBZHQLN7fxNIsiB1fZQoaAZoCWgPQwj5npEIjSDwv5SGlFKUaBVLMmgWR0Cze1LUG3WndX2UKGgGaAloD0MIgo/BilMNBMCUhpRSlGgVSzJoFkdAs3skt4A0bnV9lChoBmgJaA9DCJBpbRrbCw7AlIaUUpRoFUsyaBZHQLN8TyKekHl1fZQoaAZoCWgPQwgDmZ1F73QLwJSGlFKUaBVLMmgWR0CzfCM7QswtdX2UKGgGaAloD0MIezGUE+0KAcCUhpRSlGgVSzJoFkdAs3v2rU9ZBHV9lChoBmgJaA9DCDXrjO+LywLAlIaUUpRoFUsyaBZHQLN7yMewLVp1fZQoaAZoCWgPQwi45SMp6UEGwJSGlFKUaBVLMmgWR0CzfNyf16E8dX2UKGgGaAloD0MIo+pXOh8+B8CUhpRSlGgVSzJoFkdAs3ywdFOO83V9lChoBmgJaA9DCP1NKETA4QnAlIaUUpRoFUsyaBZHQLN8g40Mw111fZQoaAZoCWgPQwgBa9WuCekDwJSGlFKUaBVLMmgWR0CzfFVGgBcSdX2UKGgGaAloD0MI/0C5bd+j9r+UhpRSlGgVSzJoFkdAs31fl90A93V9lChoBmgJaA9DCF/v/nivGvu/lIaUUpRoFUsyaBZHQLN9M2JBPbh1fZQoaAZoCWgPQwhxHHi13NkEwJSGlFKUaBVLMmgWR0CzfQZ40Mw2dX2UKGgGaAloD0MI8b2/QXvVCcCUhpRSlGgVSzJoFkdAs3zYK5TZQHV9lChoBmgJaA9DCI4hADj2TAHAlIaUUpRoFUsyaBZHQLN953UhFE11fZQoaAZoCWgPQwg+CAH5Eqrtv5SGlFKUaBVLMmgWR0CzfbtPDYRNdX2UKGgGaAloD0MInx9GCI/29L+UhpRSlGgVSzJoFkdAs32OekHlfnV9lChoBmgJaA9DCKaaWUsBKQPAlIaUUpRoFUsyaBZHQLN9YEjPfKp1ZS4="}, "ep_success_buffer": 
{":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 75000, "n_steps": 5, "gamma": 0.99, "gae_lambda": 1.0, "ent_coef": 0.0, "vf_coef": 0.5, "max_grad_norm": 0.5, "normalize_advantage": false, "system_info": {"OS": "Linux-5.10.147+-x86_64-with-glibc2.31 # 1 SMP Sat Dec 10 16:00:40 UTC 2022", "Python": "3.9.16", "Stable-Baselines3": "1.7.0", "PyTorch": "1.13.1+cu116", "GPU Enabled": "True", "Numpy": "1.22.4", "Gym": "0.21.0"}}
replay.mp4 CHANGED
Binary files a/replay.mp4 and b/replay.mp4 differ
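The replay is a binary file, so the diff only records that it changed. Below is a hedged sketch of how a rollout video of this kind can be regenerated with Stable-Baselines3's VecVideoRecorder; the output folder, video length, and wrapper order are illustrative choices, not taken from this repository.

```python
# Illustrative regeneration of a replay.mp4-style rollout video.
# Assumptions: panda-gym is installed; folder name and video_length are arbitrary.
import panda_gym  # noqa: F401  (registers PandaReachDense-v2 with gym)

from stable_baselines3 import A2C
from stable_baselines3.common.env_util import make_vec_env
from stable_baselines3.common.vec_env import VecNormalize, VecVideoRecorder

env = make_vec_env("PandaReachDense-v2", n_envs=1)
env = VecNormalize.load("vec_normalize.pkl", env)
env.training = False                      # freeze normalization statistics
env = VecVideoRecorder(
    env,
    "videos/",
    record_video_trigger=lambda step: step == 0,
    video_length=1000,
    name_prefix="replay",
)

model = A2C.load("a2c-PandaReachDense-v2", env=env)
obs = env.reset()
for _ in range(1000):
    action, _states = model.predict(obs, deterministic=True)
    obs, rewards, dones, infos = env.step(action)
env.close()  # flushes and finalizes the recorded .mp4
```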
 
results.json CHANGED
@@ -1 +1 @@
-{"mean_reward": -4.136206184700131, "std_reward": 1.1967026923777946, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2023-03-29T17:14:56.173684"}
+{"mean_reward": -2.094399914704263, "std_reward": 0.3969215414372762, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2023-03-29T22:39:56.357556"}
vec_normalize.pkl CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ff88809c9081d122f101dca4f3a7b066225e3f0a88025fe968751cd7c3202dea
+oid sha256:4997cd638b199e9a39d139432ccdc19b29ebf529c841b60607351d613c56c32f
 size 3056