kreepy committed
Commit 063fa64
Parent: 8cd8bec

Initial commit
README.md ADDED
@@ -0,0 +1,37 @@
+ ---
+ library_name: stable-baselines3
+ tags:
+ - AntBulletEnv-v0
+ - deep-reinforcement-learning
+ - reinforcement-learning
+ - stable-baselines3
+ model-index:
+ - name: A2C
+   results:
+   - task:
+       type: reinforcement-learning
+       name: reinforcement-learning
+     dataset:
+       name: AntBulletEnv-v0
+       type: AntBulletEnv-v0
+     metrics:
+     - type: mean_reward
+       value: 1811.94 +/- 114.06
+       name: mean_reward
+       verified: false
+ ---
+
+ # **A2C** Agent playing **AntBulletEnv-v0**
+ This is a trained model of an **A2C** agent playing **AntBulletEnv-v0**
+ using the [stable-baselines3 library](https://github.com/DLR-RM/stable-baselines3).
+
+ ## Usage (with Stable-Baselines3)
+ TODO: Add your code
+
+
+ ```python
+ from stable_baselines3 import ...
+ from huggingface_sb3 import load_from_hub
+
+ ...
+ ```
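A minimal, self-contained sketch of the usage section above is given below. It loads the `a2c-AntBulletEnv-v0.zip` checkpoint and the `vec_normalize.pkl` statistics shipped in this repository; the Hub repository id `kreepy/a2c-AntBulletEnv-v0` is an assumption (replace it with the actual repo id), and PyBullet must be installed so that `AntBulletEnv-v0` is registered.

```python
import gym
import pybullet_envs  # noqa: F401 -- importing this registers AntBulletEnv-v0 (assumes pybullet is installed)

from huggingface_sb3 import load_from_hub
from stable_baselines3 import A2C
from stable_baselines3.common.evaluation import evaluate_policy
from stable_baselines3.common.vec_env import DummyVecEnv, VecNormalize

# Hypothetical repo id -- substitute the actual Hub repository for this model.
repo_id = "kreepy/a2c-AntBulletEnv-v0"
checkpoint = load_from_hub(repo_id, "a2c-AntBulletEnv-v0.zip")
stats_path = load_from_hub(repo_id, "vec_normalize.pkl")

# Rebuild the evaluation environment and restore the saved normalization statistics.
env = DummyVecEnv([lambda: gym.make("AntBulletEnv-v0")])
env = VecNormalize.load(stats_path, env)
env.training = False     # do not update the running statistics at evaluation time
env.norm_reward = False  # report raw (unnormalized) episode rewards

model = A2C.load(checkpoint)

mean_reward, std_reward = evaluate_policy(model, env, n_eval_episodes=10, deterministic=True)
print(f"mean_reward={mean_reward:.2f} +/- {std_reward:.2f}")
```

This mirrors the evaluation settings recorded in `results.json` (10 deterministic episodes).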
a2c-AntBulletEnv-v0.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fe3876270b5466600b38346778b6826ece6bbf762b937cfc79df6c78c9298507
+ size 124868
a2c-AntBulletEnv-v0/_stable_baselines3_version ADDED
@@ -0,0 +1 @@
+ 1.8.0
a2c-AntBulletEnv-v0/data ADDED
@@ -0,0 +1,105 @@
+ {
+ "policy_class": {
+ ":type:": "<class 'abc.ABCMeta'>",
+ ":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==",
+ "__module__": "stable_baselines3.common.policies",
+ "__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
+ "__init__": "<function ActorCriticPolicy.__init__ at 0x0000026FA2678550>",
+ "_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x0000026FA26785E0>",
+ "reset_noise": "<function ActorCriticPolicy.reset_noise at 0x0000026FA2678670>",
+ "_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x0000026FA2678700>",
+ "_build": "<function ActorCriticPolicy._build at 0x0000026FA2678790>",
+ "forward": "<function ActorCriticPolicy.forward at 0x0000026FA2678820>",
+ "extract_features": "<function ActorCriticPolicy.extract_features at 0x0000026FA26788B0>",
+ "_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x0000026FA2678940>",
+ "_predict": "<function ActorCriticPolicy._predict at 0x0000026FA26789D0>",
+ "evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x0000026FA2678A60>",
+ "get_distribution": "<function ActorCriticPolicy.get_distribution at 0x0000026FA2678AF0>",
+ "predict_values": "<function ActorCriticPolicy.predict_values at 0x0000026FA2678B80>",
+ "__abstractmethods__": "frozenset()",
+ "_abc_impl": "<_abc._abc_data object at 0x0000026FA2679940>"
+ },
+ "verbose": 1,
+ "policy_kwargs": {
+ ":type:": "<class 'dict'>",
+ ":serialized:": "gAWVgQAAAAAAAAB9lCiMD29wdGltaXplcl9jbGFzc5SME3RvcmNoLm9wdGltLnJtc3Byb3CUjAdSTVNwcm9wlJOUjBBvcHRpbWl6ZXJfa3dhcmdzlH2UKIwFYWxwaGGURz/vrhR64UeujANlcHOURz7k+LWI42jxjAx3ZWlnaHRfZGVjYXmUSwB1dS4=",
+ "optimizer_class": "<class 'torch.optim.rmsprop.RMSprop'>",
+ "optimizer_kwargs": {
+ "alpha": 0.99,
+ "eps": 1e-05,
+ "weight_decay": 0
+ }
+ },
+ "num_timesteps": 2000000,
+ "_total_timesteps": 2000000,
+ "_num_timesteps_at_start": 0,
+ "seed": null,
+ "action_noise": null,
+ "start_time": 1683001285520733300,
+ "learning_rate": 0.0007,
+ "tensorboard_log": null,
+ "lr_schedule": {
+ ":type:": "<class 'function'>",
+ ":serialized:": "gAWVoAIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMbkM6XFVzZXJzXGtpbmdrXFB5Y2hhcm1Qcm9qZWN0c1xodWdnaW5nLWZhY2UtcmwtY291cnNlXHZlbnZcbGliXHNpdGUtcGFja2FnZXNcc3RhYmxlX2Jhc2VsaW5lczNcY29tbW9uXHV0aWxzLnB5lIwEZnVuY5RLgkMCAAGUjAN2YWyUhZQpdJRSlH2UKIwLX19wYWNrYWdlX1+UjBhzdGFibGVfYmFzZWxpbmVzMy5jb21tb26UjAhfX25hbWVfX5SMHnN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi51dGlsc5SMCF9fZmlsZV9flGgMdU5OaACMEF9tYWtlX2VtcHR5X2NlbGyUk5QpUpSFlHSUUpSMHGNsb3VkcGlja2xlLmNsb3VkcGlja2xlX2Zhc3SUjBJfZnVuY3Rpb25fc2V0c3RhdGWUk5RoHn2UfZQoaBZoDYwMX19xdWFsbmFtZV9flIwZY29uc3RhbnRfZm4uPGxvY2Fscz4uZnVuY5SMD19fYW5ub3RhdGlvbnNfX5R9lIwOX19rd2RlZmF1bHRzX1+UTowMX19kZWZhdWx0c19flE6MCl9fbW9kdWxlX1+UaBeMB19fZG9jX1+UTowLX19jbG9zdXJlX1+UaACMCl9tYWtlX2NlbGyUk5RHP0bwBo24useFlFKUhZSMF19jbG91ZHBpY2tsZV9zdWJtb2R1bGVzlF2UjAtfX2dsb2JhbHNfX5R9lHWGlIZSMC4="
+ },
+ "_last_obs": {
+ ":type:": "<class 'numpy.ndarray'>",
+ ":serialized:": "gAWVNQIAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJbAAQAAAAAAAAXNjz/20O493oMLP0coUD9j7VS/w6e5vhNmE7/38Dg96r8kP7GVuL7M87Y+YlDOPlXBBL/wJ5m+bLokv6nnAb9Hq8i+Z6AYvxVCLr9fBgtAXfJxPiJXR8AJiIm+ILziPt2Ghb9C4xA/+8+/v22Vc78aYkU/xzyAP3AEsT6DE4U/6d2qvkyMUT/1LP09QZ20v49uAj+T+9q9+FLMP0/14r5wxo+/CwM8PwOsSL8FbaI+FjMav/aZMr7HtyY/75FzOtc+Dr/+4QE9rIxNv1bNtj5lZ3U/QuMQP2TVKj9tlXO/mfGxPeN3xr52iN4+5G5LP9Hyer+lrAO/H/POPa8WW7/HJf8+1vsXO9jqxD5+nHK/qg6FvxmmdT+1MGu+C1EeP+Lrgb5+HiY/Na4nP7wr7rxT9sa+J4ZRP+FdG75BBPA/3YaFv0wp4r/7z7+/bZVzv3lUWT99CIo/CB+bPphAgz9+E8M/dsYNP2OS/z5ZG66/e3yjPmsEi79NcGW/lPX9Plq8Yj/jO6g+RG8ePw8rtz9KhaE/s762vg5KkD5RabW/VBMSvzIhKD+ttIg/mLz/vt2Ghb9C4xA/+8+/v0yGhj+UjAVudW1weZSMBWR0eXBllJOUjAJmNJSJiIeUUpQoSwOMATyUTk5OSv////9K/////0sAdJRiSwRLHIaUjAFDlHSUUpQu"
+ },
+ "_last_episode_starts": {
+ ":type:": "<class 'numpy.ndarray'>",
+ ":serialized:": "gAWVdwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYEAAAAAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKULg=="
+ },
+ "_last_original_obs": {
+ ":type:": "<class 'numpy.ndarray'>",
+ ":serialized:": "gAWVNQIAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJbAAQAAAAAAAAAAAABOLM62AACAPwAAAAAAAAAAAAAAAAAAAAAAAACA1IMHPgAAAADT2du/AAAAANfCyTsAAAAAvm7aPwAAAAB17Ie9AAAAALZS7j8AAAAAaZecPQAAAAAqfN2/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAaygVNwAAgD8AAAAAAAAAAAAAAAAAAAAAAAAAgGr0Ar4AAAAA/qv9vwAAAAC86w0+AAAAALsX3D8AAAAAu3zSOwAAAADUROE/AAAAAPB5zL0AAAAADGX6vwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGfQj7UAAIA/AAAAAAAAAAAAAAAAAAAAAAAAAIBCltk9AAAAAEka8r8AAAAAIoMCvgAAAABBLOY/AAAAAO1B1D0AAAAA1SwBQAAAAAALtKm9AAAAADHB4b8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACc58y1AACAPwAAAAAAAAAAAAAAAAAAAAAAAACAm9HAvQAAAAB9CPi/AAAAAJRB7bwAAAAASTX3PwAAAACzbGG9AAAAAERj/z8AAAAAPZmZPQAAAADaWfm/AAAAAAAAAAAAAAAAAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJmNJSJiIeUUpQoSwOMATyUTk5OSv////9K/////0sAdJRiSwRLHIaUjAFDlHSUUpQu"
+ },
+ "_episode_num": 0,
+ "use_sde": false,
+ "sde_sample_freq": -1,
+ "_current_progress_remaining": 0.0,
+ "_stats_window_size": 100,
+ "ep_info_buffer": {
+ ":type:": "<class 'collections.deque'>",
+ ":serialized:": "gAWVQwwAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHQJu7jFHavieMAWyUTegDjAF0lEdAmI/YZl4C63V9lChoBkdAnPy8INVinmgHTegDaAhHQJiSQraufVZ1fZQoaAZHQJtvX7rLQoloB03oA2gIR0CYlzn8KohqdX2UKGgGR0CePiyWiUPhaAdN6ANoCEdAmJoGiL2pQ3V9lChoBkdAnEm9d3SrpGgHTegDaAhHQJidB4mkWRB1fZQoaAZHQJ8AG+j/MntoB03oA2gIR0CYn4ptrKvFdX2UKGgGR0CbuGe54GD+aAdN6ANoCEdAmKR10HQhOnV9lChoBkdAnOc3yup0fmgHTegDaAhHQJinDd30PH11fZQoaAZHQJ0aOGJvYOFoB03oA2gIR0CYqfiyIHkcdX2UKGgGR0CbpWLiMo+faAdN6ANoCEdAmKxdeQdS23V9lChoBkdAnebPN/vv0GgHTegDaAhHQJixOPfbblB1fZQoaAZHQJyhRb+tKZloB03oA2gIR0CYs/9Q40djdX2UKGgGR0Ce7QDx9XtCaAdN6ANoCEdAmLbv1ct5EHV9lChoBkdAnM4thd+ocmgHTegDaAhHQJi5SWTot+V1fZQoaAZHQJw5Ls1KoQ5oB03oA2gIR0CYvj9cry2AdX2UKGgGR0CfcgihnJ1aaAdN6ANoCEdAmMEGgWac7XV9lChoBkdAnyJTvuw5emgHTegDaAhHQJjD/FzdUKl1fZQoaAZHQJ7srTw2ETRoB03oA2gIR0CYxmTINmUXdX2UKGgGR0CehayIpH7QaAdN6ANoCEdAmMtnAymALHV9lChoBkdAnTCmygPEsWgHTegDaAhHQJjOEvHtF8Z1fZQoaAZHQJ3YN2+wkgRoB03oA2gIR0CY0QcinpB5dX2UKGgGR0CY81RGMGX5aAdN6ANoCEdAmNNi79Q40nV9lChoBkdAmVribpeNUGgHTegDaAhHQJjYaa+evp11fZQoaAZHQJ0HJ7Z39rJoB03oA2gIR0CY2zuOS4e+dX2UKGgGR0CRS9huO0b+aAdN6ANoCEdAmN5MPOIInnV9lChoBkdAmtDjKYAsCmgHTegDaAhHQJjgqI1tO211fZQoaAZHQJq7swco6S1oB03oA2gIR0CY5WmBOHnEdX2UKGgGR0CaMpVAAyVOaAdN6ANoCEdAmOgdYKYzBXV9lChoBkdAneWpf2K2rmgHTegDaAhHQJjrIl/pdKN1fZQoaAZHQJsH/eQ+2VpoB03oA2gIR0CY7XoddVvNdX2UKGgGR0CdnwGgi/wiaAdN6ANoCEdAmPJrvgFX73V9lChoBkdAncFNx+8XemgHTegDaAhHQJj1F20Re1N1fZQoaAZHQJ36+uDBdldoB03oA2gIR0CY9/syi22HdX2UKGgGR0CaBqa1Cw8oaAdN6ANoCEdAmPpZsXSBsnV9lChoBkdAlqZXbAUL2GgHTegDaAhHQJj/YyoGY8d1fZQoaAZHQJosKLuQZGdoB03oA2gIR0CZAirqdH2AdX2UKGgGR0CbxkwcHWz4aAdN6ANoCEdAmQUGr8zhxnV9lChoBkdAl1p8/yGzr2gHTegDaAhHQJkHU3IdU851fZQoaAZHQJ1DBs3yZrpoB03oA2gIR0CZDD6YVqN7dX2UKGgGR0Ce5KInSfDlaAdN6ANoCEdAmQ7aT4cm0HV9lChoBkdAnQrXeizsyGgHTegDaAhHQJkR3MA3kxR1fZQoaAZHQJcOoPwuuihoB03oA2gIR0CZFDLwF1SwdX2UKGgGR0CUj3fI0ZWJaAdN6ANoCEdAmRk5WmxdIHV9lChoBkdAm/t6+i8Fp2gHTegDaAhHQJkcDQKKHfx1fZQoaAZHQJ3fGVdHDrJoB03oA2gIR0CZHwfVI7NjdX2UKGgGR0CcSS0+TvAoaAdN6ANoCEdAmSFTqrzXjHV9lChoBkdAnh9SvgWJrWgHTegDaAhHQJkmTND+irV1fZQoaAZHQJ2UehWYF7loB03oA2gIR0CZKQgte2NOdX2UKGgGR0CduurpqynlaAdN6ANoCEdAmSv9srNGE3V9lChoBkdAnntFNL127mgHTegDaAhHQJkuXaGpMpR1fZQoaAZHQJ2p+LtNSIhoB03oA2gIR0CZM01HOKO1dX2UKGgGR0CaXeJbMX7+aAdN6ANoCEdAmTX3D3ueBnV9lChoBkdAnewjP0I1L2gHTegDaAhHQJk42T+vQnh1fZQoaAZHQJ50S0AtFrloB03oA2gIR0CZOypy6tkndX2UKGgGR0CbNhsBQvYfaAdN6ANoCEdAmUANoJzDGnV9lChoBkdAnUIi9du50GgHTegDaAhHQJlCu4Ajps51fZQoaAZHQJ4famBOHnFoB03oA2gIR0CZRaV4X40udX2UKGgGR0Cfo2biZOSGaAdN6ANoCEdAmUfyBkI5YHV9lChoBkdAnTQd2C/XXmgHTegDaAhHQJlMxzLfUF11fZQoaAZHQJyFdoVVPvdoB03oA2gIR0CZT1oH9m6HdX2UKGgGR0CdztJ3PiT/aAdN6ANoCEdAmVJmgSOBD3V9lChoBkdAmPhlA7gbZWgHTegDaAhHQJlUs6vJRwZ1fZQoaAZHQJuPV7dBSk1oB03oA2gIR0CZWZTBqKxcdX2UKGgGR0Cc9/ayrxRVaAdN6ANoCEdAmVxFNL127nV9lChoBkdAnhQ6Mm4RVmgHTegDaAhHQJlfNq+Jxed1fZQoaAZHQJxNHp5eJHloB03oA2gIR0CZYaIUahpQdX2UKGgGR0CecpdXDFZQaAdN6ANoCEdAmWZz/2kBS3V9lChoBkdAnHMfCEYfn2gHTegDaAhHQJlpDrdFfAt1fZQoaAZHQJmoO06YE4hoB03oA2gIR0CZbAGGVRk3dX2UKGgGR0Cc2A/PgNwzaAdN6ANoCEdAmW4+Xu3MIXV9lChoBkdAm66CVObiImgHTegDaAhHQJlzKQ/5ckd1fZQoaAZHQJ2ZMaya/h5oB03oA2gIR0CZdecry1/ldX2UKGgGR0CboKUM5OrRaAdN6ANoCEdAmXjgUcn3L3V9lChoBkdAnmN4zi0fHWgHTegDaAhHQJl7RJ/XoTx1fZQoaAZHQJ5og01qFh5oB03oA2gIR0CZgCC2+fyxdX2UKGgGR0CeCmNpM6BAaAdN6ANoCEdAmYLNTUAks3V9lChoBkdAnHlvAoG6gGgHTegDaAhHQJmFssUZeiV1fZQoaAZHQJ2C3jJdSl5oB03oA2gIR0CZiA7ZFocrdX2UKGgGR0CeP/WCEpRXaAdN6ANoCEdAmY0ClSCOFXV9lChoBkdAmqLaGgzxgGgHTegDaAhHQJmQAcghbGF1fZQoaAZHQJvSAXqJMxpoB03oA2gIR0CZkvtxMnJDdX2UKGgGR0Cb9CN3GGVSaAdN6ANoCEdAmZVEmUnogXV9lChoBkdAlahF7x/d7GgHTegDaAhHQJmaJuBMBZJ1fZQoaAZHQJi0qR7qptJoB03oA2gIR0CZnOqrilzm
dX2UKGgGR0Cc1rzguRLcaAdN6ANoCEdAmZ/ltoBaLXV9lChoBkdAmpt619fCymgHTegDaAhHQJmiUd6sySF1fZQoaAZHv9ehnJ1aGHpoB0uyaAhHQJmkolu3trt1fZQoaAZHQJziWQOnVG1oB03oA2gIR0CZp0fuTibVdX2UKGgGR0CWbZltCRfXaAdN6ANoCEdAman9kauOj3V9lChoBkdAmPNF/DtPYWgHTegDaAhHQJms6xGDtgN1fZQoaAZHQJwfDlV94NZoB03oA2gIR0CZsYnmJWNndX2UKGgGR0CcjG62fChwaAdN6ANoCEdAmbQOI68xsXV9lChoBkdAmuv6tga3qmgHTegDaAhHQJm2w0waisZ1fZQoaAZHQJ6EKA4GUwBoB03oA2gIR0CZudQ+EAYIdX2UKGgGR0CVryjsUqQSaAdN6ANoCEdAmb57U1AJLXV9lChoBkdAncBN2X9it2gHTegDaAhHQJnBHqu8sc11fZQoaAZHQJ5sW5SWJJpoB03oA2gIR0CZw8NS619fdX2UKGgGR0CYifV3ljmTaAdN6ANoCEdAmca/5HmRvHV9lChoBkdAmsx6UaAFxGgHTegDaAhHQJnLSS4e9zx1fZQoaAZHQJwue2x6fJ5oB03oA2gIR0CZzdLa24NJdWUu"
+ },
+ "ep_success_buffer": {
+ ":type:": "<class 'collections.deque'>",
+ ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
+ },
+ "_n_updates": 100000,
+ "n_steps": 5,
+ "gamma": 0.99,
+ "gae_lambda": 1.0,
+ "ent_coef": 0.0,
+ "vf_coef": 0.5,
+ "max_grad_norm": 0.5,
+ "normalize_advantage": false,
+ "observation_space": {
+ ":type:": "<class 'gym.spaces.box.Box'>",
+ ":serialized:": "gAWVZwIAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLHIWUjANsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWcAAAAAAAAAAAAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/lGgKSxyFlIwBQ5R0lFKUjARoaWdolGgSKJZwAAAAAAAAAAAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH+UaApLHIWUaBV0lFKUjA1ib3VuZGVkX2JlbG93lGgSKJYcAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACUaAeMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLHIWUaBV0lFKUjA1ib3VuZGVkX2Fib3ZllGgSKJYcAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACUaCFLHIWUaBV0lFKUjApfbnBfcmFuZG9tlE51Yi4=",
+ "dtype": "float32",
+ "_shape": [
+ 28
+ ],
+ "low": "[-inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf\n -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf]",
+ "high": "[inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf\n inf inf inf inf inf inf inf inf inf inf]",
+ "bounded_below": "[False False False False False False False False False False False False\n False False False False False False False False False False False False\n False False False False]",
+ "bounded_above": "[False False False False False False False False False False False False\n False False False False False False False False False False False False\n False False False False]",
+ "_np_random": null
+ },
+ "action_space": {
+ ":type:": "<class 'gym.spaces.box.Box'>",
+ ":serialized:": "gAWVnwEAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLCIWUjANsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWIAAAAAAAAAAAAIC/AACAvwAAgL8AAIC/AACAvwAAgL8AAIC/AACAv5RoCksIhZSMAUOUdJRSlIwEaGlnaJRoEiiWIAAAAAAAAAAAAIA/AACAPwAAgD8AAIA/AACAPwAAgD8AAIA/AACAP5RoCksIhZRoFXSUUpSMDWJvdW5kZWRfYmVsb3eUaBIolggAAAAAAAAAAQEBAQEBAQGUaAeMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLCIWUaBV0lFKUjA1ib3VuZGVkX2Fib3ZllGgSKJYIAAAAAAAAAAEBAQEBAQEBlGghSwiFlGgVdJRSlIwKX25wX3JhbmRvbZROdWIu",
+ "dtype": "float32",
+ "_shape": [
+ 8
+ ],
+ "low": "[-1. -1. -1. -1. -1. -1. -1. -1.]",
+ "high": "[1. 1. 1. 1. 1. 1. 1. 1.]",
+ "bounded_below": "[ True True True True True True True True]",
+ "bounded_above": "[ True True True True True True True True]",
+ "_np_random": null
+ },
+ "n_envs": 4
+ }
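The `a2c-AntBulletEnv-v0/data` blob above records the training configuration: 4 parallel environments, `n_steps=5`, `gamma=0.99`, `gae_lambda=1.0`, `ent_coef=0.0`, `vf_coef=0.5`, `max_grad_norm=0.5`, a constant learning rate of 7e-4 with RMSprop (alpha 0.99, eps 1e-5), and 2,000,000 timesteps. A rough sketch of how those values map onto a Stable-Baselines3 training call follows; the `MlpPolicy` shorthand, `make_vec_env`, and the exact `VecNormalize` arguments are assumptions, not values read from this repository.

```python
import gym
import pybullet_envs  # noqa: F401 -- registers AntBulletEnv-v0 (assumes pybullet is installed)

from stable_baselines3 import A2C
from stable_baselines3.common.env_util import make_vec_env
from stable_baselines3.common.vec_env import VecNormalize

# 4 parallel environments, matching the recorded "n_envs": 4.
env = make_vec_env("AntBulletEnv-v0", n_envs=4)
# vec_normalize.pkl is shipped alongside the model, so training presumably used
# VecNormalize; the specific arguments below are an assumption.
env = VecNormalize(env, norm_obs=True, norm_reward=True, clip_obs=10.0)

model = A2C(
    "MlpPolicy",          # ActorCriticPolicy, per the serialized policy_class
    env,
    learning_rate=7e-4,   # "learning_rate": 0.0007 (constant schedule)
    n_steps=5,            # "n_steps": 5
    gamma=0.99,           # "gamma": 0.99
    gae_lambda=1.0,       # "gae_lambda": 1.0
    ent_coef=0.0,         # "ent_coef": 0.0
    vf_coef=0.5,          # "vf_coef": 0.5
    max_grad_norm=0.5,    # "max_grad_norm": 0.5
    verbose=1,            # "verbose": 1
)
# A2C's default RMSprop settings (alpha=0.99, eps=1e-5, weight_decay=0) match the
# recorded optimizer_kwargs, so no optimizer override is needed here.
model.learn(total_timesteps=2_000_000)  # "_total_timesteps": 2000000

model.save("a2c-AntBulletEnv-v0")
env.save("vec_normalize.pkl")
```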
a2c-AntBulletEnv-v0/policy.optimizer.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1f33b5a8139dcd998f3fbdebcac962dd3067235b5ade308174cfbca595e192bc
+ size 54078
a2c-AntBulletEnv-v0/policy.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9a3280da19a7dc74e4443ca8cdf964b1242c7b62cd49c41c069b8ae6f80fb39a
+ size 54782
a2c-AntBulletEnv-v0/pytorch_variables.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d030ad8db708280fcae77d87e973102039acd23a11bdecc3db8eb6c0ac940ee1
+ size 431
a2c-AntBulletEnv-v0/system_info.txt ADDED
@@ -0,0 +1,7 @@
+ - OS: Windows-10-10.0.22621-SP0 10.0.22621
+ - Python: 3.9.13
+ - Stable-Baselines3: 1.8.0
+ - PyTorch: 2.0.0+cpu
+ - GPU Enabled: False
+ - Numpy: 1.24.3
+ - Gym: 0.21.0
config.json ADDED
@@ -0,0 +1 @@
+ {"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==", "__module__": "stable_baselines3.common.policies", "__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function ActorCriticPolicy.__init__ at 0x0000026FA2678550>", "_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x0000026FA26785E0>", "reset_noise": "<function ActorCriticPolicy.reset_noise at 0x0000026FA2678670>", "_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x0000026FA2678700>", "_build": "<function ActorCriticPolicy._build at 0x0000026FA2678790>", "forward": "<function ActorCriticPolicy.forward at 0x0000026FA2678820>", "extract_features": "<function ActorCriticPolicy.extract_features at 0x0000026FA26788B0>", "_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x0000026FA2678940>", "_predict": "<function ActorCriticPolicy._predict at 0x0000026FA26789D0>", "evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x0000026FA2678A60>", "get_distribution": "<function ActorCriticPolicy.get_distribution at 0x0000026FA2678AF0>", "predict_values": "<function ActorCriticPolicy.predict_values at 0x0000026FA2678B80>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x0000026FA2679940>"}, "verbose": 1, "policy_kwargs": {":type:": "<class 'dict'>", ":serialized:": "gAWVgQAAAAAAAAB9lCiMD29wdGltaXplcl9jbGFzc5SME3RvcmNoLm9wdGltLnJtc3Byb3CUjAdSTVNwcm9wlJOUjBBvcHRpbWl6ZXJfa3dhcmdzlH2UKIwFYWxwaGGURz/vrhR64UeujANlcHOURz7k+LWI42jxjAx3ZWlnaHRfZGVjYXmUSwB1dS4=", "optimizer_class": "<class 'torch.optim.rmsprop.RMSprop'>", "optimizer_kwargs": {"alpha": 0.99, "eps": 1e-05, "weight_decay": 0}}, "num_timesteps": 2000000, "_total_timesteps": 2000000, "_num_timesteps_at_start": 0, "seed": null, 
"action_noise": null, "start_time": 1683001285520733300, "learning_rate": 0.0007, "tensorboard_log": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVoAIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMbkM6XFVzZXJzXGtpbmdrXFB5Y2hhcm1Qcm9qZWN0c1xodWdnaW5nLWZhY2UtcmwtY291cnNlXHZlbnZcbGliXHNpdGUtcGFja2FnZXNcc3RhYmxlX2Jhc2VsaW5lczNcY29tbW9uXHV0aWxzLnB5lIwEZnVuY5RLgkMCAAGUjAN2YWyUhZQpdJRSlH2UKIwLX19wYWNrYWdlX1+UjBhzdGFibGVfYmFzZWxpbmVzMy5jb21tb26UjAhfX25hbWVfX5SMHnN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi51dGlsc5SMCF9fZmlsZV9flGgMdU5OaACMEF9tYWtlX2VtcHR5X2NlbGyUk5QpUpSFlHSUUpSMHGNsb3VkcGlja2xlLmNsb3VkcGlja2xlX2Zhc3SUjBJfZnVuY3Rpb25fc2V0c3RhdGWUk5RoHn2UfZQoaBZoDYwMX19xdWFsbmFtZV9flIwZY29uc3RhbnRfZm4uPGxvY2Fscz4uZnVuY5SMD19fYW5ub3RhdGlvbnNfX5R9lIwOX19rd2RlZmF1bHRzX1+UTowMX19kZWZhdWx0c19flE6MCl9fbW9kdWxlX1+UaBeMB19fZG9jX1+UTowLX19jbG9zdXJlX1+UaACMCl9tYWtlX2NlbGyUk5RHP0bwBo24useFlFKUhZSMF19jbG91ZHBpY2tsZV9zdWJtb2R1bGVzlF2UjAtfX2dsb2JhbHNfX5R9lHWGlIZSMC4="}, "_last_obs": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVNQIAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJbAAQAAAAAAAAXNjz/20O493oMLP0coUD9j7VS/w6e5vhNmE7/38Dg96r8kP7GVuL7M87Y+YlDOPlXBBL/wJ5m+bLokv6nnAb9Hq8i+Z6AYvxVCLr9fBgtAXfJxPiJXR8AJiIm+ILziPt2Ghb9C4xA/+8+/v22Vc78aYkU/xzyAP3AEsT6DE4U/6d2qvkyMUT/1LP09QZ20v49uAj+T+9q9+FLMP0/14r5wxo+/CwM8PwOsSL8FbaI+FjMav/aZMr7HtyY/75FzOtc+Dr/+4QE9rIxNv1bNtj5lZ3U/QuMQP2TVKj9tlXO/mfGxPeN3xr52iN4+5G5LP9Hyer+lrAO/H/POPa8WW7/HJf8+1vsXO9jqxD5+nHK/qg6FvxmmdT+1MGu+C1EeP+Lrgb5+HiY/Na4nP7wr7rxT9sa+J4ZRP+FdG75BBPA/3YaFv0wp4r/7z7+/bZVzv3lUWT99CIo/CB+bPphAgz9+E8M/dsYNP2OS/z5ZG66/e3yjPmsEi79NcGW/lPX9Plq8Yj/jO6g+RG8ePw8rtz9KhaE/s762vg5KkD5RabW/VBMSvzIhKD+ttIg/mLz/vt2Ghb9C4xA/+8+/v0yGhj+UjAVudW1weZSMBWR0eXBllJOUjAJmNJSJiIeUUpQoSwOMATyUTk5OSv////9K/////0sAdJRiSwRLHIaUjAFDlHSUUpQu"}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVdwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYEAAAAAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKULg=="}, "_last_original_obs": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVNQIAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJbAAQAAAAAAAAAAAABOLM62AACAPwAAAAAAAAAAAAAAAAAAAAAAAACA1IMHPgAAAADT2du/AAAAANfCyTsAAAAAvm7aPwAAAAB17Ie9AAAAALZS7j8AAAAAaZecPQAAAAAqfN2/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAaygVNwAAgD8AAAAAAAAAAAAAAAAAAAAAAAAAgGr0Ar4AAAAA/qv9vwAAAAC86w0+AAAAALsX3D8AAAAAu3zSOwAAAADUROE/AAAAAPB5zL0AAAAADGX6vwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGfQj7UAAIA/AAAAAAAAAAAAAAAAAAAAAAAAAIBCltk9AAAAAEka8r8AAAAAIoMCvgAAAABBLOY/AAAAAO1B1D0AAAAA1SwBQAAAAAALtKm9AAAAADHB4b8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACc58y1AACAPwAAAAAAAAAAAAAAAAAAAAAAAACAm9HAvQAAAAB9CPi/AAAAAJRB7bwAAAAASTX3PwAAAACzbGG9AAAAAERj/z8AAAAAPZmZPQAAAADaWfm/AAAAAAAAAAAAAAAAAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJmNJSJiIeUUpQoSwOMATyUTk5OSv////9K/////0sAdJRiSwRLHIaUjAFDlHSUUpQu"}, "_episode_num": 0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": 0.0, "_stats_window_size": 100, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": 
"gAWVQwwAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHQJu7jFHavieMAWyUTegDjAF0lEdAmI/YZl4C63V9lChoBkdAnPy8INVinmgHTegDaAhHQJiSQraufVZ1fZQoaAZHQJtvX7rLQoloB03oA2gIR0CYlzn8KohqdX2UKGgGR0CePiyWiUPhaAdN6ANoCEdAmJoGiL2pQ3V9lChoBkdAnEm9d3SrpGgHTegDaAhHQJidB4mkWRB1fZQoaAZHQJ8AG+j/MntoB03oA2gIR0CYn4ptrKvFdX2UKGgGR0CbuGe54GD+aAdN6ANoCEdAmKR10HQhOnV9lChoBkdAnOc3yup0fmgHTegDaAhHQJinDd30PH11fZQoaAZHQJ0aOGJvYOFoB03oA2gIR0CYqfiyIHkcdX2UKGgGR0CbpWLiMo+faAdN6ANoCEdAmKxdeQdS23V9lChoBkdAnebPN/vv0GgHTegDaAhHQJixOPfbblB1fZQoaAZHQJyhRb+tKZloB03oA2gIR0CYs/9Q40djdX2UKGgGR0Ce7QDx9XtCaAdN6ANoCEdAmLbv1ct5EHV9lChoBkdAnM4thd+ocmgHTegDaAhHQJi5SWTot+V1fZQoaAZHQJw5Ls1KoQ5oB03oA2gIR0CYvj9cry2AdX2UKGgGR0CfcgihnJ1aaAdN6ANoCEdAmMEGgWac7XV9lChoBkdAnyJTvuw5emgHTegDaAhHQJjD/FzdUKl1fZQoaAZHQJ7srTw2ETRoB03oA2gIR0CYxmTINmUXdX2UKGgGR0CehayIpH7QaAdN6ANoCEdAmMtnAymALHV9lChoBkdAnTCmygPEsWgHTegDaAhHQJjOEvHtF8Z1fZQoaAZHQJ3YN2+wkgRoB03oA2gIR0CY0QcinpB5dX2UKGgGR0CY81RGMGX5aAdN6ANoCEdAmNNi79Q40nV9lChoBkdAmVribpeNUGgHTegDaAhHQJjYaa+evp11fZQoaAZHQJ0HJ7Z39rJoB03oA2gIR0CY2zuOS4e+dX2UKGgGR0CRS9huO0b+aAdN6ANoCEdAmN5MPOIInnV9lChoBkdAmtDjKYAsCmgHTegDaAhHQJjgqI1tO211fZQoaAZHQJq7swco6S1oB03oA2gIR0CY5WmBOHnEdX2UKGgGR0CaMpVAAyVOaAdN6ANoCEdAmOgdYKYzBXV9lChoBkdAneWpf2K2rmgHTegDaAhHQJjrIl/pdKN1fZQoaAZHQJsH/eQ+2VpoB03oA2gIR0CY7XoddVvNdX2UKGgGR0CdnwGgi/wiaAdN6ANoCEdAmPJrvgFX73V9lChoBkdAncFNx+8XemgHTegDaAhHQJj1F20Re1N1fZQoaAZHQJ36+uDBdldoB03oA2gIR0CY9/syi22HdX2UKGgGR0CaBqa1Cw8oaAdN6ANoCEdAmPpZsXSBsnV9lChoBkdAlqZXbAUL2GgHTegDaAhHQJj/YyoGY8d1fZQoaAZHQJosKLuQZGdoB03oA2gIR0CZAirqdH2AdX2UKGgGR0CbxkwcHWz4aAdN6ANoCEdAmQUGr8zhxnV9lChoBkdAl1p8/yGzr2gHTegDaAhHQJkHU3IdU851fZQoaAZHQJ1DBs3yZrpoB03oA2gIR0CZDD6YVqN7dX2UKGgGR0Ce5KInSfDlaAdN6ANoCEdAmQ7aT4cm0HV9lChoBkdAnQrXeizsyGgHTegDaAhHQJkR3MA3kxR1fZQoaAZHQJcOoPwuuihoB03oA2gIR0CZFDLwF1SwdX2UKGgGR0CUj3fI0ZWJaAdN6ANoCEdAmRk5WmxdIHV9lChoBkdAm/t6+i8Fp2gHTegDaAhHQJkcDQKKHfx1fZQoaAZHQJ3fGVdHDrJoB03oA2gIR0CZHwfVI7NjdX2UKGgGR0CcSS0+TvAoaAdN6ANoCEdAmSFTqrzXjHV9lChoBkdAnh9SvgWJrWgHTegDaAhHQJkmTND+irV1fZQoaAZHQJ2UehWYF7loB03oA2gIR0CZKQgte2NOdX2UKGgGR0CduurpqynlaAdN6ANoCEdAmSv9srNGE3V9lChoBkdAnntFNL127mgHTegDaAhHQJkuXaGpMpR1fZQoaAZHQJ2p+LtNSIhoB03oA2gIR0CZM01HOKO1dX2UKGgGR0CaXeJbMX7+aAdN6ANoCEdAmTX3D3ueBnV9lChoBkdAnewjP0I1L2gHTegDaAhHQJk42T+vQnh1fZQoaAZHQJ50S0AtFrloB03oA2gIR0CZOypy6tkndX2UKGgGR0CbNhsBQvYfaAdN6ANoCEdAmUANoJzDGnV9lChoBkdAnUIi9du50GgHTegDaAhHQJlCu4Ajps51fZQoaAZHQJ4famBOHnFoB03oA2gIR0CZRaV4X40udX2UKGgGR0Cfo2biZOSGaAdN6ANoCEdAmUfyBkI5YHV9lChoBkdAnTQd2C/XXmgHTegDaAhHQJlMxzLfUF11fZQoaAZHQJyFdoVVPvdoB03oA2gIR0CZT1oH9m6HdX2UKGgGR0CdztJ3PiT/aAdN6ANoCEdAmVJmgSOBD3V9lChoBkdAmPhlA7gbZWgHTegDaAhHQJlUs6vJRwZ1fZQoaAZHQJuPV7dBSk1oB03oA2gIR0CZWZTBqKxcdX2UKGgGR0Cc9/ayrxRVaAdN6ANoCEdAmVxFNL127nV9lChoBkdAnhQ6Mm4RVmgHTegDaAhHQJlfNq+Jxed1fZQoaAZHQJxNHp5eJHloB03oA2gIR0CZYaIUahpQdX2UKGgGR0CecpdXDFZQaAdN6ANoCEdAmWZz/2kBS3V9lChoBkdAnHMfCEYfn2gHTegDaAhHQJlpDrdFfAt1fZQoaAZHQJmoO06YE4hoB03oA2gIR0CZbAGGVRk3dX2UKGgGR0Cc2A/PgNwzaAdN6ANoCEdAmW4+Xu3MIXV9lChoBkdAm66CVObiImgHTegDaAhHQJlzKQ/5ckd1fZQoaAZHQJ2ZMaya/h5oB03oA2gIR0CZdecry1/ldX2UKGgGR0CboKUM5OrRaAdN6ANoCEdAmXjgUcn3L3V9lChoBkdAnmN4zi0fHWgHTegDaAhHQJl7RJ/XoTx1fZQoaAZHQJ5og01qFh5oB03oA2gIR0CZgCC2+fyxdX2UKGgGR0CeCmNpM6BAaAdN6ANoCEdAmYLNTUAks3V9lChoBkdAnHlvAoG6gGgHTegDaAhHQJmFssUZeiV1fZQoaAZHQJ2C3jJdSl5oB03oA2gIR0CZiA7ZFocrdX2UKGgGR0CeP/WCEpRXaAdN6ANoCEdAmY0ClSCOFXV9lChoBkdAmqLaGgzxgGgHTegDaAhHQJmQAcghbGF1fZQoaAZHQJvSAXqJMxpoB03oA2gIR0CZkvtxMnJDdX2UKGgGR0Cb9CN3GGVSaAdN6ANoCEdAmZVEmUnogXV9lChoBkdAlahF7x/d7GgHTegDaAhHQJmaJuBMBZJ1fZQoaAZHQJi0qR7qptJoB03oA2gIR0CZnOqrilzmdX2UKGgGR0Cc1rzguR
LcaAdN6ANoCEdAmZ/ltoBaLXV9lChoBkdAmpt619fCymgHTegDaAhHQJmiUd6sySF1fZQoaAZHv9ehnJ1aGHpoB0uyaAhHQJmkolu3trt1fZQoaAZHQJziWQOnVG1oB03oA2gIR0CZp0fuTibVdX2UKGgGR0CWbZltCRfXaAdN6ANoCEdAman9kauOj3V9lChoBkdAmPNF/DtPYWgHTegDaAhHQJms6xGDtgN1fZQoaAZHQJwfDlV94NZoB03oA2gIR0CZsYnmJWNndX2UKGgGR0CcjG62fChwaAdN6ANoCEdAmbQOI68xsXV9lChoBkdAmuv6tga3qmgHTegDaAhHQJm2w0waisZ1fZQoaAZHQJ6EKA4GUwBoB03oA2gIR0CZudQ+EAYIdX2UKGgGR0CVryjsUqQSaAdN6ANoCEdAmb57U1AJLXV9lChoBkdAncBN2X9it2gHTegDaAhHQJnBHqu8sc11fZQoaAZHQJ5sW5SWJJpoB03oA2gIR0CZw8NS619fdX2UKGgGR0CYifV3ljmTaAdN6ANoCEdAmca/5HmRvHV9lChoBkdAmsx6UaAFxGgHTegDaAhHQJnLSS4e9zx1fZQoaAZHQJwue2x6fJ5oB03oA2gIR0CZzdLa24NJdWUu"}, "ep_success_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 100000, "n_steps": 5, "gamma": 0.99, "gae_lambda": 1.0, "ent_coef": 0.0, "vf_coef": 0.5, "max_grad_norm": 0.5, "normalize_advantage": false, "observation_space": {":type:": "<class 'gym.spaces.box.Box'>", ":serialized:": "gAWVZwIAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLHIWUjANsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWcAAAAAAAAAAAAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/lGgKSxyFlIwBQ5R0lFKUjARoaWdolGgSKJZwAAAAAAAAAAAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH+UaApLHIWUaBV0lFKUjA1ib3VuZGVkX2JlbG93lGgSKJYcAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACUaAeMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLHIWUaBV0lFKUjA1ib3VuZGVkX2Fib3ZllGgSKJYcAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACUaCFLHIWUaBV0lFKUjApfbnBfcmFuZG9tlE51Yi4=", "dtype": "float32", "_shape": [28], "low": "[-inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf\n -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf]", "high": "[inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf\n inf inf inf inf inf inf inf inf inf inf]", "bounded_below": "[False False False False False False False False False False False False\n False False False False False False False False False False False False\n False False False False]", "bounded_above": "[False False False False False False False False False False False False\n False False False False False False False False False False False False\n False False False False]", "_np_random": null}, "action_space": {":type:": "<class 'gym.spaces.box.Box'>", ":serialized:": "gAWVnwEAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLCIWUjANsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWIAAAAAAAAAAAAIC/AACAvwAAgL8AAIC/AACAvwAAgL8AAIC/AACAv5RoCksIhZSMAUOUdJRSlIwEaGlnaJRoEiiWIAAAAAAAAAAAAIA/AACAPwAAgD8AAIA/AACAPwAAgD8AAIA/AACAP5RoCksIhZRoFXSUUpSMDWJvdW5kZWRfYmVsb3eUaBIolggAAAAAAAAAAQEBAQEBAQGUaAeMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLCIWUaBV0lFKUjA1ib3VuZGVkX2Fib3ZllGgSKJYIAAAAAAAAAAEBAQEBAQEBlGghSwiFlGgVdJRSlIwKX25wX3JhbmRvbZROdWIu", "dtype": "float32", "_shape": [8], "low": "[-1. -1. -1. -1. -1. -1. -1. -1.]", "high": "[1. 1. 1. 1. 1. 1. 1. 
1.]", "bounded_below": "[ True True True True True True True True]", "bounded_above": "[ True True True True True True True True]", "_np_random": null}, "n_envs": 4, "system_info": {"OS": "Windows-10-10.0.22621-SP0 10.0.22621", "Python": "3.9.13", "Stable-Baselines3": "1.8.0", "PyTorch": "2.0.0+cpu", "GPU Enabled": "False", "Numpy": "1.24.3", "Gym": "0.21.0"}}
results.json ADDED
@@ -0,0 +1 @@
+ {"mean_reward": 1811.9430410871166, "std_reward": 114.06295379012245, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2023-05-02T10:14:41.017467"}
vec_normalize.pkl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:88283f9ef59693cf0ab4068ab7014b5fe424080a8c7e3ba378f1388e742e19bb
+ size 2170