{"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVNwAAAAAAAACMHnN0YWJsZV9iYXNlbGluZXMzLnNhYy5wb2xpY2llc5SMEE11bHRpSW5wdXRQb2xpY3mUk5Qu", "__module__": "stable_baselines3.sac.policies", "__doc__": "\n Policy class (with both actor and critic) for SAC.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param use_expln: Use ``expln()`` function instead of ``exp()`` when using gSDE to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param clip_mean: Clip the mean output when using gSDE to avoid numerical instability.\n :param features_extractor_class: Features extractor to use.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n :param n_critics: Number of critic networks to create.\n :param share_features_extractor: Whether to share or not the features extractor\n between the actor and the critic (this saves computation time)\n ", "__init__": "<function MultiInputPolicy.__init__ at 0x2a5c3c3a0>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x2a5c3d040>"}, "verbose": 1, "policy_kwargs": {":type:": "<class 'dict'>", ":serialized:": 
"gAWVRgcAAAAAAAB9lCiMGGZlYXR1cmVzX2V4dHJhY3Rvcl9jbGFzc5SMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwUX21ha2Vfc2tlbGV0b25fY2xhc3OUk5QojAhidWlsdGluc5SMBHR5cGWUk5SMF0N1c3RvbUNvbWJpbmVkRXh0cmFjdG9ylIwlc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnRvcmNoX2xheWVyc5SMFUJhc2VGZWF0dXJlc0V4dHJhY3RvcpSTlIWUfZSMCl9fbW9kdWxlX1+UjAhfX21haW5fX5RzjCAxOGFhYjM5YWJkOGI0ZTU1ODY5YjZlZmJhZjU4MTZkN5ROdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMD19jbGFzc19zZXRzdGF0ZZSTlGgSfZQojApfX21vZHVsZV9flGgPjAdfX2RvY19flE6MCF9faW5pdF9flGgCjA5fbWFrZV9mdW5jdGlvbpSTlChoAowNX2J1aWx0aW5fdHlwZZSTlIwIQ29kZVR5cGWUhZRSlChLA0sASwBLA0sISwNDzHQAdAF8AIMCoAJ8AXwCoQIBAHQDoAR0A6AFfAFkARkAagZkAhkAZAOhAnQDoAehAHQDoAVkA2QEoQJ0A6AHoQChBHwAXwh0A6AEdAOgBXwBZAUZAGoGZAIZAGQDoQJ0A6AHoQB0A6AFZANkBKECdAOgB6EAoQR8AF8JdAOgBHQDoAV8AWQGGQBqBmQCGQBkB6ECdAOgB6EAdAOgBWQHZAehAnQDoAehAKEEfABfCnQDoAR0A6AFZAh8AqECdAOgB6EAoQJ8AF8LZABTAJQoTowNYWNoaWV2ZWRfZ29hbJRLAEtAS4CMDGRlc2lyZWRfZ29hbJSMC29ic2VydmF0aW9ulE0AAU0AAnSUKIwFc3VwZXKUaAhoGYwCbm6UjApTZXF1ZW50aWFslIwGTGluZWFylIwFc2hhcGWUjARSZUxVlIwXYWNoaWV2ZWRfZ29hbF9leHRyYWN0b3KUjBZkZXNpcmVkX2dvYWxfZXh0cmFjdG9ylIwVb2JzZXJ2YXRpb25fZXh0cmFjdG9ylIwLY29tYmluZWRfZmOUdJSMBHNlbGaUjBFvYnNlcnZhdGlvbl9zcGFjZZSMDGZlYXR1cmVzX2RpbZSHlIxCL1VzZXJzL2pidWNoODA4L0Rlc2t0b3AvQ29kZS9ybC10ZXN0L1BhbmRhUGlja2FuZFBsYWNlX3YzL3RyYWluLnB5lIwIX19pbml0X1+USxxDLgABEgMEARQBBgEKAQb8BgYEARQBBgEKAQb8BgYEARQBBgEKAQb8BggEAQoBBv6UjAlfX2NsYXNzX1+UhZQpdJRSlH2UKIwLX19wYWNrYWdlX1+UTowIX19uYW1lX1+UaA+MCF9fZmlsZV9flGg1dU5OaAKMEF9tYWtlX2VtcHR5X2NlbGyUk5QpUpSFlHSUUpRoE4wSX2Z1bmN0aW9uX3NldHN0YXRllJOUaEV9lH2UKGg+aDaMDF9fcXVhbG5hbWVfX5SMIEN1c3RvbUNvbWJpbmVkRXh0cmFjdG9yLl9faW5pdF9flIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTQAChZRoDmgPaBhOjAtfX2Nsb3N1cmVfX5RoAowKX21ha2VfY2VsbJSTlGgShZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZQoaAhoEmgnaAKMCXN1YmltcG9ydJSTlIwIdG9yY2gubm6UhZRSlHV1hpSGUjCMB2ZvcndhcmSUaBsoaCAoSwJLAEsASwZLBEtDQ0h8AKAAfAFkARkAoQF9AnwAoAF8AWQCGQChAX0DfACgAnwBZAMZAKEBfQR0A2oEfAJ8A3wEZgNkBGQFjQJ9BXwAoAV8BaEBUwCUKE5oImgjaCRLAYwDZGltlIWUdJQoaCxoLWgujAV0b3JjaJSMA2NhdJRoL3SUKGgxjAxvYnNlcnZhdGlvbnOUjBdhY2hpZXZlZF9nb2FsX2VtYmVkZGluZ5SMFmRlc2lyZWRfZ29hbF9lbWJlZGRpbmeUjBVvYnNlcnZhdGlvbl9lbWJlZGRpbmeUjBJjb21iaW5lZF9lbWJlZGRpbmeUdJRoNYwHZm9yd2FyZJRLOUMKAAEOAQ4BDgMUAZQpKXSUUpRoPE5OTnSUUpRoR2h0fZR9lChoPmhvaEqMH0N1c3RvbUNvbWJpbmVkRXh0cmFjdG9yLmZvcndhcmSUaEx9lGhOTmhPTmgOaA9oGE5oUU5oV12UaFl9lGhmaFxoZoWUUpRzdYaUhlIwdX2UhpSGUjCMGWZlYXR1cmVzX2V4dHJhY3Rvcl9rd2FyZ3OUfZSMDGZlYXR1cmVzX2RpbZRNAAJzjAd1c2Vfc2RllIl1Lg==", "features_extractor_class": "<class '__main__.CustomCombinedExtractor'>", "features_extractor_kwargs": {"features_dim": 512}, "use_sde": false}, "num_timesteps": 5000000, "_total_timesteps": 5000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1706940081750553000, "learning_rate": 0.0003, "tensorboard_log": "./logs/dense_sac1/", "_last_obs": null, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVdwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYEAAAAAAAAAAEBAQGUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKULg=="}, "_last_original_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": 
"gAWViwIAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAAPlgWPrcE0T37wKM8Ip4HPoqb3D37wKM87GMEvkx2CLxhwaM8XmYYvkXYFT5fwaM8lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAASjjYOuLgGTwK16M8GPZPvXPl1L3cPKM9GkJ1vbZOFT4K16M8nC8WvRUuKD0zU1o+lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWMAEAAAAAAAAGUQa+HJquPgDS5T7ZrO496L+CPic+tj4T16M9PlgWPrcE0T37wKM8JfPXN7zhlzd24ag5AtQdOA/7BLjg6zQvYGfNMnMiCLONcGs6OjxIOwE15T4t2tE+xt1rvVShXD1QA1A9xdajPSKeBz6Km9w9+8CjPAEf2DfTtZc3WiebOma+HTg08wS4yAm/LHIDtjJAiLSyAx5rOqlLKD2H9aU+tI7nPlB9r74n6P4+g+ewPrniXjXsYwS+THYIvGHBozzZBG+3rXdiN6TrazkLw6g3nRbFN/wCTC7YVIUyRcRhMtv8Ezo+/vQ91C83PtjZpT7V9+47UdsEP5dFwz5nVQA1XmYYvkXYFT5fwaM8kWNyt+HIYTfzCes4IQypN16nwzeFsRU02BURN2zA4TWxAxQ6lGgOSwRLE4aUaBJ0lFKUdS4=", "achieved_goal": "[[ 0.14682099 0.10205977 0.01998948]\n [ 0.13243917 0.10771854 0.01998948]\n [-0.12928742 -0.00832899 0.01998967]\n [-0.148828 0.14633282 0.01998967]]", "desired_goal": "[[ 0.00164963 0.009392 0.02 ]\n [-0.0507718 -0.10395326 0.07970592]\n [-0.05987749 0.14580807 0.02 ]\n [-0.0366665 0.04105957 0.213208 ]]", "observation": "[[-1.31168455e-01 3.41019511e-01 4.48867798e-01 1.16540618e-01\n 2.55370378e-01 3.55942935e-01 8.00000653e-02 1.46820992e-01\n 1.02059774e-01 1.99894812e-02 2.57432202e-05 1.81057185e-05\n 3.22114385e-04 3.76291646e-05 -3.17050690e-05 1.64547043e-10\n 2.39121505e-08 -3.16962989e-08 8.98130995e-04]\n [ 3.05534760e-03 4.47670013e-01 4.09867674e-01 -5.75845465e-02\n 5.38647920e-02 5.07844090e-02 7.99994841e-02 1.32439166e-01\n 1.07718542e-01 1.99894812e-02 2.57636439e-05 1.80852712e-05\n 1.18372892e-03 3.76090393e-05 -3.16977530e-05 5.42963244e-12\n 2.11891553e-08 -2.10167173e-08 8.96901067e-04]\n [ 4.10877801e-02 3.24138850e-01 4.52260613e-01 -3.42752934e-01\n 4.97864932e-01 3.45516294e-01 8.30313695e-07 -1.29287422e-01\n -8.32898542e-03 1.99896712e-02 -1.42466388e-05 1.34985139e-05\n 2.24991294e-04 2.01179846e-05 2.34947602e-05 4.63868804e-11\n 1.55218203e-08 1.31413502e-08 5.64528338e-04]\n [ 1.19625553e-01 1.78893387e-01 3.23927641e-01 7.29272747e-03\n 5.18971503e-01 3.81390303e-01 4.78079926e-07 -1.48828000e-01\n 1.46332815e-01 1.99896675e-02 -1.44475061e-05 1.34578158e-05\n 1.12075264e-04 2.01520179e-05 2.33237479e-05 1.39412876e-07\n 8.64775939e-06 1.68198085e-06 5.64630202e-04]]"}, "_episode_num": 103275, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": 0.0, "_stats_window_size": 100, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": 
"gAWV4AsAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHwCYSd1+y7f6MAWyUSzKMAXSUR0D7RV5CQLeAdX2UKGgGR8ArxlU6xPfsaAdLMmgIR0D7RXYYJE6UdX2UKGgGR8AhCRGtp22YaAdLMmgIR0D7RXXaqCHzdX2UKGgGR8Ao2DaGpMpPaAdLMmgIR0D7RXhMPSUkdX2UKGgGR7+pimVJL/S6aAdLAWgIR0D7RXlOq//OdX2UKGgGR8AP/rt3OfNBaAdLMmgIR0D7RZITbWVedX2UKGgGR8AuUBshxHXmaAdLMmgIR0D7RalMlkYodX2UKGgGR8AqPRTjvNNbaAdLMmgIR0D7RakFAmiQdX2UKGgGR8Ae3tTkyULVaAdLMmgIR0D7RaxwUQCkdX2UKGgGR8Akze1rqMWHaAdLMmgIR0D7RcWnhKlIdX2UKGgGR8AsJE2Hck+paAdLMmgIR0D7Rd0TPjXGdX2UKGgGR8AsBJ4B3iaRaAdLMmgIR0D7RdzMZP2xdX2UKGgGR8AuWLQ5WBBiaAdLMmgIR0D7ReA8W9DhdX2UKGgGR8AfQr8R+SbIaAdLMmgIR0D7RfmMHKOldX2UKGgGR8AsRGsmv4dqaAdLMmgIR0D7RhDzshPkdX2UKGgGR8Ao1azNUwSKaAdLMmgIR0D7RhCm6oVEdX2UKGgGR8AhdfbblA/taAdLMmgIR0D7RhQbRWtEdX2UKGgGR8AngUtZmqYJaAdLMmgIR0D7Ri1FkxyodX2UKGgGR8AMNbcGkep5aAdLMmgIR0D7RkTdYnv2dX2UKGgGR8AQCCL/CIk7aAdLMmgIR0D7RkSZJTVEdX2UKGgGR8AWqvHLidauaAdLMmgIR0D7RkgckdFOdX2UKGgGR8AYYoUi6g/UaAdLMmgIR0D7RmFVLSNPdX2UKGgGR8AVrLNfPX05aAdLMmgIR0D7RniQkonbdX2UKGgGR8AsKCPp6hQFaAdLMmgIR0D7RnhTFERbdX2UKGgGR8Ah9l7tzCDVaAdLMmgIR0D7RnvU6PsBdX2UKGgGR8AY83yZrpJPaAdLMmgIR0D7RpUCPp6hdX2UKGgGR8AWecbzbvgFaAdLMmgIR0D7RqyJXyRTdX2UKGgGR8ApUKgqVhTgaAdLMmgIR0D7RqxHe7+UdX2UKGgGR8Ab5QSBbwBpaAdLMmgIR0D7Rq/W6K+BdX2UKGgGR8AtyUsWfseGaAdLMmgIR0D7RskvSc9XdX2UKGgGR8AN4MrmQr+YaAdLMmgIR0D7RuB6w+t9dX2UKGgGR8AmOoZQ53kgaAdLMmgIR0D7RuA3qiXZdX2UKGgGR8Aa264Ds+mnaAdLMmgIR0D7RuO8eCCjdX2UKGgGR8AiTlLeyiVTaAdLMmgIR0D7Rv07u2JBdX2UKGgGR8AmBBGhEjPfaAdLMmgIR0D7RxT0dBBzdX2UKGgGR7+RGpda+vhZaAdLAWgIR0D7RxX51/2CdX2UKGgGR8AqgdrftQbdaAdLMmgIR0D7RxSps41hdX2UKGgGR8AwtiYb83uNaAdLMmgIR0D7RxgfoA4odX2UKGgGR8AoRwmVqveQaAdLMmgIR0D7RzEKAJ9idX2UKGgGR8AuqDbrTpgUaAdLMmgIR0D7R0mG6f8NdX2UKGgGR8Ab8L+glF+eaAdLMmgIR0D7R0g3X7LudX2UKGgGR8Ak5dWyTpxFaAdLMmgIR0D7R0vOdGy5dX2UKGgGR8AcZcPe54GEaAdLMmgIR0D7R2VO0b97dX2UKGgGR8AS/7N0NjLCaAdLMmgIR0D7R34bZvkzdX2UKGgGR8Agk3T/hl19aAdLMmgIR0D7R3zMs6JZdX2UKGgGR8AjSyRB/qgRaAdLMmgIR0D7R4BGZNO/dX2UKGgGR8AVQ0iyIHkcaAdLMmgIR0D7R5kUtZmqdX2UKGgGR8AmgKpDNQj2aAdLMmgIR0D7R7GOBDohdX2UKGgGR8AhGOAAhje9aAdLMmgIR0D7R7A9x6v8dX2UKGgGR8AiB15B1LamaAdLMmgIR0D7R7PF3pwCdX2UKGgGR8Aj1ObiIcioaAdLMmgIR0D7R80PKdQPdX2UKGgGR8AhvS4vvjOtaAdLMmgIR0D7R+Wdy1eCdX2UKGgGR8AlRTsIE8q4aAdLMmgIR0D7R+ROIqLCdX2UKGgGR8Ap6uaF23a0aAdLMmgIR0D7R+fmrsBydX2UKGgGR8AS6Op84PwvaAdLMmgIR0D7SAD+C9RKdX2UKGgGR8AKFxOtW+49aAdLMmgIR0D7SBlgccU/dX2UKGgGR8Addg+hXbM5aAdLMmgIR0D7SBgQxN7CdX2UKGgGR7+me4Cp3os7aAdLAWgIR0D7SBpxTsIFdX2UKGgGR8AKCmj0th/iaAdLMmgIR0D7SBulsguAdX2UKGgGR8AfUPSUkfLcaAdLMmgIR0D7SDTSJTESdX2UKGgGR8AwRT+NtIkJaAdLMmgIR0D7SExLU1AJdX2UKGgGR8AFPvx6OYICaAdLMmgIR0D7SE6tD2J0dX2UKGgGR8AvQkDZDiOvaAdLMmgIR0D7SFACyyD7dX2UKGgGR8Anw7ZnL7oCaAdLMmgIR0D7SGl/LDAKdX2UKGgGR8AHR3mmtQsPaAdLMmgIR0D7SIECkoF3dX2UKGgGR8AmCDmKZUkwaAdLMmgIR0D7SINdOqNqdX2UKGgGR8AqD0KZ2IO6aAdLMmgIR0D7SISKraM8dX2UKGgGR8AegvboKUmlaAdLMmgIR0D7SJ4JrLyMdX2UKGgGR8Al/fNzKcNIaAdLMmgIR0D7SLVmRNh3dX2UKGgGR8AmTF8XvYvnaAdLMmgIR0D7SLfKISDidX2UKGgGR8AkOB3A2ycDaAdLMmgIR0D7SLj8La24dX2UKGgGR8AuNfWMCLdfaAdLMmgIR0D7SNHXfZVXdX2UKGgGR8AvHgx8D0UXaAdLMmgIR0D7SOj0CRwIdX2UKGgGR8At/uvUz9CNaAdLMmgIR0D7SOtL6UJOdX2UKGgGR8AQONJe3QUpaAdLMmgIR0D7SOxlvIfbdX2UKGgGR8ALh2MbWEsbaAdLMmgIR0D7SQWxhlUZdX2UKGgGR8AZu+De0ojOaAdLMmgIR0D7SRzlP8AJdX2UKGgGR8ApGIsRQJokaAdLMmgIR0D7SR8+IMz/dX2UKGgGR8Ag2nR9gF5faAdLMmgIR0D7SSBzZHurdX2UKGgGR8AlfdN34bjtaAdLMmgIR0D7STm43WFwdX2UKGgGR8Ajei4axX4kaAdLMmgIR0D7SVDo/RmcdX2UKGgGR8AuT6CUX531aAdLMmgIR0D7SVNA80UHdX2UKGgGR8AoCOd5IH1OaAdLMmgIR0D7SVRh7E5ydX2UKGgGR8AY4yAQQL/kaAdLMmgIR0D7SW2MUAT7dX2UKGgGR8Akt0eU6gdwaAdLMmgIR0D7SYTK5kLAdX2UKGgGR8AngM5wOvt/aAdLMmgIR0D7SYcjENvwdX2UKGgGR8AjM+IuXeFdaAdLMmgIR0D7SYhLPldUdX2UKGgGR8
AqA7HQyAQQaAdLMmgIR0D7SaGKraM8dX2UKGgGR8Ari2YOUdJbaAdLMmgIR0D7SbjdZaFFdX2UKGgGR8AHVlf7aZhKaAdLMmgIR0D7Sbs79AHFdX2UKGgGR8Aoz0ulGgBcaAdLMmgIR0D7Sbx32VVxdX2UKGgGR8AWJ/nW8RL9aAdLMmgIR0D7SdVrWAf/dX2UKGgGR8AQOYTj/+85aAdLMmgIR0D7Sey3XI2gdX2UKGgGR8AMNNahYeT3aAdLMmgIR0D7Se8V3EAHdX2UKGgGR8AmF6j3225QaAdLMmgIR0D7SfBO+7DmdX2UKGgGR7+T9wWFev6kaAdLAWgIR0D7SfFkJ8fFdX2UKGgGR8AVvmW+oLofaAdLMmgIR0D7Sgl8Zk08dX2UKGgGR8APfcBU70WeaAdLMmgIR0D7SiDZJCjUdX2UKGgGR8AiLblA/s3RaAdLMmgIR0D7SiM0+1SgdX2UKGgGR8AlYvxH5JsgaAdLMmgIR0D7SiVnNgSfdWUu"}, "ep_success_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVhgAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKImJiYmIiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiImJiYmJiYmJiYmJiYmJiYmJiYmJiYiJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiYmJiImJiYllLg=="}, "_n_updates": 1249750, "observation_space": {":type:": "<class 'gymnasium.spaces.dict.Dict'>", ":serialized:": "gAWVMgQAAAAAAACMFWd5bW5hc2l1bS5zcGFjZXMuZGljdJSMBERpY3SUk5QpgZR9lCiMBnNwYWNlc5SMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwUZ3ltbmFzaXVtLnNwYWNlcy5ib3iUjANCb3iUk5QpgZR9lCiMBWR0eXBllIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYowNYm91bmRlZF9iZWxvd5SMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYDAAAAAAAAAAEBAZRoE4wCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksDhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoHCiWAwAAAAAAAAABAQGUaCBLA4WUaCR0lFKUjAZfc2hhcGWUSwOFlIwDbG93lGgcKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFksDhZRoJHSUUpSMBGhpZ2iUaBwolgwAAAAAAAAAAAAgQQAAIEEAACBBlGgWSwOFlGgkdJRSlIwIbG93X3JlcHKUjAUtMTAuMJSMCWhpZ2hfcmVwcpSMBDEwLjCUjApfbnBfcmFuZG9tlE51YowMZGVzaXJlZF9nb2FslGgNKYGUfZQoaBBoFmgZaBwolgMAAAAAAAAAAQEBlGggSwOFlGgkdJRSlGgnaBwolgMAAAAAAAAAAQEBlGggSwOFlGgkdJRSlGgsSwOFlGguaBwolgwAAAAAAAAAAAAgwQAAIMEAACDBlGgWSwOFlGgkdJRSlGgzaBwolgwAAAAAAAAAAAAgQQAAIEEAACBBlGgWSwOFlGgkdJRSlGg4jAUtMTAuMJRoOowEMTAuMJRoPE51YowLb2JzZXJ2YXRpb26UaA0pgZR9lChoEGgWaBloHCiWEwAAAAAAAAABAQEBAQEBAQEBAQEBAQEBAQEBlGggSxOFlGgkdJRSlGgnaBwolhMAAAAAAAAAAQEBAQEBAQEBAQEBAQEBAQEBAZRoIEsThZRoJHSUUpRoLEsThZRoLmgcKJZMAAAAAAAAAAAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMGUaBZLE4WUaCR0lFKUaDNoHCiWTAAAAAAAAAAAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBlGgWSxOFlGgkdJRSlGg4jAUtMTAuMJRoOowEMTAuMJRoPE51YnVoLE5oEE5oPE51Yi4=", "spaces": "OrderedDict([('achieved_goal', Box(-10.0, 10.0, (3,), float32)), ('desired_goal', Box(-10.0, 10.0, (3,), float32)), ('observation', Box(-10.0, 10.0, (19,), float32))])", "_shape": null, "dtype": null, "_np_random": null}, "action_space": {":type:": "<class 'gymnasium.spaces.box.Box'>", ":serialized:": "gAWVawIAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWBAAAAAAAAAABAQEBlGgIjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKUjA1ib3VuZGVkX2Fib3ZllGgRKJYEAAAAAAAAAAEBAQGUaBVLBIWUaBl0lFKUjAZfc2hhcGWUSwSFlIwDbG93lGgRKJYQAAAAAAAAAAAAgL8AAIC/AACAvwAAgL+UaAtLBIWUaBl0lFKUjARoaWdolGgRKJYQAAAAAAAAAAAAgD8AAIA/AACAPwAAgD+UaAtLBIWUaBl0lFKUjAhsb3dfcmVwcpSMBC0xLjCUjAloaWdoX3JlcHKUjAMxLjCUjApfbnBfcmFuZG9tlIwUbnVtcHkucmFuZG9tLl9waWNrbGWUjBBfX2dlbmVyYXRvcl9jdG9ylJOUjAVQQ0c2NJRoMowUX19iaXRfZ2VuZXJhdG9yX2N0b3KUk5SGlFKUfZQojA1iaXRfZ2VuZXJhdG9ylIwFUENHNjSUjAVzdGF0ZZR9lChoPYoQqtgCfl9nvmPM5C8i0S78HYwDaW5jlIoRe1vTIzPw7UEVeY/Kb4YxiwB1jApoYXNfdWludDMylEsAjAh1aW50ZWdlcpRLAHVidWIu", "dtype": "float32", 
"bounded_below": "[ True True True True]", "bounded_above": "[ True True True True]", "_shape": [4], "low": "[-1. -1. -1. -1.]", "high": "[1. 1. 1. 1.]", "low_repr": "-1.0", "high_repr": "1.0", "_np_random": "Generator(PCG64)"}, "n_envs": 1, "buffer_size": 1000000, "batch_size": 256, "learning_starts": 1000, "tau": 0.005, "gamma": 0.99, "gradient_steps": 1, "optimize_memory_usage": false, "replay_buffer_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVPwAAAAAAAACMJ3N0YWJsZV9iYXNlbGluZXMzLmhlci5oZXJfcmVwbGF5X2J1ZmZlcpSMD0hlclJlcGxheUJ1ZmZlcpSTlC4=", "__module__": "stable_baselines3.her.her_replay_buffer", "__annotations__": "{'env': typing.Optional[stable_baselines3.common.vec_env.base_vec_env.VecEnv]}", "__doc__": "\n Hindsight Experience Replay (HER) buffer.\n Paper: https://arxiv.org/abs/1707.01495\n\n Replay buffer for sampling HER (Hindsight Experience Replay) transitions.\n\n .. note::\n\n Compared to other implementations, the ``future`` goal sampling strategy is inclusive:\n the current transition can be used when re-sampling.\n\n :param buffer_size: Max number of element in the buffer\n :param observation_space: Observation space\n :param action_space: Action space\n :param env: The training environment\n :param device: PyTorch device\n :param n_envs: Number of parallel environments\n :param optimize_memory_usage: Enable a memory efficient variant\n Disabled for now (see https://github.com/DLR-RM/stable-baselines3/pull/243#discussion_r531535702)\n :param handle_timeout_termination: Handle timeout termination (due to timelimit)\n separately and treat the task as infinite horizon task.\n https://github.com/DLR-RM/stable-baselines3/issues/284\n :param n_sampled_goal: Number of virtual transitions to create per real transition,\n by sampling new goals.\n :param goal_selection_strategy: Strategy for sampling goals for replay.\n One of ['episode', 'final', 'future']\n :param copy_info_dict: Whether to copy the info dictionary and pass it to\n ``compute_reward()`` method.\n Please note that the copy may cause a slowdown.\n False by default.\n ", "__init__": "<function HerReplayBuffer.__init__ at 0x2a5c16ee0>", "__getstate__": "<function HerReplayBuffer.__getstate__ at 0x2a5c16f70>", "__setstate__": "<function HerReplayBuffer.__setstate__ at 0x2a5c28040>", "set_env": "<function HerReplayBuffer.set_env at 0x2a5c280d0>", "add": "<function HerReplayBuffer.add at 0x2a5c28160>", "_compute_episode_length": "<function HerReplayBuffer._compute_episode_length at 0x2a5c281f0>", "sample": "<function HerReplayBuffer.sample at 0x2a5c28280>", "_get_real_samples": "<function HerReplayBuffer._get_real_samples at 0x2a5c28310>", "_get_virtual_samples": "<function HerReplayBuffer._get_virtual_samples at 0x2a5c283a0>", "_sample_goals": "<function HerReplayBuffer._sample_goals at 0x2a5c28430>", "truncate_last_trajectory": "<function HerReplayBuffer.truncate_last_trajectory at 0x2a5c284c0>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x2a5c27840>"}, "replay_buffer_kwargs": {"n_sampled_goal": 4, "goal_selection_strategy": "future"}, "train_freq": {":type:": "<class 'stable_baselines3.common.type_aliases.TrainFreq'>", ":serialized:": "gAWVYQAAAAAAAACMJXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi50eXBlX2FsaWFzZXOUjAlUcmFpbkZyZXGUk5RLAWgAjBJUcmFpbkZyZXF1ZW5jeVVuaXSUk5SMBHN0ZXCUhZRSlIaUgZQu"}, "use_sde_at_warmup": false, "target_entropy": -4.0, "ent_coef": "auto", "target_update_interval": 1, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": 
"gAWV+QIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMYy9Vc2Vycy9qYnVjaDgwOC9taW5pZm9yZ2UzL2VudnMvaGYtcmwvbGliL3B5dGhvbjMuOS9zaXRlLXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZSMBGZ1bmOUS4NDAgABlIwDdmFslIWUKXSUUpR9lCiMC19fcGFja2FnZV9flIwYc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9ulIwIX19uYW1lX1+UjB5zdGFibGVfYmFzZWxpbmVzMy5jb21tb24udXRpbHOUjAhfX2ZpbGVfX5SMYy9Vc2Vycy9qYnVjaDgwOC9taW5pZm9yZ2UzL2VudnMvaGYtcmwvbGliL3B5dGhvbjMuOS9zaXRlLXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZR1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgffZR9lChoFmgNjAxfX3F1YWxuYW1lX1+UjBljb25zdGFudF9mbi48bG9jYWxzPi5mdW5jlIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5RoF4wHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5RoAIwKX21ha2VfY2VsbJSTlEc/M6kqMFUyYYWUUpSFlIwXX2Nsb3VkcGlja2xlX3N1Ym1vZHVsZXOUXZSMC19fZ2xvYmFsc19flH2UdYaUhlIwLg=="}, "batch_norm_stats": [], "batch_norm_stats_target": [], "system_info": {"OS": "macOS-14.1.1-arm64-i386-64bit Darwin Kernel Version 23.1.0: Mon Oct 9 21:27:24 PDT 2023; root:xnu-10002.41.9~6/RELEASE_ARM64_T6000", "Python": "3.9.18", "Stable-Baselines3": "2.2.1", "PyTorch": "2.2.0", "GPU Enabled": "False", "Numpy": "1.26.3", "Cloudpickle": "2.2.1", "Gymnasium": "0.29.1", "OpenAI Gym": "0.26.2"}} |