CrispyJLoHalo committed
Commit
bc173e9
1 Parent(s): 34d393d

Upload DQN CartPole-v1 trained agent

README.md ADDED
@@ -0,0 +1,37 @@
+ ---
+ library_name: stable-baselines3
+ tags:
+ - CartPole-v1
+ - deep-reinforcement-learning
+ - reinforcement-learning
+ - stable-baselines3
+ model-index:
+ - name: DQN
+   results:
+   - task:
+       type: reinforcement-learning
+       name: reinforcement-learning
+     dataset:
+       name: CartPole-v1
+       type: CartPole-v1
+     metrics:
+     - type: mean_reward
+       value: 20.50 +/- 2.11
+       name: mean_reward
+       verified: false
+ ---
+
+ # **DQN** Agent playing **CartPole-v1**
+ This is a trained model of a **DQN** agent playing **CartPole-v1**
+ using the [stable-baselines3 library](https://github.com/DLR-RM/stable-baselines3).
+
+ ## Usage (with Stable-baselines3)
+ The snippet below loads the trained agent from the Hub; the repo id shown is an assumption and should be replaced with this model's actual repository id.
+
+ ```python
+ from huggingface_sb3 import load_from_hub
+ from stable_baselines3 import DQN
+
+ # Download the checkpoint from the Hub and load it
+ # (repo_id is a placeholder -- use this model's repository id).
+ checkpoint = load_from_hub(
+     repo_id="CrispyJLoHalo/dqn-CartPole-v1",
+     filename="dqn-CartPole-v1.zip",
+ )
+ model = DQN.load(checkpoint)
+ ```
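+
+ A short rollout sketch follows, assuming the `model` loaded above; `gymnasium`, the episode loop, and the `int(action)` cast are additions for illustration and are not part of the original card.
+
+ ```python
+ import gymnasium as gym
+
+ # Run one greedy episode with the loaded agent (illustrative sketch).
+ env = gym.make("CartPole-v1")
+ obs, info = env.reset()
+ done = False
+ episode_return = 0.0
+ while not done:
+     action, _states = model.predict(obs, deterministic=True)
+     obs, reward, terminated, truncated, info = env.step(int(action))
+     episode_return += float(reward)
+     done = terminated or truncated
+ print(f"Episode return: {episode_return}")
+ ```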
config.json ADDED
@@ -0,0 +1 @@
+ {"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVMAAAAAAAAACMHnN0YWJsZV9iYXNlbGluZXMzLmRxbi5wb2xpY2llc5SMCURRTlBvbGljeZSTlC4=", "__module__": "stable_baselines3.dqn.policies", "__annotations__": "{'q_net': <class 'stable_baselines3.dqn.policies.QNetwork'>, 'q_net_target': <class 'stable_baselines3.dqn.policies.QNetwork'>}", "__doc__": "\n Policy class with Q-Value Net and target net for DQN\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function DQNPolicy.__init__ at 0x000002BC8E39A0E0>", "_build": "<function DQNPolicy._build at 0x000002BC8E39A170>", "make_q_net": "<function DQNPolicy.make_q_net at 0x000002BC8E39A200>", "forward": "<function DQNPolicy.forward at 0x000002BC8E39A290>", "_predict": "<function DQNPolicy._predict at 0x000002BC8E39A320>", "_get_constructor_parameters": "<function DQNPolicy._get_constructor_parameters at 0x000002BC8E39A3B0>", "set_training_mode": "<function DQNPolicy.set_training_mode at 0x000002BC8E39A440>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x000002BC8E3AABC0>"}, "verbose": 1, "policy_kwargs": {}, "num_timesteps": 50000, "_total_timesteps": 50000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1713557964966195100, "learning_rate": 0.0001, "tensorboard_log": null, "_last_obs": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVhQAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYQAAAAAAAAAKWHs7w5bVC/CmoPvHnmjj+UjAVudW1weZSMBWR0eXBllJOUjAJmNJSJiIeUUpQoSwOMATyUTk5OSv////9K/////0sAdJRiSwFLBIaUjAFDlHSUUpQu"}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVdAAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYBAAAAAAAAAAGUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwGFlIwBQ5R0lFKULg=="}, "_last_original_obs": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVhQAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYQAAAAAAAAADEYHLwLkR6/+f3PvP7xVD+UjAVudW1weZSMBWR0eXBllJOUjAJmNJSJiIeUUpQoSwOMATyUTk5OSv////9K/////0sAdJRiSwFLBIaUjAFDlHSUUpQu"}, "_episode_num": 2590, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": 0.0, "_stats_window_size": 100, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": 
"gAWV4AsAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHQDgAAAAAAACMAWyUSxiMAXSUR0CQVgyO7xusdX2UKGgGR0AmAAAAAAAAaAdLC2gIR0CQVwtcfNiZdX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQWMmBvrGBdX2UKGgGR0A2AAAAAAAAaAdLFmgIR0CQWrDWsijddX2UKGgGR0A1AAAAAAAAaAdLFWgIR0CQXH4SpR4ydX2UKGgGR0AkAAAAAAAAaAdLCmgIR0CQXWVC5VfedX2UKGgGR0A5AAAAAAAAaAdLGWgIR0CQX4hcZ9/jdX2UKGgGR0A1AAAAAAAAaAdLFWgIR0CQYVUhFEy+dX2UKGgGR0AyAAAAAAAAaAdLEmgIR0CQYuNliBoVdX2UKGgGR0AzAAAAAAAAaAdLE2gIR0CQZIjawljWdX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQZj+s5n14dX2UKGgGR0A6AAAAAAAAaAdLGmgIR0CQaHzf779AdX2UKGgGR0AuAAAAAAAAaAdLD2gIR0CQadEcKgIydX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQa45KvmozdX2UKGgGR0A2AAAAAAAAaAdLFmgIR0CQbXFuNxVAdX2UKGgGR0AyAAAAAAAAaAdLEmgIR0CQbv/WDpTudX2UKGgGR0A4AAAAAAAAaAdLGGgIR0CQcRK0lZ5idX2UKGgGR0AxAAAAAAAAaAdLEWgIR0CQcpDR+jM3dX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQdE8CxNZedX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQdgjQAuIzdX2UKGgGR0A2AAAAAAAAaAdLFmgIR0CQd+9HMEA6dX2UKGgGR0A6AAAAAAAAaAdLGmgIR0CQeinJkoWpdX2UKGgGR0A2AAAAAAAAaAdLFmgIR0CQfA8f3evZdX2UKGgGR0A2AAAAAAAAaAdLFmgIR0CQffhlUZNxdX2UKGgGR0AxAAAAAAAAaAdLEWgIR0CQf3XNTtLMdX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQgTH6/IsAdX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQgu25xzaLdX2UKGgGR0AzAAAAAAAAaAdLE2gIR0CQhJGy5Zr6dX2UKGgGR0A2AAAAAAAAaAdLFmgIR0CQhnGkep4sdX2UKGgGR0A2AAAAAAAAaAdLFmgIR0CQiFhB7eEadX2UKGgGR0A4AAAAAAAAaAdLGGgIR0CQimZ/kNnXdX2UKGgGR0A1AAAAAAAAaAdLFWgIR0CQjDWTX8O1dX2UKGgGR0AzAAAAAAAAaAdLE2gIR0CQjdlQuVX4dX2UKGgGR0A2AAAAAAAAaAdLFmgIR0CQj79Q40djdX2UKGgGR0A1AAAAAAAAaAdLFWgIR0CQkYxi5NGmdX2UKGgGR0AxAAAAAAAAaAdLEWgIR0CQkwamoBJadX2UKGgGR0A1AAAAAAAAaAdLFWgIR0CQlNKkl/pddX2UKGgGR0A1AAAAAAAAaAdLFWgIR0CQlqBYV6/qdX2UKGgGR0AkAAAAAAAAaAdLCmgIR0CQl4kwN9YwdX2UKGgGR0AyAAAAAAAAaAdLEmgIR0CQmRtq59VndX2UKGgGR0AuAAAAAAAAaAdLD2gIR0CQmmiXIEKWdX2UKGgGR0AkAAAAAAAAaAdLCmgIR0CQm03Ytg8bdX2UKGgGR0AxAAAAAAAAaAdLEWgIR0CQnMpEx7AtdX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQnoY9xIatdX2UKGgGR0A2AAAAAAAAaAdLFmgIR0CQoGvJzT4MdX2UKGgGR0AyAAAAAAAAaAdLEmgIR0CQof25hBqsdX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQo7xLkCFLdX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQpXqWTot+dX2UKGgGR0AzAAAAAAAAaAdLE2gIR0CQpyB1s+FDdX2UKGgGR0AwAAAAAAAAaAdLEGgIR0CQqIcO9WZJdX2UKGgGR0A1AAAAAAAAaAdLFWgIR0CQqlmjTKDDdX2UKGgGR0AkAAAAAAAAaAdLCmgIR0CQq0HP/rB1dX2UKGgGR0AzAAAAAAAAaAdLE2gIR0CQrOi6xxDLdX2UKGgGR0AzAAAAAAAAaAdLE2gIR0CQrpFpfx+bdX2UKGgGR0A1AAAAAAAAaAdLFWgIR0CQsF91U2k0dX2UKGgGR0A1AAAAAAAAaAdLFWgIR0CQsjM6BAfMdX2UKGgGR0AyAAAAAAAAaAdLEmgIR0CQs8RmbsnidX2UKGgGR0AiAAAAAAAAaAdLCWgIR0CQtJmiQDFIdX2UKGgGR0A7AAAAAAAAaAdLG2gIR0CQtuU9ZA6ddX2UKGgGR0AiAAAAAAAAaAdLCWgIR0CQt7oQWepXdX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQuXNy5qdpdX2UKGgGR0A1AAAAAAAAaAdLFWgIR0CQu0VdonKGdX2UKGgGR0AxAAAAAAAAaAdLEWgIR0CQvL7zkIX1dX2UKGgGR0A1AAAAAAAAaAdLFWgIR0CQvo9jwx33dX2UKGgGR0AxAAAAAAAAaAdLEWgIR0CQwA1BdD6WdX2UKGgGR0AzAAAAAAAAaAdLE2gIR0CQwbMUAT7EdX2UKGgGR0AyAAAAAAAAaAdLEmgIR0CQw0NrCWNWdX2UKGgGR0AxAAAAAAAAaAdLEWgIR0CQxL7xd6cBdX2UKGgGR0A7AAAAAAAAaAdLG2gIR0CQxwkIomXxdX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQyMeU6gdwdX2UKGgGR0A1AAAAAAAAaAdLFWgIR0CQypVn27FsdX2UKGgGR0AyAAAAAAAAaAdLEmgIR0CQzB+dsi0OdX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQzds/6frbdX2UKGgGR0AyAAAAAAAAaAdLEmgIR0CQz2v+wTufdX2UKGgGR0AkAAAAAAAAaAdLCmgIR0CQ0FMcp9ZzdX2UKGgGR0AzAAAAAAAAaAdLE2gIR0CQ0fc580DVdX2UKGgGR0AmAAAAAAAAaAdLC2gIR0CQ0vWAPNFCdX2UKGgGR0A3AAAAAAAAaAdLF2gIR0CQ1OpMpPRBdX2UKGgGR0AyAAAAAAAAaAdLEmgIR0CQ1nvkBCD3dX2UKGgGR0A2AAAAAAAAaAdLFmgIR0CQ2GOVxCIDdX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQ2h0GeMAFdX2UKGgGR0AzAAAAAAAAaAdLE2gIR0CQ28hStNi6dX2UKGgGR0A4AAAAAAAAaAdLGGgIR0CQ3diB5HEudX2UKGgGR0A2AAAAAAAAaAdLFmgIR0CQ37zhgmZ3dX2UKGgGR0A2AAAAAAAAaAdLFmgIR0CQ4aHryDqXdX2UKGgGR0AxAAAAAAAAaAdLEWgIR0CQ4yDQqqffdX2UKGgGR0AyAAAAAAAAaAdLEmgIR0CQ5LIqLCN0dX2UKGgGR0
A2AAAAAAAAaAdLFmgIR0CQ5phje9BbdX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQ6E+GoJiRdX2UKGgGR0AkAAAAAAAAaAdLCmgIR0CQ6TgG8mKJdX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQ6vGxUvPDdX2UKGgGR0A4AAAAAAAAaAdLGGgIR0CQ7QQ5FPSEdX2UKGgGR0AxAAAAAAAAaAdLEWgIR0CQ7n9lmOENdX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQ8Dl7MPjGdX2UKGgGR0A2AAAAAAAAaAdLFmgIR0CQ8hiKiwjddX2UKGgGR0A4AAAAAAAAaAdLGGgIR0CQ9Ckf9xZMdX2UKGgGR0AyAAAAAAAAaAdLEmgIR0CQ9btcv/R3dX2UKGgGR0A2AAAAAAAAaAdLFmgIR0CQ959PUKAsdX2UKGgGR0AzAAAAAAAAaAdLE2gIR0CQ+UfD1oQGdX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQ+wD+irT6dWUu"}, "ep_success_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 12475, "observation_space": {":type:": "<class 'gymnasium.spaces.box.Box'>", ":serialized:": "gAWVFgIAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWBAAAAAAAAAABAQEBlGgIjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKUjA1ib3VuZGVkX2Fib3ZllGgRKJYEAAAAAAAAAAEBAQGUaBVLBIWUaBl0lFKUjAZfc2hhcGWUSwSFlIwDbG93lGgRKJYQAAAAAAAAAJqZmcD//3//UHfWvv//f/+UaAtLBIWUaBl0lFKUjARoaWdolGgRKJYQAAAAAAAAAJqZmUD//39/UHfWPv//f3+UaAtLBIWUaBl0lFKUjAhsb3dfcmVwcpSMPVstNC44MDAwMDAyZSswMCAtMy40MDI4MjM1ZSszOCAtNC4xODg3OTAzZS0wMSAtMy40MDI4MjM1ZSszOF2UjAloaWdoX3JlcHKUjDlbNC44MDAwMDAyZSswMCAzLjQwMjgyMzVlKzM4IDQuMTg4NzkwM2UtMDEgMy40MDI4MjM1ZSszOF2UjApfbnBfcmFuZG9tlE51Yi4=", "dtype": "float32", "bounded_below": "[ True True True True]", "bounded_above": "[ True True True True]", "_shape": [4], "low": "[-4.8000002e+00 -3.4028235e+38 -4.1887903e-01 -3.4028235e+38]", "high": "[4.8000002e+00 3.4028235e+38 4.1887903e-01 3.4028235e+38]", "low_repr": "[-4.8000002e+00 -3.4028235e+38 -4.1887903e-01 -3.4028235e+38]", "high_repr": "[4.8000002e+00 3.4028235e+38 4.1887903e-01 3.4028235e+38]", "_np_random": null}, "action_space": {":type:": "<class 'gymnasium.spaces.discrete.Discrete'>", ":serialized:": "gAWVowEAAAAAAACMGWd5bW5hc2l1bS5zcGFjZXMuZGlzY3JldGWUjAhEaXNjcmV0ZZSTlCmBlH2UKIwBbpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCaTiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIAgAAAAAAAACUhpRSlIwFc3RhcnSUaAhoDkMIAAAAAAAAAACUhpRSlIwGX3NoYXBllCmMBWR0eXBllGgOjApfbnBfcmFuZG9tlIwUbnVtcHkucmFuZG9tLl9waWNrbGWUjBBfX2dlbmVyYXRvcl9jdG9ylJOUjAVQQ0c2NJRoG4wUX19iaXRfZ2VuZXJhdG9yX2N0b3KUk5SGlFKUfZQojA1iaXRfZ2VuZXJhdG9ylIwFUENHNjSUjAVzdGF0ZZR9lChoJooRPuJ3wCrypII7Idyz9/hoxgCMA2luY5SKESN8gSQijeRdbp4QBgbpcagAdYwKaGFzX3VpbnQzMpRLAYwIdWludGVnZXKUStW5bgV1YnViLg==", "n": "2", "start": "0", "_shape": [], "dtype": "int64", "_np_random": "Generator(PCG64)"}, "n_envs": 1, "buffer_size": 1000000, "batch_size": 32, "learning_starts": 100, "tau": 1.0, "gamma": 0.99, "gradient_steps": 1, "optimize_memory_usage": false, "replay_buffer_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVNQAAAAAAAACMIHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5idWZmZXJzlIwMUmVwbGF5QnVmZmVylJOULg==", "__module__": "stable_baselines3.common.buffers", "__annotations__": "{'observations': <class 'numpy.ndarray'>, 'next_observations': <class 'numpy.ndarray'>, 'actions': <class 'numpy.ndarray'>, 'rewards': <class 'numpy.ndarray'>, 'dones': <class 'numpy.ndarray'>, 'timeouts': <class 'numpy.ndarray'>}", "__doc__": "\n Replay buffer used in off-policy algorithms like SAC/TD3.\n\n :param buffer_size: Max number of element in the buffer\n :param observation_space: Observation space\n :param action_space: Action space\n :param 
device: PyTorch device\n :param n_envs: Number of parallel environments\n :param optimize_memory_usage: Enable a memory efficient variant\n of the replay buffer which reduces by almost a factor two the memory used,\n at a cost of more complexity.\n See https://github.com/DLR-RM/stable-baselines3/issues/37#issuecomment-637501195\n and https://github.com/DLR-RM/stable-baselines3/pull/28#issuecomment-637559274\n Cannot be used in combination with handle_timeout_termination.\n :param handle_timeout_termination: Handle timeout termination (due to timelimit)\n separately and treat the task as infinite horizon task.\n https://github.com/DLR-RM/stable-baselines3/issues/284\n ", "__init__": "<function ReplayBuffer.__init__ at 0x000002BC8E0B3130>", "add": "<function ReplayBuffer.add at 0x000002BC8E0B31C0>", "sample": "<function ReplayBuffer.sample at 0x000002BC8E0B3250>", "_get_samples": "<function ReplayBuffer._get_samples at 0x000002BC8E0B32E0>", "_maybe_cast_dtype": "<staticmethod(<function ReplayBuffer._maybe_cast_dtype at 0x000002BC8E0B3370>)>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x000002BC8B8AD700>"}, "replay_buffer_kwargs": {}, "train_freq": {":type:": "<class 'stable_baselines3.common.type_aliases.TrainFreq'>", ":serialized:": "gAWVYQAAAAAAAACMJXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi50eXBlX2FsaWFzZXOUjAlUcmFpbkZyZXGUk5RLBGgAjBJUcmFpbkZyZXF1ZW5jeVVuaXSUk5SMBHN0ZXCUhZRSlIaUgZQu"}, "use_sde_at_warmup": false, "exploration_initial_eps": 1.0, "exploration_final_eps": 0.05, "exploration_fraction": 0.1, "target_update_interval": 10000, "_n_calls": 50000, "max_grad_norm": 10, "exploration_rate": 0.05, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVlAIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMf2Q6XGpvbmFoXERvY3VtZW50c1xVbmlcV0lcMDNfQmFjaGVsb3JhcmJlaXRcSW1wbGVtZW50YXRpb25cR3ltbmFzaXVtXC5jb25kYVxsaWJcc2l0ZS1wYWNrYWdlc1xzdGFibGVfYmFzZWxpbmVzM1xjb21tb25cdXRpbHMucHmUjARmdW5jlEuDQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UaAx1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlGgAjBJfZnVuY3Rpb25fc2V0c3RhdGWUk5RoHn2UfZQoaBZoDYwMX19xdWFsbmFtZV9flIwZY29uc3RhbnRfZm4uPGxvY2Fscz4uZnVuY5SMD19fYW5ub3RhdGlvbnNfX5R9lIwOX19rd2RlZmF1bHRzX1+UTowMX19kZWZhdWx0c19flE6MCl9fbW9kdWxlX1+UaBeMB19fZG9jX1+UTowLX19jbG9zdXJlX1+UaACMCl9tYWtlX2NlbGyUk5RHPxo24uscQy2FlFKUhZSMF19jbG91ZHBpY2tsZV9zdWJtb2R1bGVzlF2UjAtfX2dsb2JhbHNfX5R9lHWGlIZSMC4="}, "batch_norm_stats": [], "batch_norm_stats_target": [], "exploration_schedule": {":type:": "<class 'function'>", ":serialized:": 
"gAWVNAMAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLBEsTQyhkAXwAGACIAWsEcgiIAFMAiAJkAXwAGACIAIgCGAAUAIgBGwAXAFMAlE5LAYaUKYwScHJvZ3Jlc3NfcmVtYWluaW5nlIWUjH9kOlxqb25haFxEb2N1bWVudHNcVW5pXFdJXDAzX0JhY2hlbG9yYXJiZWl0XEltcGxlbWVudGF0aW9uXEd5bW5hc2l1bVwuY29uZGFcbGliXHNpdGUtcGFja2FnZXNcc3RhYmxlX2Jhc2VsaW5lczNcY29tbW9uXHV0aWxzLnB5lIwEZnVuY5RLcUMGDAEEARgClIwDZW5klIwMZW5kX2ZyYWN0aW9ulIwFc3RhcnSUh5QpdJRSlH2UKIwLX19wYWNrYWdlX1+UjBhzdGFibGVfYmFzZWxpbmVzMy5jb21tb26UjAhfX25hbWVfX5SMHnN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi51dGlsc5SMCF9fZmlsZV9flGgMdU5OaACMEF9tYWtlX2VtcHR5X2NlbGyUk5QpUpRoHClSlGgcKVKUh5R0lFKUaACMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgifZR9lChoGGgNjAxfX3F1YWxuYW1lX1+UjBtnZXRfbGluZWFyX2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZQoaAqMCGJ1aWx0aW5zlIwFZmxvYXSUk5SMBnJldHVybpRoLXWMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgZjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz+pmZmZmZmahZRSlGg1Rz+5mZmZmZmahZRSlGg1Rz/wAAAAAAAAhZRSlIeUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "system_info": {"OS": "Windows-10-10.0.22631-SP0 10.0.22631", "Python": "3.10.14", "Stable-Baselines3": "2.3.0", "PyTorch": "2.2.2+cpu", "GPU Enabled": "False", "Numpy": "1.26.4", "Cloudpickle": "3.0.0", "Gymnasium": "0.29.1"}}
dqn-CartPole-v1.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:245372fa2130d7790cf9fcd5b5a4206ad7b203e40cf2ab5e7f1b3ee206b31f64
+ size 100062
dqn-CartPole-v1/_stable_baselines3_version ADDED
@@ -0,0 +1 @@
+ 2.3.0
dqn-CartPole-v1/data ADDED
@@ -0,0 +1,123 @@
1
+ {
2
+ "policy_class": {
3
+ ":type:": "<class 'abc.ABCMeta'>",
4
+ ":serialized:": "gAWVMAAAAAAAAACMHnN0YWJsZV9iYXNlbGluZXMzLmRxbi5wb2xpY2llc5SMCURRTlBvbGljeZSTlC4=",
5
+ "__module__": "stable_baselines3.dqn.policies",
6
+ "__annotations__": "{'q_net': <class 'stable_baselines3.dqn.policies.QNetwork'>, 'q_net_target': <class 'stable_baselines3.dqn.policies.QNetwork'>}",
7
+ "__doc__": "\n Policy class with Q-Value Net and target net for DQN\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
8
+ "__init__": "<function DQNPolicy.__init__ at 0x000002BC8E39A0E0>",
9
+ "_build": "<function DQNPolicy._build at 0x000002BC8E39A170>",
10
+ "make_q_net": "<function DQNPolicy.make_q_net at 0x000002BC8E39A200>",
11
+ "forward": "<function DQNPolicy.forward at 0x000002BC8E39A290>",
12
+ "_predict": "<function DQNPolicy._predict at 0x000002BC8E39A320>",
13
+ "_get_constructor_parameters": "<function DQNPolicy._get_constructor_parameters at 0x000002BC8E39A3B0>",
14
+ "set_training_mode": "<function DQNPolicy.set_training_mode at 0x000002BC8E39A440>",
15
+ "__abstractmethods__": "frozenset()",
16
+ "_abc_impl": "<_abc._abc_data object at 0x000002BC8E3AABC0>"
17
+ },
18
+ "verbose": 1,
19
+ "policy_kwargs": {},
20
+ "num_timesteps": 50000,
21
+ "_total_timesteps": 50000,
22
+ "_num_timesteps_at_start": 0,
23
+ "seed": null,
24
+ "action_noise": null,
25
+ "start_time": 1713557964966195100,
26
+ "learning_rate": 0.0001,
27
+ "tensorboard_log": null,
28
+ "_last_obs": {
29
+ ":type:": "<class 'numpy.ndarray'>",
30
+ ":serialized:": "gAWVhQAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYQAAAAAAAAAKWHs7w5bVC/CmoPvHnmjj+UjAVudW1weZSMBWR0eXBllJOUjAJmNJSJiIeUUpQoSwOMATyUTk5OSv////9K/////0sAdJRiSwFLBIaUjAFDlHSUUpQu"
31
+ },
32
+ "_last_episode_starts": {
33
+ ":type:": "<class 'numpy.ndarray'>",
34
+ ":serialized:": "gAWVdAAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYBAAAAAAAAAAGUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwGFlIwBQ5R0lFKULg=="
35
+ },
36
+ "_last_original_obs": {
37
+ ":type:": "<class 'numpy.ndarray'>",
38
+ ":serialized:": "gAWVhQAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYQAAAAAAAAADEYHLwLkR6/+f3PvP7xVD+UjAVudW1weZSMBWR0eXBllJOUjAJmNJSJiIeUUpQoSwOMATyUTk5OSv////9K/////0sAdJRiSwFLBIaUjAFDlHSUUpQu"
39
+ },
40
+ "_episode_num": 2590,
41
+ "use_sde": false,
42
+ "sde_sample_freq": -1,
43
+ "_current_progress_remaining": 0.0,
44
+ "_stats_window_size": 100,
45
+ "ep_info_buffer": {
46
+ ":type:": "<class 'collections.deque'>",
47
+ ":serialized:": "gAWV4AsAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHQDgAAAAAAACMAWyUSxiMAXSUR0CQVgyO7xusdX2UKGgGR0AmAAAAAAAAaAdLC2gIR0CQVwtcfNiZdX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQWMmBvrGBdX2UKGgGR0A2AAAAAAAAaAdLFmgIR0CQWrDWsijddX2UKGgGR0A1AAAAAAAAaAdLFWgIR0CQXH4SpR4ydX2UKGgGR0AkAAAAAAAAaAdLCmgIR0CQXWVC5VfedX2UKGgGR0A5AAAAAAAAaAdLGWgIR0CQX4hcZ9/jdX2UKGgGR0A1AAAAAAAAaAdLFWgIR0CQYVUhFEy+dX2UKGgGR0AyAAAAAAAAaAdLEmgIR0CQYuNliBoVdX2UKGgGR0AzAAAAAAAAaAdLE2gIR0CQZIjawljWdX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQZj+s5n14dX2UKGgGR0A6AAAAAAAAaAdLGmgIR0CQaHzf779AdX2UKGgGR0AuAAAAAAAAaAdLD2gIR0CQadEcKgIydX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQa45KvmozdX2UKGgGR0A2AAAAAAAAaAdLFmgIR0CQbXFuNxVAdX2UKGgGR0AyAAAAAAAAaAdLEmgIR0CQbv/WDpTudX2UKGgGR0A4AAAAAAAAaAdLGGgIR0CQcRK0lZ5idX2UKGgGR0AxAAAAAAAAaAdLEWgIR0CQcpDR+jM3dX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQdE8CxNZedX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQdgjQAuIzdX2UKGgGR0A2AAAAAAAAaAdLFmgIR0CQd+9HMEA6dX2UKGgGR0A6AAAAAAAAaAdLGmgIR0CQeinJkoWpdX2UKGgGR0A2AAAAAAAAaAdLFmgIR0CQfA8f3evZdX2UKGgGR0A2AAAAAAAAaAdLFmgIR0CQffhlUZNxdX2UKGgGR0AxAAAAAAAAaAdLEWgIR0CQf3XNTtLMdX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQgTH6/IsAdX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQgu25xzaLdX2UKGgGR0AzAAAAAAAAaAdLE2gIR0CQhJGy5Zr6dX2UKGgGR0A2AAAAAAAAaAdLFmgIR0CQhnGkep4sdX2UKGgGR0A2AAAAAAAAaAdLFmgIR0CQiFhB7eEadX2UKGgGR0A4AAAAAAAAaAdLGGgIR0CQimZ/kNnXdX2UKGgGR0A1AAAAAAAAaAdLFWgIR0CQjDWTX8O1dX2UKGgGR0AzAAAAAAAAaAdLE2gIR0CQjdlQuVX4dX2UKGgGR0A2AAAAAAAAaAdLFmgIR0CQj79Q40djdX2UKGgGR0A1AAAAAAAAaAdLFWgIR0CQkYxi5NGmdX2UKGgGR0AxAAAAAAAAaAdLEWgIR0CQkwamoBJadX2UKGgGR0A1AAAAAAAAaAdLFWgIR0CQlNKkl/pddX2UKGgGR0A1AAAAAAAAaAdLFWgIR0CQlqBYV6/qdX2UKGgGR0AkAAAAAAAAaAdLCmgIR0CQl4kwN9YwdX2UKGgGR0AyAAAAAAAAaAdLEmgIR0CQmRtq59VndX2UKGgGR0AuAAAAAAAAaAdLD2gIR0CQmmiXIEKWdX2UKGgGR0AkAAAAAAAAaAdLCmgIR0CQm03Ytg8bdX2UKGgGR0AxAAAAAAAAaAdLEWgIR0CQnMpEx7AtdX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQnoY9xIatdX2UKGgGR0A2AAAAAAAAaAdLFmgIR0CQoGvJzT4MdX2UKGgGR0AyAAAAAAAAaAdLEmgIR0CQof25hBqsdX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQo7xLkCFLdX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQpXqWTot+dX2UKGgGR0AzAAAAAAAAaAdLE2gIR0CQpyB1s+FDdX2UKGgGR0AwAAAAAAAAaAdLEGgIR0CQqIcO9WZJdX2UKGgGR0A1AAAAAAAAaAdLFWgIR0CQqlmjTKDDdX2UKGgGR0AkAAAAAAAAaAdLCmgIR0CQq0HP/rB1dX2UKGgGR0AzAAAAAAAAaAdLE2gIR0CQrOi6xxDLdX2UKGgGR0AzAAAAAAAAaAdLE2gIR0CQrpFpfx+bdX2UKGgGR0A1AAAAAAAAaAdLFWgIR0CQsF91U2k0dX2UKGgGR0A1AAAAAAAAaAdLFWgIR0CQsjM6BAfMdX2UKGgGR0AyAAAAAAAAaAdLEmgIR0CQs8RmbsnidX2UKGgGR0AiAAAAAAAAaAdLCWgIR0CQtJmiQDFIdX2UKGgGR0A7AAAAAAAAaAdLG2gIR0CQtuU9ZA6ddX2UKGgGR0AiAAAAAAAAaAdLCWgIR0CQt7oQWepXdX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQuXNy5qdpdX2UKGgGR0A1AAAAAAAAaAdLFWgIR0CQu0VdonKGdX2UKGgGR0AxAAAAAAAAaAdLEWgIR0CQvL7zkIX1dX2UKGgGR0A1AAAAAAAAaAdLFWgIR0CQvo9jwx33dX2UKGgGR0AxAAAAAAAAaAdLEWgIR0CQwA1BdD6WdX2UKGgGR0AzAAAAAAAAaAdLE2gIR0CQwbMUAT7EdX2UKGgGR0AyAAAAAAAAaAdLEmgIR0CQw0NrCWNWdX2UKGgGR0AxAAAAAAAAaAdLEWgIR0CQxL7xd6cBdX2UKGgGR0A7AAAAAAAAaAdLG2gIR0CQxwkIomXxdX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQyMeU6gdwdX2UKGgGR0A1AAAAAAAAaAdLFWgIR0CQypVn27FsdX2UKGgGR0AyAAAAAAAAaAdLEmgIR0CQzB+dsi0OdX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQzds/6frbdX2UKGgGR0AyAAAAAAAAaAdLEmgIR0CQz2v+wTufdX2UKGgGR0AkAAAAAAAAaAdLCmgIR0CQ0FMcp9ZzdX2UKGgGR0AzAAAAAAAAaAdLE2gIR0CQ0fc580DVdX2UKGgGR0AmAAAAAAAAaAdLC2gIR0CQ0vWAPNFCdX2UKGgGR0A3AAAAAAAAaAdLF2gIR0CQ1OpMpPRBdX2UKGgGR0AyAAAAAAAAaAdLEmgIR0CQ1nvkBCD3dX2UKGgGR0A2AAAAAAAAaAdLFmgIR0CQ2GOVxCIDdX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQ2h0GeMAFdX2UKGgGR0AzAAAAAAAAaAdLE2gIR0CQ28hStNi6dX2UKGgGR0A4AAAAAAAAaAdLGGgIR0CQ3diB5HEudX2UKGgGR0A2AAAAAAAAaAdLFmgIR0CQ37zhgmZ3dX2UKGgGR0A2AAAAAAAAaAdLFmgIR0CQ4aHryDqXdX2UKGgGR0AxAAAAAAAAaAdLEWgIR0CQ4yDQqqffdX2UKGgGR0AyAAAAAAAAaAdLEmgIR0CQ
5LIqLCN0dX2UKGgGR0A2AAAAAAAAaAdLFmgIR0CQ5phje9BbdX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQ6E+GoJiRdX2UKGgGR0AkAAAAAAAAaAdLCmgIR0CQ6TgG8mKJdX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQ6vGxUvPDdX2UKGgGR0A4AAAAAAAAaAdLGGgIR0CQ7QQ5FPSEdX2UKGgGR0AxAAAAAAAAaAdLEWgIR0CQ7n9lmOENdX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQ8Dl7MPjGdX2UKGgGR0A2AAAAAAAAaAdLFmgIR0CQ8hiKiwjddX2UKGgGR0A4AAAAAAAAaAdLGGgIR0CQ9Ckf9xZMdX2UKGgGR0AyAAAAAAAAaAdLEmgIR0CQ9btcv/R3dX2UKGgGR0A2AAAAAAAAaAdLFmgIR0CQ959PUKAsdX2UKGgGR0AzAAAAAAAAaAdLE2gIR0CQ+UfD1oQGdX2UKGgGR0A0AAAAAAAAaAdLFGgIR0CQ+wD+irT6dWUu"
48
+ },
49
+ "ep_success_buffer": {
50
+ ":type:": "<class 'collections.deque'>",
51
+ ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
52
+ },
53
+ "_n_updates": 12475,
54
+ "observation_space": {
55
+ ":type:": "<class 'gymnasium.spaces.box.Box'>",
56
+ ":serialized:": "gAWVFgIAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWBAAAAAAAAAABAQEBlGgIjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKUjA1ib3VuZGVkX2Fib3ZllGgRKJYEAAAAAAAAAAEBAQGUaBVLBIWUaBl0lFKUjAZfc2hhcGWUSwSFlIwDbG93lGgRKJYQAAAAAAAAAJqZmcD//3//UHfWvv//f/+UaAtLBIWUaBl0lFKUjARoaWdolGgRKJYQAAAAAAAAAJqZmUD//39/UHfWPv//f3+UaAtLBIWUaBl0lFKUjAhsb3dfcmVwcpSMPVstNC44MDAwMDAyZSswMCAtMy40MDI4MjM1ZSszOCAtNC4xODg3OTAzZS0wMSAtMy40MDI4MjM1ZSszOF2UjAloaWdoX3JlcHKUjDlbNC44MDAwMDAyZSswMCAzLjQwMjgyMzVlKzM4IDQuMTg4NzkwM2UtMDEgMy40MDI4MjM1ZSszOF2UjApfbnBfcmFuZG9tlE51Yi4=",
57
+ "dtype": "float32",
58
+ "bounded_below": "[ True True True True]",
59
+ "bounded_above": "[ True True True True]",
60
+ "_shape": [
61
+ 4
62
+ ],
63
+ "low": "[-4.8000002e+00 -3.4028235e+38 -4.1887903e-01 -3.4028235e+38]",
64
+ "high": "[4.8000002e+00 3.4028235e+38 4.1887903e-01 3.4028235e+38]",
65
+ "low_repr": "[-4.8000002e+00 -3.4028235e+38 -4.1887903e-01 -3.4028235e+38]",
66
+ "high_repr": "[4.8000002e+00 3.4028235e+38 4.1887903e-01 3.4028235e+38]",
67
+ "_np_random": null
68
+ },
69
+ "action_space": {
70
+ ":type:": "<class 'gymnasium.spaces.discrete.Discrete'>",
71
+ ":serialized:": "gAWVowEAAAAAAACMGWd5bW5hc2l1bS5zcGFjZXMuZGlzY3JldGWUjAhEaXNjcmV0ZZSTlCmBlH2UKIwBbpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCaTiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIAgAAAAAAAACUhpRSlIwFc3RhcnSUaAhoDkMIAAAAAAAAAACUhpRSlIwGX3NoYXBllCmMBWR0eXBllGgOjApfbnBfcmFuZG9tlIwUbnVtcHkucmFuZG9tLl9waWNrbGWUjBBfX2dlbmVyYXRvcl9jdG9ylJOUjAVQQ0c2NJRoG4wUX19iaXRfZ2VuZXJhdG9yX2N0b3KUk5SGlFKUfZQojA1iaXRfZ2VuZXJhdG9ylIwFUENHNjSUjAVzdGF0ZZR9lChoJooRPuJ3wCrypII7Idyz9/hoxgCMA2luY5SKESN8gSQijeRdbp4QBgbpcagAdYwKaGFzX3VpbnQzMpRLAYwIdWludGVnZXKUStW5bgV1YnViLg==",
72
+ "n": "2",
73
+ "start": "0",
74
+ "_shape": [],
75
+ "dtype": "int64",
76
+ "_np_random": "Generator(PCG64)"
77
+ },
78
+ "n_envs": 1,
79
+ "buffer_size": 1000000,
80
+ "batch_size": 32,
81
+ "learning_starts": 100,
82
+ "tau": 1.0,
83
+ "gamma": 0.99,
84
+ "gradient_steps": 1,
85
+ "optimize_memory_usage": false,
86
+ "replay_buffer_class": {
87
+ ":type:": "<class 'abc.ABCMeta'>",
88
+ ":serialized:": "gAWVNQAAAAAAAACMIHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5idWZmZXJzlIwMUmVwbGF5QnVmZmVylJOULg==",
89
+ "__module__": "stable_baselines3.common.buffers",
90
+ "__annotations__": "{'observations': <class 'numpy.ndarray'>, 'next_observations': <class 'numpy.ndarray'>, 'actions': <class 'numpy.ndarray'>, 'rewards': <class 'numpy.ndarray'>, 'dones': <class 'numpy.ndarray'>, 'timeouts': <class 'numpy.ndarray'>}",
91
+ "__doc__": "\n Replay buffer used in off-policy algorithms like SAC/TD3.\n\n :param buffer_size: Max number of element in the buffer\n :param observation_space: Observation space\n :param action_space: Action space\n :param device: PyTorch device\n :param n_envs: Number of parallel environments\n :param optimize_memory_usage: Enable a memory efficient variant\n of the replay buffer which reduces by almost a factor two the memory used,\n at a cost of more complexity.\n See https://github.com/DLR-RM/stable-baselines3/issues/37#issuecomment-637501195\n and https://github.com/DLR-RM/stable-baselines3/pull/28#issuecomment-637559274\n Cannot be used in combination with handle_timeout_termination.\n :param handle_timeout_termination: Handle timeout termination (due to timelimit)\n separately and treat the task as infinite horizon task.\n https://github.com/DLR-RM/stable-baselines3/issues/284\n ",
92
+ "__init__": "<function ReplayBuffer.__init__ at 0x000002BC8E0B3130>",
93
+ "add": "<function ReplayBuffer.add at 0x000002BC8E0B31C0>",
94
+ "sample": "<function ReplayBuffer.sample at 0x000002BC8E0B3250>",
95
+ "_get_samples": "<function ReplayBuffer._get_samples at 0x000002BC8E0B32E0>",
96
+ "_maybe_cast_dtype": "<staticmethod(<function ReplayBuffer._maybe_cast_dtype at 0x000002BC8E0B3370>)>",
97
+ "__abstractmethods__": "frozenset()",
98
+ "_abc_impl": "<_abc._abc_data object at 0x000002BC8B8AD700>"
99
+ },
100
+ "replay_buffer_kwargs": {},
101
+ "train_freq": {
102
+ ":type:": "<class 'stable_baselines3.common.type_aliases.TrainFreq'>",
103
+ ":serialized:": "gAWVYQAAAAAAAACMJXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi50eXBlX2FsaWFzZXOUjAlUcmFpbkZyZXGUk5RLBGgAjBJUcmFpbkZyZXF1ZW5jeVVuaXSUk5SMBHN0ZXCUhZRSlIaUgZQu"
104
+ },
105
+ "use_sde_at_warmup": false,
106
+ "exploration_initial_eps": 1.0,
107
+ "exploration_final_eps": 0.05,
108
+ "exploration_fraction": 0.1,
109
+ "target_update_interval": 10000,
110
+ "_n_calls": 50000,
111
+ "max_grad_norm": 10,
112
+ "exploration_rate": 0.05,
113
+ "lr_schedule": {
114
+ ":type:": "<class 'function'>",
115
+ ":serialized:": "gAWVlAIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMf2Q6XGpvbmFoXERvY3VtZW50c1xVbmlcV0lcMDNfQmFjaGVsb3JhcmJlaXRcSW1wbGVtZW50YXRpb25cR3ltbmFzaXVtXC5jb25kYVxsaWJcc2l0ZS1wYWNrYWdlc1xzdGFibGVfYmFzZWxpbmVzM1xjb21tb25cdXRpbHMucHmUjARmdW5jlEuDQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UaAx1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlGgAjBJfZnVuY3Rpb25fc2V0c3RhdGWUk5RoHn2UfZQoaBZoDYwMX19xdWFsbmFtZV9flIwZY29uc3RhbnRfZm4uPGxvY2Fscz4uZnVuY5SMD19fYW5ub3RhdGlvbnNfX5R9lIwOX19rd2RlZmF1bHRzX1+UTowMX19kZWZhdWx0c19flE6MCl9fbW9kdWxlX1+UaBeMB19fZG9jX1+UTowLX19jbG9zdXJlX1+UaACMCl9tYWtlX2NlbGyUk5RHPxo24uscQy2FlFKUhZSMF19jbG91ZHBpY2tsZV9zdWJtb2R1bGVzlF2UjAtfX2dsb2JhbHNfX5R9lHWGlIZSMC4="
116
+ },
117
+ "batch_norm_stats": [],
118
+ "batch_norm_stats_target": [],
119
+ "exploration_schedule": {
120
+ ":type:": "<class 'function'>",
121
+ ":serialized:": "gAWVNAMAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLBEsTQyhkAXwAGACIAWsEcgiIAFMAiAJkAXwAGACIAIgCGAAUAIgBGwAXAFMAlE5LAYaUKYwScHJvZ3Jlc3NfcmVtYWluaW5nlIWUjH9kOlxqb25haFxEb2N1bWVudHNcVW5pXFdJXDAzX0JhY2hlbG9yYXJiZWl0XEltcGxlbWVudGF0aW9uXEd5bW5hc2l1bVwuY29uZGFcbGliXHNpdGUtcGFja2FnZXNcc3RhYmxlX2Jhc2VsaW5lczNcY29tbW9uXHV0aWxzLnB5lIwEZnVuY5RLcUMGDAEEARgClIwDZW5klIwMZW5kX2ZyYWN0aW9ulIwFc3RhcnSUh5QpdJRSlH2UKIwLX19wYWNrYWdlX1+UjBhzdGFibGVfYmFzZWxpbmVzMy5jb21tb26UjAhfX25hbWVfX5SMHnN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi51dGlsc5SMCF9fZmlsZV9flGgMdU5OaACMEF9tYWtlX2VtcHR5X2NlbGyUk5QpUpRoHClSlGgcKVKUh5R0lFKUaACMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgifZR9lChoGGgNjAxfX3F1YWxuYW1lX1+UjBtnZXRfbGluZWFyX2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZQoaAqMCGJ1aWx0aW5zlIwFZmxvYXSUk5SMBnJldHVybpRoLXWMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgZjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz+pmZmZmZmahZRSlGg1Rz+5mZmZmZmahZRSlGg1Rz/wAAAAAAAAhZRSlIeUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"
122
+ }
123
+ }
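The data file above stores the agent's hyperparameters, and they map directly onto the `DQN` constructor. The sketch below re-creates an equivalent configuration; the policy string `"MlpPolicy"` (which resolves to the `DQNPolicy` recorded above) and `train_freq=4` (decoded from the serialized `TrainFreq`) are inferences, while the remaining values are listed verbatim in the JSON.

```python
from stable_baselines3 import DQN

# Re-create an equivalent agent from the hyperparameters stored in this file.
model = DQN(
    "MlpPolicy",              # resolves to stable_baselines3.dqn.policies.DQNPolicy
    "CartPole-v1",
    learning_rate=1e-4,
    buffer_size=1_000_000,
    learning_starts=100,
    batch_size=32,
    tau=1.0,
    gamma=0.99,
    train_freq=4,             # decoded from the serialized TrainFreq(4, "step")
    gradient_steps=1,
    target_update_interval=10_000,
    exploration_fraction=0.1,
    exploration_initial_eps=1.0,
    exploration_final_eps=0.05,
    max_grad_norm=10,
    verbose=1,
)
model.learn(total_timesteps=50_000)  # matches _total_timesteps above
model.save("dqn-CartPole-v1")
```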
dqn-CartPole-v1/policy.optimizer.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0a3f5d4a25b68d546fefd4fe092788b8b083f75a8212b4f1e15518ea5118846e
+ size 42144
dqn-CartPole-v1/policy.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:be3a3e7ebbd154fb89ae6ae55d988193349e3146de1a83febcda6de2171ca832
+ size 41266
dqn-CartPole-v1/pytorch_variables.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fb4dde0c1ad63b7740276006a06cc491b21b407ea6c889928c223ec77ddad79f
+ size 864
dqn-CartPole-v1/system_info.txt ADDED
@@ -0,0 +1,8 @@
+ - OS: Windows-10-10.0.22631-SP0 10.0.22631
+ - Python: 3.10.14
+ - Stable-Baselines3: 2.3.0
+ - PyTorch: 2.2.2+cpu
+ - GPU Enabled: False
+ - Numpy: 1.26.4
+ - Cloudpickle: 3.0.0
+ - Gymnasium: 0.29.1
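This report is the kind of output stable-baselines3 produces via `stable_baselines3.common.utils.get_system_info`; a minimal sketch, assuming only that helper, follows (it is not part of the uploaded files).

```python
from stable_baselines3.common.utils import get_system_info

# Collect the same kind of environment report as system_info.txt:
# returns a dict of fields and a pre-formatted string.
env_info, env_info_str = get_system_info(print_info=False)
print(env_info_str)
```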
results.json ADDED
@@ -0,0 +1 @@
+ {"mean_reward": 20.5, "std_reward": 2.1095023109728985, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2024-04-19T22:40:40.281205"}
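`results.json` summarizes the evaluation: mean reward 20.5 +/- 2.11 over 10 deterministic episodes. A sketch of how such numbers are typically produced with stable-baselines3's `evaluate_policy` helper follows; the `repo_id` is the same assumption used in the card above and is not taken from this file.

```python
import gymnasium as gym
from huggingface_sb3 import load_from_hub
from stable_baselines3 import DQN
from stable_baselines3.common.evaluation import evaluate_policy
from stable_baselines3.common.monitor import Monitor

# Load the uploaded checkpoint (repo_id is a placeholder, see the card above).
checkpoint = load_from_hub(
    repo_id="CrispyJLoHalo/dqn-CartPole-v1",
    filename="dqn-CartPole-v1.zip",
)
model = DQN.load(checkpoint)

# Evaluate over 10 deterministic episodes, matching the fields in results.json.
eval_env = Monitor(gym.make("CartPole-v1"))
mean_reward, std_reward = evaluate_policy(
    model, eval_env, n_eval_episodes=10, deterministic=True
)
print(f"mean_reward={mean_reward:.2f} +/- {std_reward:.2f}")
```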