Initial Commit
- README.md +1 -1
- args.yml +7 -21
- dqn-MountainCar-v0.zip +2 -2
- dqn-MountainCar-v0/data +35 -33
- dqn-MountainCar-v0/policy.optimizer.pth +1 -1
- dqn-MountainCar-v0/policy.pth +1 -1
- replay.mp4 +2 -2
- results.json +1 -1
- train_eval_metrics.zip +3 -0
README.md
CHANGED
@@ -10,7 +10,7 @@ model-index:
   results:
   - metrics:
     - type: mean_reward
-      value: -
+      value: -103.40 +/- 7.49
       name: mean_reward
     task:
       type: reinforcement-learning
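The card metric is the evaluation mean and standard deviation rounded to two decimals. A minimal sketch (not part of the commit itself) of how that string can be derived from the numbers stored in results.json further down:

import json

# Hypothetical helper: build the model-card metric string from results.json.
with open("results.json") as f:
    results = json.load(f)

card_value = f"{results['mean_reward']:.2f} +/- {results['std_reward']:.2f}"
print(card_value)  # -103.40 +/- 7.49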
args.yml
CHANGED
@@ -1,28 +1,24 @@
 !!python/object/apply:collections.OrderedDict
 - - - algo
     - dqn
-  - - device
-    - auto
   - - env
     - MountainCar-v0
   - - env_kwargs
     - null
   - - eval_episodes
-    -
+    - 10
   - - eval_freq
-    -
+    - 10000
   - - gym_packages
     - []
   - - hyperparams
     - null
   - - log_folder
-    -
+    - rl-trained-agents/
   - - log_interval
     - -1
-  - - n_eval_envs
-    - 1
   - - n_evaluations
-    -
+    - 20
   - - n_jobs
     - 1
   - - n_startup_trials
@@ -30,13 +26,9 @@
   - - n_timesteps
     - -1
   - - n_trials
-    -
+    - 10
-  - - no_optim_plots
-    - false
   - - num_threads
     - -1
-  - - optimization_log_path
-    - null
   - - optimize_hyperparameters
     - false
   - - pruner
@@ -48,26 +40,20 @@
   - - save_replay_buffer
     - false
   - - seed
-    -
+    - 1787207996
   - - storage
     - null
   - - study_name
     - null
   - - tensorboard_log
     - ''
-  - - track
-    - false
   - - trained_agent
     - ''
   - - truncate_last_trajectory
     - true
   - - uuid
-    -
+    - true
   - - vec_env
     - dummy
   - - verbose
     - 1
-  - - wandb_entity
-    - null
-  - - wandb_project_name
-    - sb3
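args.yml is written by the RL Baselines3 Zoo with a Python-specific YAML tag (!!python/object/apply:collections.OrderedDict), so PyYAML's safe loader rejects it. A minimal sketch for reading it back, assuming the file is trusted (unsafe_load will execute that tag):

import yaml

# Load the recorded training arguments; unsafe_load is required because of the
# !!python/object/apply tag. Only use it on files you trust.
with open("args.yml") as f:
    args = dict(yaml.unsafe_load(f))

print(args["algo"], args["env"], args["seed"])   # dqn MountainCar-v0 1787207996
print(args["eval_episodes"], args["eval_freq"])  # 10 10000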
dqn-MountainCar-v0.zip
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:c5d02d795e9120efb19cd1f65541a7f040eacf9b4253bfd438ebc35b64113a14
+size 1103767
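dqn-MountainCar-v0.zip is a standard Stable-Baselines3 save file; the diff above only shows its Git LFS pointer (hash and size). A minimal sketch of loading and rolling out the agent, assuming the old Gym step API this model was trained against:

import gym
from stable_baselines3 import DQN

# Load the saved agent and run one greedy episode.
model = DQN.load("dqn-MountainCar-v0.zip")
env = gym.make("MountainCar-v0")

obs, done, episode_return = env.reset(), False, 0.0
while not done:
    action, _ = model.predict(obs, deterministic=True)
    obs, reward, done, info = env.step(action)
    episode_return += reward
print("episode return:", episode_return)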
dqn-MountainCar-v0/data
CHANGED
@@ -4,15 +4,15 @@
         ":serialized:": "gAWVMAAAAAAAAACMHnN0YWJsZV9iYXNlbGluZXMzLmRxbi5wb2xpY2llc5SMCURRTlBvbGljeZSTlC4=",
         "__module__": "stable_baselines3.dqn.policies",
         "__doc__": "\n Policy class with Q-Value Net and target net for DQN\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
-        "__init__": "<function DQNPolicy.__init__ at
-        "_build": "<function DQNPolicy._build at
-        "make_q_net": "<function DQNPolicy.make_q_net at
-        "forward": "<function DQNPolicy.forward at
-        "_predict": "<function DQNPolicy._predict at
-        "_get_constructor_parameters": "<function DQNPolicy._get_constructor_parameters at
-        "set_training_mode": "<function DQNPolicy.set_training_mode at
+        "__init__": "<function DQNPolicy.__init__ at 0x7fd067581b00>",
+        "_build": "<function DQNPolicy._build at 0x7fd067581b90>",
+        "make_q_net": "<function DQNPolicy.make_q_net at 0x7fd067581c20>",
+        "forward": "<function DQNPolicy.forward at 0x7fd067581cb0>",
+        "_predict": "<function DQNPolicy._predict at 0x7fd067581d40>",
+        "_get_constructor_parameters": "<function DQNPolicy._get_constructor_parameters at 0x7fd067581dd0>",
+        "set_training_mode": "<function DQNPolicy.set_training_mode at 0x7fd067581e60>",
         "__abstractmethods__": "frozenset()",
-        "_abc_impl": "<_abc_data object at
+        "_abc_impl": "<_abc_data object at 0x7fd0675794e0>"
     },
     "verbose": 1,
     "policy_kwargs": {
@@ -23,24 +23,24 @@
     },
     "observation_space": {
         ":type:": "<class 'gym.spaces.box.Box'>",
-        ":serialized:": "gAWVYwEAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////
+        ":serialized:": "gAWVYwEAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMA2xvd5SMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYIAAAAAAAAAJqZmb8pXI+9lGgKSwKFlIwBQ5R0lFKUjARoaWdolGgQKJYIAAAAAAAAAJqZGT8pXI89lGgKSwKFlGgTdJRSlIwNYm91bmRlZF9iZWxvd5RoECiWAgAAAAAAAAABAZRoB4wCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksChZRoE3SUUpSMDWJvdW5kZWRfYWJvdmWUaBAolgIAAAAAAAAAAQGUaB9LAoWUaBN0lFKUjApfbnBfcmFuZG9tlE6MBl9zaGFwZZRLAoWUdWIu",
         "dtype": "float32",
-        "_shape": [
-            2
-        ],
         "low": "[-1.2 -0.07]",
         "high": "[0.6 0.07]",
         "bounded_below": "[ True True]",
         "bounded_above": "[ True True]",
-        "_np_random": null
+        "_np_random": null,
+        "_shape": [
+            2
+        ]
     },
     "action_space": {
         ":type:": "<class 'gym.spaces.discrete.Discrete'>",
-        ":serialized:": "
+        ":serialized:": "gAWVNQsAAAAAAACME2d5bS5zcGFjZXMuZGlzY3JldGWUjAhEaXNjcmV0ZZSTlCmBlH2UKIwBbpRLA4wFZHR5cGWUjAVudW1weZSMBWR0eXBllJOUjAJpOJSJiIeUUpQoSwOMATyUTk5OSv////9K/////0sAdJRijApfbnBfcmFuZG9tlIwUbnVtcHkucmFuZG9tLl9waWNrbGWUjBJfX3JhbmRvbXN0YXRlX2N0b3KUk5SMB01UMTk5MzeUhZRSlH2UKIwNYml0X2dlbmVyYXRvcpRoE4wFc3RhdGWUfZQojANrZXmUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWwAkAAAAAAAAAAACAU8KznIcDtZNy7Ktb6Oay8s+2gdrVBu9hoTFNoGu1zNkT5hifdJx5L8ilG4DEeQFJng9D5F3gGJOSE1XM1EopZNIIlb400J5EcnoD8K2/CnObez7pYLEG2nUDRQtufdYWausENGaDt/P1pS9p70JjQ7Vc98J3UsxGRDctCIlu0I6ud/sYtoBPe575TzLsEti5jl6FqRnKrj12LWcrQoCexe7HH/UiAV1LzyQPzBlSZERXmHCdCvUSF7XpWt47xP9BzzqxX7aH3TPYWImqos1/ez/JlLdsD0MfMZl9G2CQq7cHHRlM3sj7jroA9c+pGt4l/iAGpRb80HbjwU71ykPTAVp531BXrc2qmIU6z9Fh4TAPx7fZ1kVF+L1Irlou+4Ckky7Ys59nB7KkciTI+N5jlb62ybZt0+ZWgIA6LKLvdx/mTQtB4k1aplT/C7L9/ybKCFn2quN/7YlIkxoH1U0xdabG6rgOrR+SHMmvUwvtKB+19Ibb07mSgVQyjNAvnyADPJf3pkxylZtn7f/OVpWEaWfl6BcLwy0grrEgUK+H+8P8XWMuBginXgwzn3sy4+ZOlr45op6TtuqX0Knz/SySGDlBIK8JqKObzB6fGt+ovJHEM8KlL4veKwkLkuuMWBaex3FBdWskry5qhslxMgnk2thh8DaXmAfbuI8j0SqHMW1kleITi9ekfXx/eSi5hX1GjA/M62Zixuay1H8zH9VjsTRcGacyJ0vh1hNReDFoNsXFbLfLqaIvbLDQjY7T289ZXsupvAxu2GVTbqWst+ckPPzwH7vLikULC+weAKwxarqm+ugAXgyz774meHOsvQYuu18nvrrunjZWDvwaKuYohEwUfSnpotE9XhX99yUTc8sGPQidTfXkzm/t8MWP8it4l4VSEgDLn8GW8t2DAh8EwFa/KOGoZEGjYqZ2IMA70E+F2LqgaZlQLFMONTIx3yuN5F2e1MT4v2wdBRK9R+lGMpxIiNldyOwwxLDBTRDMhd7APidmDwQBnvaIecKFa95btwHkRBEUT5g++/I0DDg685EX4OMO2YtTPqM3PQluS4puEhAQRVukNGSh4gYDgcBPKZl4ThNf+G+E7El9fmWJcP39Sifw6Mn+GEisM1RhHY05XZHUv5W4r8kD2jSLMY+IIL2+LtQrW7it7y28+sEicLoEfYOky9ZJF6l0fR+sXEawf+REH9LvtRJ4yzfxr7KisNpr1axv1ae5CDXS+XTzuOG/BJnHvt8arnY1XWH9SdkCOeok6MI8GBCtjTCxJ5JbpI5J0i0A66mJaRW9LMfP6Cil3/cVRQ9uN2KTtV3o7rJwY4XCnj7DJmqrUwofDDl7Ek0PoN7w0Hh8YHOy8qhPw7V8ALdjZn7eYtjCQIldQvHbM1I73RtCLQvQGFMXUCJ022pGRqTvZX5XWSizqbgX6TJmI6LDF9wcpYealB7cDwelfqdpzHRmyjRbIX9b+w4uj//aDRgP2SgiOAq/D/9/0SbgK/E0FQyclhNVAkbKwXhAxKGczpvJow0mFFUAt/5fT5KAsmQTAt8p0FsrGMDTfk4RzZgqZSm+ihVRS371Tx3twpGA1goo/AIfJh8slJC3hkR1OGCN7LAPGCwbM9rHlKSU4uuhJiff196h9q1kPMld6989MfKLVkvCl7ofCRurPUW46ceJKE951sQD1v8cK0HK1JmuBTCXAelCUCIFNLGk3tMXNVmuuFF3o3xb4V4T1IAYIfBdyEVHhIIZOE/JEY79daQw8njYEtQ6YwZ6kNCBYfrjq2OglITcRdwDmINL42ro6HnbWgLZQ8Ce/EiPVBtWHwhvGUHK1FNONzRzXgT1zKEg+WAigeuK4QVIxdITM4YvUyYvpQJuJd+xGD1no7BYIKXdV4aDlsRnWSMmS+zTyTvC0+TgBMCNpMvdChjaB/XTrMVsm0vgPmCYswn067MTYWfm5oCqqmNciqoRfFL2O2mxFT1VMcKDrxHBdBUhSG5UmAerx86KAEytbsCbn6OOj8Y02VwVynzXd0WJfLioeGMZISM1eneWfTc1mQ6CpdDxJqUmU86/KsBL3Bb0S2NAqFysFJZKxDwLej8xz+xH8IxEHzlkiiNH+2IIq0663FAwi6wg6dgcryDqQ+lNDwn898nylrcYShigDrtrFBNezKx3ZjpkPCnPUeQB4hJUrYCUJy5CyytC/x1UsByKez/aSNEWnlWnzYdJf2PoKL0YfmaR3KpXzi9ax3BHPgk1cdmgdVkqevFJ0DUdTBFQj/mhaKqcaT0rKJLgy/11AhWW4nX7+kAdgR0b1iAseI0TbMDtohBuqqUZfqMfUKsdI8v2aeUd0+IqOjPBFe7TZRC7OUYmf789SRTpw9gst4tzx7tLap8JnFt2keKhqd3vBgqpvlsxvx0DcPC+bo/qIldKiAn5D7TPjeWLzJ1gmpk1mVKOyWOv/ZzlRTfe8yEsMsRcgdPxbOuxLjlOwo1uFh9NjHoOz/xbnI62I49ZzT59GUCNtAL74UqjlRoyXZ5ELEjhTn+F5fYfEkY2TnSsgKO4Wwb/xD41S4mBL7LcUyF76ybV7Yx0L6V2QGoSfyhHFqMQJs/haLPPW18mWJb/UDl90ZN9TEzcdXvZsmCeqzCagC6YDHp3fop+5nAQSnT/Byt2j7z+6cnl/aZh6oKs5xrEMmuzpLFbXNVof9hNmX5E0DQ2M8uBqqeW95p6z8ySnOxURAO28oYWsbVyeYaNlWLZrOtIMZDRjjbecSSwMLlrBhw4mZVht4DgOQxI1+P7sPHZLMf89U+5ctf1rD0r1AXgyXjzOxKvCxWMhrz6Ah19+zal/bAIpw+0V7Pq85PRQO4UeScmMwODR8jcOfILuMmo7xXhemY/JqtOncklEaGapMeGlkiefvQkx9L5EWvLn6stI4zRP4pZXx9iOz17IKJmKOVHgCIAOiheb0bwkjNkItlfYO3LzeLLPuBDNLFg7tQu5NPWy28a4nBsE/gsyEteRvF2ECYFIOJg06dzc77IWw7o+z1Q5APxLg9uvyFniYWNuJyk7rflLCmYcg1gN657CWff8YfPr0ukKOamco94X1nFdyroxHiQlRXaP91DOqMueI1pCasyRQt0jtbWwxdEVyzP3GzUZXBWqa0xXCzwe29cxg2aiwKuuAAVfaCE/Pt1cJXq8wvliF81sMDPMbowd9+uyWuExq/e+2W3wWeV3hVofoiEySjBrJPWVJW9++UocJbC0ppNw5mtHktkZqUk6kVtUgVQ4Cj4udj/bluZzcqWjIvOCJO52M+xcQY808Ei8T/lwwS9TguuzQ3e0KR7hptgNcX1/XhCvAuUaAmMAnU0lImIh5RSlChLA2gNTk5OSv////9K/////0sAdJRiTXAChZSMAUOUdJRSlIwDcG9zlE1wAnWMCWhhc19nYXVzc5RLAIwFZ2F1c3OURwAAAAAAAAAAdWKMBl9zaGFwZZQpdWIu",
         "n": 3,
-        "_shape": [],
         "dtype": "int64",
-        "_np_random": "RandomState(MT19937)"
+        "_np_random": "RandomState(MT19937)",
+        "_shape": []
     },
     "n_envs": 1,
     "num_timesteps": 120000,
@@ -48,32 +48,29 @@
     "_num_timesteps_at_start": 0,
     "seed": 0,
     "action_noise": null,
-    "start_time":
+    "start_time": 1614621403.8870316,
     "learning_rate": {
         ":type:": "<class 'function'>",
-        ":serialized:": "
+        ":serialized:": "gAWV2QIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwNX2J1aWx0aW5fdHlwZZSTlIwKTGFtYmRhVHlwZZSFlFKUKGgCjAhDb2RlVHlwZZSFlFKUKEsBSwBLAUsBSxNDBIgAUwCUToWUKYwBX5SFlIxVL3ZvbHVtZS9VU0VSU1RPUkUvcmFmZl9hbi9wcm9qZWN0cy90b3JjaHktYmFzZWxpbmVzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZSMBGZ1bmOUS3xDAgABlIwDdmFslIWUKXSUUpR9lCiMC19fcGFja2FnZV9flIwYc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9ulIwIX19uYW1lX1+UjB5zdGFibGVfYmFzZWxpbmVzMy5jb21tb24udXRpbHOUjAhfX2ZpbGVfX5SMVS92b2x1bWUvVVNFUlNUT1JFL3JhZmZfYW4vcHJvamVjdHMvdG9yY2h5LWJhc2VsaW5lcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUdU5OaACMEF9tYWtlX2VtcHR5X2NlbGyUk5QpUpSFlHSUUpSMHGNsb3VkcGlja2xlLmNsb3VkcGlja2xlX2Zhc3SUjBJfZnVuY3Rpb25fc2V0c3RhdGWUk5RoIH2UfZQoaBdoDowMX19xdWFsbmFtZV9flIwZY29uc3RhbnRfZm4uPGxvY2Fscz4uZnVuY5SMD19fYW5ub3RhdGlvbnNfX5R9lIwOX19rd2RlZmF1bHRzX1+UTowMX19kZWZhdWx0c19flE6MCl9fbW9kdWxlX1+UaBiMB19fZG9jX1+UTowLX19jbG9zdXJlX1+UaACMCl9tYWtlX2NlbGyUk5RHP3BiTdLxqfyFlFKUhZSMF19jbG91ZHBpY2tsZV9zdWJtb2R1bGVzlF2UjAtfX2dsb2JhbHNfX5R9lHWGlIZSMC4="
     },
     "tensorboard_log": null,
     "lr_schedule": {
         ":type:": "<class 'function'>",
-        ":serialized:": "
+        ":serialized:": "gAWV2QIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwNX2J1aWx0aW5fdHlwZZSTlIwKTGFtYmRhVHlwZZSFlFKUKGgCjAhDb2RlVHlwZZSFlFKUKEsBSwBLAUsBSxNDBIgAUwCUToWUKYwBX5SFlIxVL3ZvbHVtZS9VU0VSU1RPUkUvcmFmZl9hbi9wcm9qZWN0cy90b3JjaHktYmFzZWxpbmVzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZSMBGZ1bmOUS3xDAgABlIwDdmFslIWUKXSUUpR9lCiMC19fcGFja2FnZV9flIwYc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9ulIwIX19uYW1lX1+UjB5zdGFibGVfYmFzZWxpbmVzMy5jb21tb24udXRpbHOUjAhfX2ZpbGVfX5SMVS92b2x1bWUvVVNFUlNUT1JFL3JhZmZfYW4vcHJvamVjdHMvdG9yY2h5LWJhc2VsaW5lcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUdU5OaACMEF9tYWtlX2VtcHR5X2NlbGyUk5QpUpSFlHSUUpSMHGNsb3VkcGlja2xlLmNsb3VkcGlja2xlX2Zhc3SUjBJfZnVuY3Rpb25fc2V0c3RhdGWUk5RoIH2UfZQoaBdoDowMX19xdWFsbmFtZV9flIwZY29uc3RhbnRfZm4uPGxvY2Fscz4uZnVuY5SMD19fYW5ub3RhdGlvbnNfX5R9lIwOX19rd2RlZmF1bHRzX1+UTowMX19kZWZhdWx0c19flE6MCl9fbW9kdWxlX1+UaBiMB19fZG9jX1+UTowLX19jbG9zdXJlX1+UaACMCl9tYWtlX2NlbGyUk5RHP3BiTdLxqfyFlFKUhZSMF19jbG91ZHBpY2tsZV9zdWJtb2R1bGVzlF2UjAtfX2dsb2JhbHNfX5R9lHWGlIZSMC4="
     },
     "_last_obs": null,
-    "_last_episode_starts":
-        ":type:": "<class 'numpy.ndarray'>",
-        ":serialized:": "gAWVdAAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYBAAAAAAAAAAGUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwGFlIwBQ5R0lFKULg=="
-    },
+    "_last_episode_starts": null,
     "_last_original_obs": {
         ":type:": "<class 'numpy.ndarray'>",
-        ":serialized:": "
+        ":serialized:": "gAWVfQAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYIAAAAAAAAAIjVkT5RJ348lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksBSwKGlIwBQ5R0lFKULg=="
     },
-    "_episode_num":
+    "_episode_num": 766,
     "use_sde": false,
     "sde_sample_freq": -1,
     "_current_progress_remaining": 0.0,
     "ep_info_buffer": {
         ":type:": "<class 'collections.deque'>",
-        ":serialized:": "
+        ":serialized:": "gAWV4AsAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHwFZAAAAAAACMAWyUS1mMAXSUR0ByYN5LRKHxdX2UKGgGR8BcQAAAAAAAaAdLcWgIR0ByZc7FKkEcdX2UKGgGR8BhwAAAAAAAaAdLjmgIR0BybDKQq7ROdX2UKGgGR8BeQAAAAAAAaAdLeWgIR0BycckHD766dX2UKGgGR8BeAAAAAAAAaAdLeGgIR0BydstDlYEGdX2UKGgGR8BcwAAAAAAAaAdLc2gIR0Bye8qc3EQ5dX2UKGgGR8BcwAAAAAAAaAdLc2gIR0BygU7kn1FpdX2UKGgGR8BdAAAAAAAAaAdLdGgIR0ByhkKrq+rVdX2UKGgGR8BfwAAAAAAAaAdLf2gIR0Byi97HAAQydX2UKGgGR8BXwAAAAAAAaAdLX2gIR0BykBs2vStvdX2UKGgGR8BggAAAAAAAaAdLhGgIR0BylcGY8dPtdX2UKGgGR8BegAAAAAAAaAdLemgIR0Bym07p3X7MdX2UKGgGR8BcwAAAAAAAaAdLc2gIR0ByoEQ8OkLydX2UKGgGR8BbAAAAAAAAaAdLbGgIR0BypJMIu5BkdX2UKGgGR8BewAAAAAAAaAdLe2gIR0Byskpobn5jdX2UKGgGR8BbAAAAAAAAaAdLbGgIR0BytyzRhMJydX2UKGgGR8BbwAAAAAAAaAdLb2gIR0ByvBSaVlf7dX2UKGgGR8BdwAAAAAAAaAdLd2gIR0BywQu14Pf9dX2UKGgGR8BgAAAAAAAAaAdLgGgIR0ByxqrvLHMmdX2UKGgGR8BcAAAAAAAAaAdLcGgIR0Byy5gqmTC+dX2UKGgGR8BXAAAAAAAAaAdLXGgIR0Byz8o/iYLLdX2UKGgGR8BbgAAAAAAAaAdLbmgIR0By1LRCx/utdX2UKGgGR8BdgAAAAAAAaAdLdmgIR0By2bYWcjJNdX2UKGgGR8BVgAAAAAAAaAdLVmgIR0By3eRW912adX2UKGgGR8BbgAAAAAAAaAdLbmgIR0By4u9US7GvdX2UKGgGR8BawAAAAAAAaAdLa2gIR0By50rNGEwndX2UKGgGR8Bg4AAAAAAAaAdLh2gIR0By7Z65Xlr/dX2UKGgGR8BdAAAAAAAAaAdLdGgIR0By8phy8zyjdX2UKGgGR8BfQAAAAAAAaAdLfWgIR0By+ENYr8R+dX2UKGgGR8BgwAAAAAAAaAdLhmgIR0By/fscABDHdX2UKGgGR8BdAAAAAAAAaAdLdGgIR0BzAvNW2gFpdX2UKGgGR8BbwAAAAAAAaAdLb2gIR0BzB+PJaJQ+dX2UKGgGR8BdQAAAAAAAaAdLdWgIR0BzDXasZHd5dX2UKGgGR8BbgAAAAAAAaAdLbmgIR0BzEdXxOLzgdX2UKGgGR8BbAAAAAAAAaAdLbGgIR0BzFsJfICEIdX2UKGgGR8BcgAAAAAAAaAdLcmgIR0BzG707KaG6dX2UKGgGR8BaAAAAAAAAaAdLaGgIR0BzIKYUnG83dX2UKGgGR8BcAAAAAAAAaAdLcGgIR0BzJZllK9PDdX2UKGgGR8BbQAAAAAAAaAdLbWgIR0BzKpZFG5MDdX2UKGgGR8BbAAAAAAAAaAdLbGgIR0BzLvYzzmOmdX2UKGgGR8BcAAAAAAAAaAdLcGgIR0BzM/9Hc1wYdX2UKGgGR8BbQAAAAAAAaAdLbWgIR0BzOPB7/n4gdX2UKGgGR8BcgAAAAAAAaAdLcmgIR0BzPfXe3x4IdX2UKGgGR8BcQAAAAAAAaAdLcWgIR0BzQvHZK3/hdX2UKGgGR8BewAAAAAAAaAdLe2gIR0BzSJ0vGp++dX2UKGgGR8BeAAAAAAAAaAdLeGgIR0BzTj4IrvsrdX2UKGgGR8BbwAAAAAAAaAdLb2gIR0BzU0aAFxGUdX2UKGgGR8BbgAAAAAAAaAdLbmgIR0BzV7BqKxcFdX2UKGgGR8BbwAAAAAAAaAdLb2gIR0BzXLdpItlJdX2UKGgGR8BcgAAAAAAAaAdLcmgIR0BzYcrBj4HpdX2UKGgGR8BWgAAAAAAAaAdLWmgIR0BzZgTTOPeYdX2UKGgGR8BbwAAAAAAAaAdLb2gIR0BzawGxD9fkdX2UKGgGR8BdAAAAAAAAaAdLdGgIR0BzcApRXOnmdX2UKGgGR8BbwAAAAAAAaAdLb2gIR0BzdQVclgMMdX2UKGgGR8BZgAAAAAAAaAdLZmgIR0BzefR1HOKPdX2UKGgGR8BfAAAAAAAAaAdLfGgIR0BzfxWsA/9pdX2UKGgGR8BewAAAAAAAaAdLe2gIR0BzhMsYl6Z6dX2UKGgGR8BcAAAAAAAAaAdLcGgIR0Bzicc3l0YCdX2UKGgGR8BdAAAAAAAAaAdLdGgIR0Bzjs2cawUydX2UKGgGR8BbQAAAAAAAaAdLbWgIR0Bzk8eLehwmdX2UKGgGR8BawAAAAAAAaAdLa2gIR0BzmMUlAu7IdX2UKGgGR8BbgAAAAAAAaAdLbmgIR0BznbxPO6d2dX2UKGgGR8BcgAAAAAAAaAdLcmgIR0Bzor5HmRvFdX2UKGgGR8BawAAAAAAAaAdLa2gIR0Bzpx6hQFcIdX2UKGgGR8BawAAAAAAAaAdLa2gIR0BzrBahYeT3dX2UKGgGR8BbAAAAAAAAaAdLbGgIR0BzsRRAKOT8dX2UKGgGR8BbAAAAAAAAaAdLbGgIR0BzthZEDyOJdX2UKGgGR8BaAAAAAAAAaAdLaGgIR0BzunuNPxhEdX2UKGgGR8BbQAAAAAAAaAdLbWgIR0Bzv4NG3F1kdX2UKGgGR8BcwAAAAAAAaAdLc2gIR0BzxJqpLmITdX2UKGgGR8BbgAAAAAAAaAdLbmgIR0BzyasU7CBPdX2UKGgGR8BcwAAAAAAAaAdLc2gIR0Bzzsg2ZRbbdX2UKGgGR8BYAAAAAAAAaAdLYGgIR0Bz0zMPjGT+dX2UKGgGR8BbAAAAAAAAaAdLbGgIR0Bz2DGPxQSBdX2UKGgGR8BbAAAAAAAAaAdLbGgIR0Bz3Tvc8DB/dX2UKGgGR8BbQAAAAAAAaAdLbWgIR0Bz4joePq9odX2UKGgGR8BdQAAAAAAAaAdLdWgIR0Bz51AC4jKQdX2UKGgGR8BdgAAAAAAAaAdLdmgIR0Bz7GKJl8PXdX2UKGgGR8BdQAAAAAAAaAdLdWgIR0Bz8gcebNKRdX2UKGgGR8BdAAAAAAAAaAdLdGgIR0Bz9xuvUz9CdX2UKGgGR8BdQAAAAAAAaAdLdWgIR0Bz/C7Xg9/0dX2UKGgGR8BcAAAAAAAAaAdLcGgIR0B0AT4dp7C0dX2UKGgGR8BdwAAAAAAAaAdLd2gIR0B0BvAVO9FndX2UKGgGR8BgQAAAAAAAaAdLgmgIR0B0DLgHeJpGdX2UKGgGR8BbgAAAAAAAaAdLbmgIR0B0EcKw6hg3dX2UKGgGR8BpAAAAAAAAaAdLyGgIR0B0Gn+0gKWtdX2UKGgGR8BhYAAAAAAAaAdLi2gIR0B0IQt/WlMzdX2UKGgGR8BdAAAAAAAAaAdLdGgIR0B0Jh4Z/CqIdX2UKGgGR8BbgAAAAAAAaAdLbmgIR0B0KyKdhAnldX2UKGgGR8BfwAAAAAAAaAdLf2gIR0B0MPX18LKFdX2UKGgGR8BdgAAAAAAAaAdLdmgIR0B0NhgOSW7fdX2UKGgGR8BWQAAAAAAAaAdLWWgIR0B0OnBGhEjPdX2UKGgGR8BfAAAAAAAAaAdLfGgIR0B0P56NVBD5dX2UKGgGR8BXgAAAAAAAaAdLXmgIR0B0RACNjslcdX2UKGgGR8BlwAAAAAAAaAdLrmgIR0B0TAVWS2YwdX2UKGgGR8BcwAAAAAAAaAdLc2gIR0B0USRhc7hfdX2UKGgGR8BdAAAAAAAAaAdLdGgIR0B0VuSq2jO+dX2UKGgGR8BbwAAAAAAAaAdLb2gIR0B0W/6FdszmdX2UKGgGR8BnoAAAAAAAaAdLvWgIR0B0ZB83Mpw0dX2UKGgGR8BaQAAAAAAAaAdLaWgIR0B0aS8h9srNdWUu"
     },
     "ep_success_buffer": {
         ":type:": "<class 'collections.deque'>",
@@ -92,12 +89,12 @@
         ":serialized:": "gAWVNQAAAAAAAACMIHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5idWZmZXJzlIwMUmVwbGF5QnVmZmVylJOULg==",
         "__module__": "stable_baselines3.common.buffers",
         "__doc__": "\n Replay buffer used in off-policy algorithms like SAC/TD3.\n\n :param buffer_size: Max number of element in the buffer\n :param observation_space: Observation space\n :param action_space: Action space\n :param device:\n :param n_envs: Number of parallel environments\n :param optimize_memory_usage: Enable a memory efficient variant\n of the replay buffer which reduces by almost a factor two the memory used,\n at a cost of more complexity.\n See https://github.com/DLR-RM/stable-baselines3/issues/37#issuecomment-637501195\n and https://github.com/DLR-RM/stable-baselines3/pull/28#issuecomment-637559274\n :param handle_timeout_termination: Handle timeout termination (due to timelimit)\n separately and treat the task as infinite horizon task.\n https://github.com/DLR-RM/stable-baselines3/issues/284\n ",
-        "__init__": "<function ReplayBuffer.__init__ at
-        "add": "<function ReplayBuffer.add at
-        "sample": "<function ReplayBuffer.sample at
-        "_get_samples": "<function ReplayBuffer._get_samples at
+        "__init__": "<function ReplayBuffer.__init__ at 0x7fd0679f2ef0>",
+        "add": "<function ReplayBuffer.add at 0x7fd0679f2f80>",
+        "sample": "<function ReplayBuffer.sample at 0x7fd0679e7680>",
+        "_get_samples": "<function ReplayBuffer._get_samples at 0x7fd0679e7710>",
         "__abstractmethods__": "frozenset()",
-        "_abc_impl": "<_abc_data object at
+        "_abc_impl": "<_abc_data object at 0x7fd067a52480>"
     },
     "replay_buffer_kwargs": {},
     "train_freq": {
@@ -110,11 +107,16 @@
     "exploration_final_eps": 0.07,
     "exploration_fraction": 0.2,
     "target_update_interval": 600,
-    "_n_calls":
+    "_n_calls": 0,
     "max_grad_norm": 10,
     "exploration_rate": 0.07,
     "exploration_schedule": {
         ":type:": "<class 'function'>",
         ":serialized:": "gAWVdQMAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwNX2J1aWx0aW5fdHlwZZSTlIwKTGFtYmRhVHlwZZSFlFKUKGgCjAhDb2RlVHlwZZSFlFKUKEsBSwBLAUsESxNDLGQBfAAYAIgBawRyEIgAUwCIAmQBfAAYAIgAiAIYABQAiAEbABcAUwBkAFMAlE5LAYaUKYwScHJvZ3Jlc3NfcmVtYWluaW5nlIWUjFEvaG9tZS9hbnRvbmluL0RvY3VtZW50cy9kbHIvcmwvdG9yY2h5LWJhc2VsaW5lcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEtuQwYAAQwBBAKUjANlbmSUjAxlbmRfZnJhY3Rpb26UjAVzdGFydJSHlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjFEvaG9tZS9hbnRvbmluL0RvY3VtZW50cy9kbHIvcmwvdG9yY2h5LWJhc2VsaW5lcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUdU5OaACMEF9tYWtlX2VtcHR5X2NlbGyUk5QpUpRoHilSlGgeKVKUh5R0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaCR9lH2UKGgZaA6MDF9fcXVhbG5hbWVfX5SMG2dldF9saW5lYXJfZm4uPGxvY2Fscz4uZnVuY5SMD19fYW5ub3RhdGlvbnNfX5R9lChoC4wIYnVpbHRpbnOUjAVmbG9hdJSTlIwGcmV0dXJulGgwdYwOX19rd2RlZmF1bHRzX1+UTowMX19kZWZhdWx0c19flE6MCl9fbW9kdWxlX1+UaBqMB19fZG9jX1+UTowLX19jbG9zdXJlX1+UaACMCl9tYWtlX2NlbGyUk5RHP7HrhR64UeyFlFKUaDhHP8mZmZmZmZqFlFKUaDhHP/AAAAAAAACFlFKUh5SMF19jbG91ZHBpY2tsZV9zdWJtb2R1bGVzlF2UjAtfX2dsb2JhbHNfX5R9lHWGlIZSMC4="
-    }
+    },
+    "_last_dones": {
+        ":type:": "<class 'numpy.ndarray'>",
+        ":serialized:": "gAWVdAAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYBAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwGFlIwBQ5R0lFKULg=="
+    },
+    "remove_time_limit_termination": false
 }
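The data entry above is stored inside the Stable-Baselines3 archive as a JSON file describing hyperparameters, spaces and schedules; the :serialized: fields are base64-encoded cloudpickle blobs. A minimal sketch for inspecting it without instantiating the agent, assuming the standard SB3 archive layout:

import json
import zipfile

# Peek at the saved metadata directly from the model archive.
with zipfile.ZipFile("dqn-MountainCar-v0.zip") as archive:
    data = json.loads(archive.read("data"))

print(data["num_timesteps"])             # 120000
print(data["exploration_final_eps"])     # 0.07
print(data["observation_space"]["low"])  # [-1.2 -0.07]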
dqn-MountainCar-v0/policy.optimizer.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:dafd4004bf3cd8d96bd4d1ba0c1fd9be807b61c68627f9b440ec2403c511064e
 size 541889
dqn-MountainCar-v0/policy.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:91f1074cea63d377fd5834feadeca1cc302cc81d783f82beced3b71a05440fa7
 size 542593
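policy.pth and policy.optimizer.pth are the PyTorch state dicts extracted from the model archive (Q-network and target-network weights, plus the Adam optimizer state). A minimal sketch for inspecting them, assuming they were written with torch.save as Stable-Baselines3 does:

import torch

# Load the raw state dicts on CPU and list the stored tensors.
policy_state = torch.load("dqn-MountainCar-v0/policy.pth", map_location="cpu")
optimizer_state = torch.load("dqn-MountainCar-v0/policy.optimizer.pth", map_location="cpu")

for name, tensor in policy_state.items():
    print(name, tuple(tensor.shape))
print(list(optimizer_state.keys()))  # typically ['state', 'param_groups']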
replay.mp4
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:23049abdde89a57aba759c814174af4edc46992f8b2457f04f100407ed931bc7
+size 260496
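replay.mp4 is the evaluation video shown on the model card. A minimal sketch of how such a clip can be recorded with Stable-Baselines3's VecVideoRecorder (the folder, prefix and length here are illustrative, not the exact settings behind this file):

import gym
from stable_baselines3 import DQN
from stable_baselines3.common.vec_env import DummyVecEnv, VecVideoRecorder

model = DQN.load("dqn-MountainCar-v0.zip")

# Wrap a single env and record the first 1000 steps to ./videos/.
env = DummyVecEnv([lambda: gym.make("MountainCar-v0")])
env = VecVideoRecorder(env, "videos/",
                       record_video_trigger=lambda step: step == 0,
                       video_length=1000, name_prefix="replay")

obs = env.reset()
for _ in range(1000):
    action, _ = model.predict(obs, deterministic=True)
    obs, reward, done, info = env.step(action)
env.close()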
results.json
CHANGED
@@ -1 +1 @@
-{"mean_reward": -
+{"mean_reward": -103.4, "std_reward": 7.4859869088851605, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2022-05-20T09:54:22.907164"}
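results.json records a deterministic 10-episode evaluation. A minimal sketch of how equivalent numbers can be produced and written out (field names follow the file above; the exact evaluation script is not part of this commit):

import json
from datetime import datetime

import gym
from stable_baselines3 import DQN
from stable_baselines3.common.evaluation import evaluate_policy

model = DQN.load("dqn-MountainCar-v0.zip")
env = gym.make("MountainCar-v0")

# Evaluate the greedy policy over 10 episodes.
mean_reward, std_reward = evaluate_policy(model, env, n_eval_episodes=10, deterministic=True)

with open("results.json", "w") as f:
    json.dump({
        "mean_reward": mean_reward,
        "std_reward": std_reward,
        "is_deterministic": True,
        "n_eval_episodes": 10,
        "eval_datetime": datetime.now().isoformat(),
    }, f)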
train_eval_metrics.zip
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:366fc58740e75244e05d872c2610328a0431fd5e6836d86347fad61d189e6e80
+size 20333
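train_eval_metrics.zip is the training-log archive added by this commit; only its Git LFS pointer is visible here. Assuming it follows the usual RL Baselines3 Zoo layout with an evaluations.npz written by EvalCallback (an assumption, not something the diff shows), it can be read like this:

import io
import zipfile

import numpy as np

# Assumed layout: the archive contains evaluations.npz from EvalCallback.
with zipfile.ZipFile("train_eval_metrics.zip") as archive:
    evaluations = np.load(io.BytesIO(archive.read("evaluations.npz")))

print(evaluations["timesteps"])             # timesteps at each evaluation
print(evaluations["results"].mean(axis=1))  # mean return per evaluation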