wilt8 committed (verified)
Commit 68e922c · 1 Parent(s): 04bfc8d

Push agent to the Hub

README.md CHANGED

@@ -17,7 +17,7 @@ model-index:
       type: LunarLander-v2
     metrics:
     - type: mean_reward
-      value: -89.89 +/- 34.88
+      value: -126.16 +/- 65.77
       name: mean_reward
       verified: false
 ---
@@ -37,8 +37,8 @@ model-index:
  'wandb_entity': None
  'capture_video': True
  'env_id': 'LunarLander-v2'
- 'total_timesteps': 500000
- 'learning_rate': 0.001
+ 'total_timesteps': 50000
+ 'learning_rate': 0.00025
  'num_envs': 4
  'num_steps': 128
  'anneal_lr': True
@@ -53,7 +53,7 @@ model-index:
  'ent_coef': 0.01
  'vf_coef': 0.5
  'max_grad_norm': 0.5
- 'target_kl': None
+ 'target_kl': 0.015
  'repo_id': 'wilt8/ppo-CleanRL-LunarLander-v2'
  'batch_size': 512
  'minibatch_size': 128}
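The commit only changes `total_timesteps` (500000 → 50000), `learning_rate` (0.001 → 0.00025) and `target_kl` (None → 0.015); the derived sizes at the bottom of the dump stay the same. A minimal sketch (not part of this repository) of how a CleanRL-style PPO script derives those two sizes, assuming the default `num_minibatches = 4`, which is not visible in the hunks above:

```python
# Sketch only: reproduces the batch-size arithmetic behind the README values.
num_envs = 4          # parallel environments, from the README
num_steps = 128       # rollout length per environment, from the README
num_minibatches = 4   # assumption: CleanRL's default, not shown in this diff

batch_size = num_envs * num_steps               # 4 * 128 = 512  -> 'batch_size': 512
minibatch_size = batch_size // num_minibatches  # 512 // 4 = 128 -> 'minibatch_size': 128

print(batch_size, minibatch_size)  # 512 128
```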
logs/events.out.tfevents.1725425946.coder-william-william-gpu-dev-01-5fd55f9c84-plnfx.1521941.0 ADDED

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a7073f39a93f0e5330217f11df44285bbfe62de78f71c6bb7fc67d49d481a4e4
+size 109726
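The three added lines are a Git LFS pointer, not the TensorBoard event file itself: only the blob's SHA-256 oid and byte size are stored in the repository. A hedged sketch of checking a locally downloaded copy against such a pointer; the local path is an assumption:

```python
import hashlib
from pathlib import Path

def matches_lfs_pointer(local_path: str, expected_oid: str, expected_size: int) -> bool:
    """Return True if the file's size and SHA-256 match a Git LFS pointer's oid/size."""
    data = Path(local_path).read_bytes()
    return len(data) == expected_size and hashlib.sha256(data).hexdigest() == expected_oid

# oid and size copied from the pointer added in this commit; the local filename is assumed.
print(matches_lfs_pointer(
    "logs/events.out.tfevents.1725425946.coder-william-william-gpu-dev-01-5fd55f9c84-plnfx.1521941.0",
    "a7073f39a93f0e5330217f11df44285bbfe62de78f71c6bb7fc67d49d481a4e4",
    109726,
))
```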
model.pt CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:318677b220e901295b2daa123bcfbdbad8592737edace0eace892497ebef1581
+oid sha256:a0bee8379c6900506726ea5a3990eb0003996db65b2f1015b138a8f71be43f52
 size 43026
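The checkpoint is replaced in place (same 43026-byte size, new oid). A hedged sketch of fetching the updated model.pt from the Hub with huggingface_hub and loading it; whether the file holds a state_dict or a pickled module depends on the training script, so the sketch only loads and inspects it:

```python
import torch
from huggingface_hub import hf_hub_download

# Download the LFS-resolved checkpoint from this repository.
path = hf_hub_download(repo_id="wilt8/ppo-CleanRL-LunarLander-v2", filename="model.pt")

# Load on CPU and report what was stored; wiring it into an agent class is up to the user.
checkpoint = torch.load(path, map_location="cpu")
print(type(checkpoint))
```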
replay.mp4 CHANGED
Binary files a/replay.mp4 and b/replay.mp4 differ
 
results.json CHANGED

@@ -1 +1 @@
-{"env_id": "LunarLander-v2", "mean_reward": -89.88970909595137, "std_reward": 34.881950781912344, "n_evaluation_episodes": 10, "eval_datetime": "2024-09-04T12:58:06.316835"}
+{"env_id": "LunarLander-v2", "mean_reward": -126.16086227227284, "std_reward": 65.77039070781815, "n_evaluation_episodes": 10, "eval_datetime": "2024-09-04T12:59:35.846009"}
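results.json is the 10-episode evaluation summary behind the mean_reward shown in the README. A minimal sketch, not the repository's own evaluation code, of a loop that produces the same schema, assuming a `policy(obs)` callable that maps an observation to an action:

```python
import json
from datetime import datetime

import gymnasium as gym
import numpy as np

def evaluate(policy, env_id="LunarLander-v2", n_episodes=10, out_path="results.json"):
    """Roll out `policy` for n_episodes and write a results.json in the schema above."""
    env = gym.make(env_id)
    returns = []
    for _ in range(n_episodes):
        obs, _ = env.reset()
        done, episode_return = False, 0.0
        while not done:
            action = policy(obs)  # assumed callable: observation -> discrete action
            obs, reward, terminated, truncated, _ = env.step(action)
            episode_return += reward
            done = terminated or truncated
        returns.append(episode_return)
    results = {
        "env_id": env_id,
        "mean_reward": float(np.mean(returns)),
        "std_reward": float(np.std(returns)),
        "n_evaluation_episodes": n_episodes,
        "eval_datetime": datetime.now().isoformat(),
    }
    with open(out_path, "w") as f:
        json.dump(results, f)
    return results
```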