Add HalfCheetah-v4_PPO

#2
by gsc579 - opened
Files changed (24)
  1. HalfCheetah-v4/Test_single_HalfCheetah-v4_PPO_20230528-052036/config.yaml +73 -0
  2. HalfCheetah-v4/Test_single_HalfCheetah-v4_PPO_20230528-052036/logs/log.txt +167 -0
  3. HalfCheetah-v4/Test_single_HalfCheetah-v4_PPO_20230528-052036/tb_logs/interact/events.out.tfevents.1685251236.gscaimax.2601283.0 +3 -0
  4. HalfCheetah-v4/Test_single_HalfCheetah-v4_PPO_20230528-052036/tb_logs/model/events.out.tfevents.1685251236.gscaimax.2601283.1 +3 -0
  5. HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/config.yaml +73 -0
  6. HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/logs/log.txt +0 -0
  7. HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/models/1000 +3 -0
  8. HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/models/1500 +3 -0
  9. HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/models/2000 +3 -0
  10. HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/models/2500 +3 -0
  11. HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/models/3000 +3 -0
  12. HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/models/3500 +3 -0
  13. HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/models/4000 +3 -0
  14. HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/models/4500 +3 -0
  15. HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/models/500 +3 -0
  16. HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/models/5000 +3 -0
  17. HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/models/5500 +3 -0
  18. HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/models/6000 +3 -0
  19. HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/models/6500 +3 -0
  20. HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/models/7000 +3 -0
  21. HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/models/7500 +3 -0
  22. HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/models/best +0 -0
  23. HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/tb_logs/interact/events.out.tfevents.1685108058.gscaimax.2061337.0 +3 -0
  24. HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/tb_logs/model/events.out.tfevents.1685108058.gscaimax.2061337.1 +3 -0
HalfCheetah-v4/Test_single_HalfCheetah-v4_PPO_20230528-052036/config.yaml ADDED
@@ -0,0 +1,73 @@
+ general_cfg:
+   algo_name: PPO
+   collect_traj: false
+   device: cuda
+   env_name: gym
+   load_checkpoint: true
+   load_model_step: best
+   load_path: Train_single_HalfCheetah-v4_PPO_20230526-133418
+   max_episode: 100
+   max_step: 1000
+   mode: test
+   model_save_fre: 500
+   mp_backend: single
+   n_learners: 1
+   n_workers: 2
+   online_eval: true
+   online_eval_episode: 10
+   seed: 1
+   share_buffer: true
+ algo_cfg:
+   actor_hidden_dim: 256
+   actor_layers:
+   - activation: relu
+     layer_size:
+     - 256
+     layer_type: linear
+   - activation: relu
+     layer_size:
+     - 256
+     layer_type: linear
+   actor_lr: 0.0003
+   batch_size: 256
+   buffer_size: 100000
+   buffer_type: ONPOLICY_QUE
+   continuous: true
+   critic_hidden_dim: 256
+   critic_layers:
+   - activation: relu
+     layer_size:
+     - 256
+     layer_type: linear
+   - activation: relu
+     layer_size:
+     - 256
+     layer_type: linear
+   critic_loss_coef: 0.5
+   critic_lr: 0.001
+   entropy_coef: 0.01
+   eps_clip: 0.2
+   epsilon_decay: 500
+   epsilon_end: 0.01
+   epsilon_start: 0.95
+   gamma: 0.95
+   independ_actor: true
+   k_epochs: 8
+   kl_alpha: 2
+   kl_beta: 1.5
+   kl_lambda: 0.5
+   kl_target: 0.1
+   lr: 0.0001
+   min_policy: 0
+   ppo_type: clip
+   sgd_batch_size: 128
+   share_optimizer: false
+   target_update: 4
+ env_cfg:
+   id: HalfCheetah-v4
+   ignore_params:
+   - wrapper
+   - ignore_params
+   new_step_api: true
+   render_mode: null
+   wrapper: null
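
Note: the fields `ppo_type: clip`, `eps_clip: 0.2`, `critic_loss_coef: 0.5`, and `entropy_coef: 0.01` above correspond to the standard clipped PPO objective. A minimal PyTorch-style sketch for reference (illustrative only, not this repo's actual implementation; all function and argument names are assumptions):

```python
import torch
import torch.nn.functional as F

def ppo_clip_loss(ratio, advantage, value_pred, value_target, entropy,
                  eps_clip=0.2, critic_loss_coef=0.5, entropy_coef=0.01):
    """Clipped PPO loss with the coefficients taken from config.yaml above.

    ratio        : pi_new(a|s) / pi_old(a|s), shape [batch]
    advantage    : advantage estimates, shape [batch]
    value_pred   : critic outputs, shape [batch]
    value_target : return targets for the critic, shape [batch]
    entropy      : per-sample policy entropy, shape [batch]
    """
    # Clipped surrogate: take the pessimistic minimum of the two terms.
    surr1 = ratio * advantage
    surr2 = torch.clamp(ratio, 1.0 - eps_clip, 1.0 + eps_clip) * advantage
    actor_loss = -torch.min(surr1, surr2).mean()

    # Value regression term, weighted by critic_loss_coef.
    critic_loss = F.mse_loss(value_pred, value_target)

    # Entropy bonus encourages exploration, weighted by entropy_coef.
    return actor_loss + critic_loss_coef * critic_loss - entropy_coef * entropy.mean()
```

With `k_epochs: 8`, a loss of this form is typically minimized for 8 optimization epochs over each collected batch before new data is gathered.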
HalfCheetah-v4/Test_single_HalfCheetah-v4_PPO_20230528-052036/logs/log.txt ADDED
@@ -0,0 +1,167 @@
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - General Configs:
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - ================================================================================
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - Name Value Type
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - env_name gym <class 'str'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - algo_name PPO <class 'str'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - mode test <class 'str'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - device cuda <class 'str'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - seed 1 <class 'int'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - max_episode 100 <class 'int'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - max_step 1000 <class 'int'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - collect_traj 0 <class 'bool'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - mp_backend single <class 'str'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - n_workers 2 <class 'int'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - n_learners 1 <class 'int'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - share_buffer 1 <class 'bool'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - online_eval 1 <class 'bool'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - online_eval_episode 10 <class 'int'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - model_save_fre 500 <class 'int'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - load_checkpoint 1 <class 'bool'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - load_path Train_single_HalfCheetah-v4_PPO_20230526-133418 <class 'str'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - load_model_step best <class 'str'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - ================================================================================
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - Algo Configs:
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - ================================================================================
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - Name Value Type
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - independ_actor 1 <class 'bool'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - share_optimizer 0 <class 'bool'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - ppo_type clip <class 'str'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - eps_clip 0.2 <class 'float'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - kl_target 0.1 <class 'float'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - kl_lambda 0.5 <class 'float'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - kl_beta 1.5 <class 'float'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - kl_alpha 2 <class 'int'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - continuous 1 <class 'bool'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - gamma 0.95 <class 'float'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - k_epochs 8 <class 'int'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - lr 0.0001 <class 'float'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - actor_lr 0.0003 <class 'float'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - critic_lr 0.001 <class 'float'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - critic_loss_coef 0.5 <class 'float'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - entropy_coef 0.01 <class 'float'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - batch_size 256 <class 'int'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - sgd_batch_size 128 <class 'int'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - actor_hidden_dim 256 <class 'int'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - critic_hidden_dim 256 <class 'int'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - min_policy 0 <class 'int'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - actor_layers [{'layer_type': 'linear', 'layer_size': [256], 'activation': 'relu'}, {'layer_type': 'linear', 'layer_size': [256], 'activation': 'relu'}] <class 'str'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - critic_layers [{'layer_type': 'linear', 'layer_size': [256], 'activation': 'relu'}, {'layer_type': 'linear', 'layer_size': [256], 'activation': 'relu'}] <class 'str'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - buffer_type ONPOLICY_QUE <class 'str'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - buffer_size 100000 <class 'int'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - epsilon_decay 500 <class 'int'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - epsilon_end 0.01 <class 'float'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - epsilon_start 0.95 <class 'float'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - target_update 4 <class 'int'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - ================================================================================
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - Env Configs:
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - ================================================================================
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - Name Value Type
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - id HalfCheetah-v4 <class 'str'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - render_mode None <class 'str'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - wrapper None <class 'str'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - ignore_params ['wrapper', 'ignore_params'] <class 'str'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - new_step_api 1 <class 'bool'>
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - ================================================================================
+ 2023-05-28 05:20:36 - SimpleLog - INFO: - obs_space: Box(-inf, inf, (17,), float64), n_actions: Box(-1.0, 1.0, (6,), float32)
+ 2023-05-28 05:20:38 - SimpleLog - INFO: - Start testing!
+ 2023-05-28 05:20:40 - SimpleLog - INFO: - episode: 0, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:40 - SimpleLog - INFO: - episode: 1, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:41 - SimpleLog - INFO: - episode: 2, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:41 - SimpleLog - INFO: - episode: 3, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:42 - SimpleLog - INFO: - episode: 4, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:42 - SimpleLog - INFO: - episode: 5, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:42 - SimpleLog - INFO: - episode: 6, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:43 - SimpleLog - INFO: - episode: 7, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:43 - SimpleLog - INFO: - episode: 8, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:43 - SimpleLog - INFO: - episode: 9, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:44 - SimpleLog - INFO: - episode: 10, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:44 - SimpleLog - INFO: - episode: 11, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:45 - SimpleLog - INFO: - episode: 12, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:45 - SimpleLog - INFO: - episode: 13, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:45 - SimpleLog - INFO: - episode: 14, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:46 - SimpleLog - INFO: - episode: 15, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:46 - SimpleLog - INFO: - episode: 16, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:47 - SimpleLog - INFO: - episode: 17, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:47 - SimpleLog - INFO: - episode: 18, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:47 - SimpleLog - INFO: - episode: 19, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:48 - SimpleLog - INFO: - episode: 20, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:48 - SimpleLog - INFO: - episode: 21, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:48 - SimpleLog - INFO: - episode: 22, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:49 - SimpleLog - INFO: - episode: 23, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:49 - SimpleLog - INFO: - episode: 24, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:50 - SimpleLog - INFO: - episode: 25, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:50 - SimpleLog - INFO: - episode: 26, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:50 - SimpleLog - INFO: - episode: 27, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:51 - SimpleLog - INFO: - episode: 28, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:51 - SimpleLog - INFO: - episode: 29, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:52 - SimpleLog - INFO: - episode: 30, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:52 - SimpleLog - INFO: - episode: 31, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:52 - SimpleLog - INFO: - episode: 32, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:53 - SimpleLog - INFO: - episode: 33, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:53 - SimpleLog - INFO: - episode: 34, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:53 - SimpleLog - INFO: - episode: 35, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:54 - SimpleLog - INFO: - episode: 36, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:54 - SimpleLog - INFO: - episode: 37, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:55 - SimpleLog - INFO: - episode: 38, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:55 - SimpleLog - INFO: - episode: 39, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:55 - SimpleLog - INFO: - episode: 40, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:56 - SimpleLog - INFO: - episode: 41, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:56 - SimpleLog - INFO: - episode: 42, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:56 - SimpleLog - INFO: - episode: 43, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:57 - SimpleLog - INFO: - episode: 44, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:57 - SimpleLog - INFO: - episode: 45, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:58 - SimpleLog - INFO: - episode: 46, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:58 - SimpleLog - INFO: - episode: 47, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:58 - SimpleLog - INFO: - episode: 48, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:59 - SimpleLog - INFO: - episode: 49, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:20:59 - SimpleLog - INFO: - episode: 50, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:00 - SimpleLog - INFO: - episode: 51, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:00 - SimpleLog - INFO: - episode: 52, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:00 - SimpleLog - INFO: - episode: 53, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:01 - SimpleLog - INFO: - episode: 54, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:01 - SimpleLog - INFO: - episode: 55, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:01 - SimpleLog - INFO: - episode: 56, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:02 - SimpleLog - INFO: - episode: 57, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:02 - SimpleLog - INFO: - episode: 58, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:03 - SimpleLog - INFO: - episode: 59, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:03 - SimpleLog - INFO: - episode: 60, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:03 - SimpleLog - INFO: - episode: 61, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:04 - SimpleLog - INFO: - episode: 62, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:04 - SimpleLog - INFO: - episode: 63, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:04 - SimpleLog - INFO: - episode: 64, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:05 - SimpleLog - INFO: - episode: 65, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:05 - SimpleLog - INFO: - episode: 66, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:06 - SimpleLog - INFO: - episode: 67, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:06 - SimpleLog - INFO: - episode: 68, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:06 - SimpleLog - INFO: - episode: 69, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:07 - SimpleLog - INFO: - episode: 70, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:07 - SimpleLog - INFO: - episode: 71, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:08 - SimpleLog - INFO: - episode: 72, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:08 - SimpleLog - INFO: - episode: 73, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:08 - SimpleLog - INFO: - episode: 74, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:09 - SimpleLog - INFO: - episode: 75, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:09 - SimpleLog - INFO: - episode: 76, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:09 - SimpleLog - INFO: - episode: 77, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:10 - SimpleLog - INFO: - episode: 78, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:10 - SimpleLog - INFO: - episode: 79, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:11 - SimpleLog - INFO: - episode: 80, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:11 - SimpleLog - INFO: - episode: 81, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:11 - SimpleLog - INFO: - episode: 82, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:12 - SimpleLog - INFO: - episode: 83, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:12 - SimpleLog - INFO: - episode: 84, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:13 - SimpleLog - INFO: - episode: 85, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:13 - SimpleLog - INFO: - episode: 86, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:13 - SimpleLog - INFO: - episode: 87, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:14 - SimpleLog - INFO: - episode: 88, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:14 - SimpleLog - INFO: - episode: 89, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:14 - SimpleLog - INFO: - episode: 90, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:15 - SimpleLog - INFO: - episode: 91, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:15 - SimpleLog - INFO: - episode: 92, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:16 - SimpleLog - INFO: - episode: 93, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:16 - SimpleLog - INFO: - episode: 94, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:16 - SimpleLog - INFO: - episode: 95, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:17 - SimpleLog - INFO: - episode: 96, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:17 - SimpleLog - INFO: - episode: 97, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:18 - SimpleLog - INFO: - episode: 98, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:18 - SimpleLog - INFO: - episode: 99, ep_reward: 900.9887594821912, ep_step: 1000
+ 2023-05-28 05:21:18 - SimpleLog - INFO: - Finish testing! total time consumed: 41.82s
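
Note: every one of the 100 test episodes reports the same ep_reward, presumably because the seed is fixed (seed: 1) and the test policy is deterministic, so each rollout replays the same trajectory. If you want to summarize a log like this one, a small parsing sketch (the regex matches the episode lines shown above; the file path is illustrative):

```python
import re
from statistics import mean, stdev

# Matches lines like: "episode: 0, ep_reward: 900.98..., ep_step: 1000"
pattern = re.compile(r"episode: (\d+), ep_reward: ([-\d.]+), ep_step: (\d+)")

log_path = "Test_single_HalfCheetah-v4_PPO_20230528-052036/logs/log.txt"  # adjust to your checkout
rewards = []
with open(log_path) as f:
    for line in f:
        m = pattern.search(line)
        if m:
            rewards.append(float(m.group(2)))

if rewards:
    print(f"episodes: {len(rewards)}")
    print(f"mean ep_reward: {mean(rewards):.2f}")
    if len(rewards) > 1:
        print(f"std ep_reward: {stdev(rewards):.2f}")
```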
HalfCheetah-v4/Test_single_HalfCheetah-v4_PPO_20230528-052036/tb_logs/interact/events.out.tfevents.1685251236.gscaimax.2601283.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e794cc47f1263198760be72aef138dbc80999c04d48267ac28ed32daaa24ef6b
+ size 10236
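
Note: the tfevents and model files in this PR are stored as Git LFS pointers like the one above. After fetching the real objects with `git lfs pull`, a downloaded file can be checked against its pointer, since the oid is the SHA-256 of the content and size is its byte length. A small sketch (values copied from the pointer above; the local path depends on your checkout):

```python
import hashlib
from pathlib import Path

def verify_lfs_object(file_path: str, expected_oid: str, expected_size: int) -> bool:
    """Compare a fetched LFS object against the oid/size recorded in its pointer."""
    data = Path(file_path).read_bytes()
    return (len(data) == expected_size
            and hashlib.sha256(data).hexdigest() == expected_oid)

ok = verify_lfs_object(
    "HalfCheetah-v4/Test_single_HalfCheetah-v4_PPO_20230528-052036/"
    "tb_logs/interact/events.out.tfevents.1685251236.gscaimax.2601283.0",
    "e794cc47f1263198760be72aef138dbc80999c04d48267ac28ed32daaa24ef6b",
    10236,
)
print("LFS object verified:", ok)
```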
HalfCheetah-v4/Test_single_HalfCheetah-v4_PPO_20230528-052036/tb_logs/model/events.out.tfevents.1685251236.gscaimax.2601283.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f7eaf05597b59d57f3e245911b577dac16d29fb13b177d5bec04fde5e0f87889
+ size 40
HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/config.yaml ADDED
@@ -0,0 +1,73 @@
+ general_cfg:
+   algo_name: PPO
+   collect_traj: false
+   device: cuda
+   env_name: gym
+   load_checkpoint: false
+   load_model_step: best
+   load_path: Train_single_CartPole-v1_DQN_20230515-211721
+   max_episode: 2000
+   max_step: 1000
+   mode: train
+   model_save_fre: 500
+   mp_backend: single
+   n_learners: 1
+   n_workers: 2
+   online_eval: true
+   online_eval_episode: 10
+   seed: 1
+   share_buffer: true
+ algo_cfg:
+   actor_hidden_dim: 256
+   actor_layers:
+   - activation: relu
+     layer_size:
+     - 256
+     layer_type: linear
+   - activation: relu
+     layer_size:
+     - 256
+     layer_type: linear
+   actor_lr: 0.0003
+   batch_size: 256
+   buffer_size: 100000
+   buffer_type: ONPOLICY_QUE
+   continuous: true
+   critic_hidden_dim: 256
+   critic_layers:
+   - activation: relu
+     layer_size:
+     - 256
+     layer_type: linear
+   - activation: relu
+     layer_size:
+     - 256
+     layer_type: linear
+   critic_loss_coef: 0.5
+   critic_lr: 0.001
+   entropy_coef: 0.01
+   eps_clip: 0.2
+   epsilon_decay: 500
+   epsilon_end: 0.01
+   epsilon_start: 0.95
+   gamma: 0.95
+   independ_actor: true
+   k_epochs: 8
+   kl_alpha: 2
+   kl_beta: 1.5
+   kl_lambda: 0.5
+   kl_target: 0.1
+   lr: 0.0001
+   min_policy: 0
+   ppo_type: clip
+   sgd_batch_size: 128
+   share_optimizer: false
+   target_update: 4
+ env_cfg:
+   id: HalfCheetah-v4
+   ignore_params:
+   - wrapper
+   - ignore_params
+   new_step_api: true
+   render_mode: null
+   wrapper: null
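
Note: the actor_layers / critic_layers entries above both describe a two-layer, 256-unit ReLU MLP, and the test log shows a 17-dimensional observation and 6-dimensional continuous action for HalfCheetah-v4. A minimal sketch of turning such a layer spec into a PyTorch module (illustrative only, not this repo's actual network builder; build_mlp and its arguments are assumptions):

```python
import torch.nn as nn

def build_mlp(input_dim: int, layer_specs: list, output_dim: int) -> nn.Sequential:
    """Build an MLP from a layer spec shaped like actor_layers in config.yaml."""
    activations = {"relu": nn.ReLU, "tanh": nn.Tanh}
    layers, in_dim = [], input_dim
    for spec in layer_specs:
        assert spec["layer_type"] == "linear"      # this config only uses linear layers
        out_dim = spec["layer_size"][0]
        layers.append(nn.Linear(in_dim, out_dim))
        layers.append(activations[spec["activation"]]())
        in_dim = out_dim
    layers.append(nn.Linear(in_dim, output_dim))   # output head, e.g. the action mean
    return nn.Sequential(*layers)

# HalfCheetah-v4: 17-dim observation, 6-dim continuous action (see the test log above).
actor_spec = [
    {"layer_type": "linear", "layer_size": [256], "activation": "relu"},
    {"layer_type": "linear", "layer_size": [256], "activation": "relu"},
]
actor = build_mlp(input_dim=17, layer_specs=actor_spec, output_dim=6)
```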
HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/logs/log.txt ADDED
The diff for this file is too large to render. See raw diff
 
HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/models/1000 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5575cd57421daf2e85a1ecd32e6142ab632b3448f13a8d1256970b5be4d63e2a
+ size 574673
HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/models/1500 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f7216705e07503fe3e9b83ce29b67e06d6c41f4151679e8a3dc601b16857af4a
+ size 574673
HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/models/2000 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d911241d9cdef0415d1b1c43ce775b2d63279d76a4b72e369d1ed9af47e38bc3
+ size 574673
HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/models/2500 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dcc306852c2d5ddd7787042dcd317d0597613a2a2894c68d2698ee3a54996613
+ size 574673
HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/models/3000 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7791e81178388bb79dac734f611e5a189edd3aebc47f381448fd70216ba33811
+ size 574673
HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/models/3500 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c97da94eed32295e03ddb0342cd3753044c8b754542e6acaeb37efc26f07d2ce
+ size 574673
HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/models/4000 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6b200de28c4dbff4bf1f05a96c7b092fc37655a6798f49a10fa288bebb27c30c
+ size 574673
HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/models/4500 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1681983025dcd66d27b44f335649c53a2985c6150e8dcc1f18a691938cb42b9c
+ size 574673
HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/models/500 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3b56991f9e342c59025adb8f6a6d15d7f7fed63364187636f53882898b3fc27b
+ size 574594
HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/models/5000 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:64ea76c2868735c7f8cb59e71f8c57383bdcb287f185680bc7ea4f2e28400733
+ size 574673
HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/models/5500 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7f4fb6364177582ea31d7a9d5ec78164e80142abe9cafaea1fec824dfb2c2c7b
+ size 574673
HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/models/6000 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1cb9f1a6e34a1ec8740e21d15ae18a3c793016db372db6682d883729ea866a7c
+ size 574673
HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/models/6500 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bc15eb4ec4a66eb49e446cad353bba807a0b74dec014abbb86f3dc15c63bfd32
+ size 574673
HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/models/7000 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:549a13c1e9d579f3cf77a308852dabe49b537d7e59c12675b116447dd5c24860
+ size 574673
HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/models/7500 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:38d8a11a0123573b63f056c2ca1af6ae3ba5b406500dfc41a46f9f884b778947
+ size 574673
HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/models/best ADDED
Binary file (575 kB).
 
HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/tb_logs/interact/events.out.tfevents.1685108058.gscaimax.2061337.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7a89cd6ef8a64dea10ab10947f1990d29247a6d8f480f60f87480ff7510b42ea
+ size 211780
HalfCheetah-v4/Train_single_HalfCheetah-v4_PPO_20230526-133418/tb_logs/model/events.out.tfevents.1685108058.gscaimax.2061337.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ec052c91febdda1caa8015e8c4e54103eb04aebb018a68c08a54958f6bd7b18f
+ size 1280827