edbeeching HF staff committed on
Commit
baece98
1 Parent(s): d6c903a

Upload cfg.json with huggingface_hub

Browse files
Files changed (1) hide show
  1. cfg.json +164 -0
cfg.json ADDED
@@ -0,0 +1,164 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "help": false,
3
+ "algo": "APPO",
4
+ "env": "atari_namethisgame",
5
+ "experiment": "20221014_2B__atari_namethisgame_1111",
6
+ "train_dir": "/gpfsscratch/rech/ajs/utv52ia/sample_factory/train_dir/atari_2b",
7
+ "restart_behavior": "resume",
8
+ "device": "gpu",
9
+ "seed": 1111,
10
+ "num_policies": 1,
11
+ "async_rl": true,
12
+ "serial_mode": false,
13
+ "batched_sampling": true,
14
+ "num_batches_to_accumulate": 2,
15
+ "worker_num_splits": 1,
16
+ "policy_workers_per_policy": 1,
17
+ "max_policy_lag": 1000,
18
+ "num_workers": 4,
19
+ "num_envs_per_worker": 1,
20
+ "batch_size": 1024,
21
+ "num_batches_per_epoch": 8,
22
+ "num_epochs": 2,
23
+ "rollout": 64,
24
+ "recurrence": 1,
25
+ "shuffle_minibatches": false,
26
+ "gamma": 0.99,
27
+ "reward_scale": 1.0,
28
+ "reward_clip": 1000.0,
29
+ "value_bootstrap": false,
30
+ "normalize_returns": true,
31
+ "exploration_loss_coeff": 0.0004677351413,
32
+ "value_loss_coeff": 0.5,
33
+ "kl_loss_coeff": 0.0,
34
+ "exploration_loss": "entropy",
35
+ "gae_lambda": 0.95,
36
+ "ppo_clip_ratio": 0.1,
37
+ "ppo_clip_value": 1.0,
38
+ "with_vtrace": false,
39
+ "vtrace_rho": 1.0,
40
+ "vtrace_c": 1.0,
41
+ "optimizer": "adam",
42
+ "adam_eps": 1e-05,
43
+ "adam_beta1": 0.9,
44
+ "adam_beta2": 0.999,
45
+ "max_grad_norm": 0.0,
46
+ "learning_rate": 0.0003033891184,
47
+ "lr_schedule": "linear_decay",
48
+ "lr_schedule_kl_threshold": 0.008,
49
+ "obs_subtract_mean": 0.0,
50
+ "obs_scale": 255.0,
51
+ "normalize_input": true,
52
+ "normalize_input_keys": [
53
+ "obs"
54
+ ],
55
+ "decorrelate_experience_max_seconds": 1,
56
+ "decorrelate_envs_on_one_worker": true,
57
+ "actor_worker_gpus": [],
58
+ "set_workers_cpu_affinity": true,
59
+ "force_envs_single_thread": false,
60
+ "default_niceness": 0,
61
+ "log_to_file": true,
62
+ "experiment_summaries_interval": 3,
63
+ "flush_summaries_interval": 30,
64
+ "stats_avg": 100,
65
+ "summaries_use_frameskip": true,
66
+ "heartbeat_interval": 10,
67
+ "heartbeat_reporting_interval": 60,
68
+ "train_for_env_steps": 2000000000,
69
+ "train_for_seconds": 3600000,
70
+ "save_every_sec": 120,
71
+ "keep_checkpoints": 2,
72
+ "load_checkpoint_kind": "latest",
73
+ "save_milestones_sec": 1200,
74
+ "save_best_every_sec": 5,
75
+ "save_best_metric": "reward",
76
+ "save_best_after": 100000,
77
+ "benchmark": false,
78
+ "encoder_mlp_layers": [
79
+ 512,
80
+ 512
81
+ ],
82
+ "encoder_conv_architecture": "convnet_atari",
83
+ "encoder_conv_mlp_layers": [
84
+ 512
85
+ ],
86
+ "use_rnn": false,
87
+ "rnn_size": 512,
88
+ "rnn_type": "gru",
89
+ "rnn_num_layers": 1,
90
+ "decoder_mlp_layers": [],
91
+ "nonlinearity": "relu",
92
+ "policy_initialization": "orthogonal",
93
+ "policy_init_gain": 1.0,
94
+ "actor_critic_share_weights": true,
95
+ "adaptive_stddev": false,
96
+ "continuous_tanh_scale": 0.0,
97
+ "initial_stddev": 1.0,
98
+ "use_env_info_cache": false,
99
+ "env_gpu_actions": false,
100
+ "env_frameskip": 4,
101
+ "env_framestack": 4,
102
+ "pixel_format": "CHW",
103
+ "use_record_episode_statistics": true,
104
+ "with_wandb": false,
105
+ "wandb_user": null,
106
+ "wandb_project": "sample_factory",
107
+ "wandb_group": null,
108
+ "wandb_job_type": "SF",
109
+ "wandb_tags": [],
110
+ "with_pbt": false,
111
+ "pbt_mix_policies_in_one_env": true,
112
+ "pbt_period_env_steps": 5000000,
113
+ "pbt_start_mutation": 20000000,
114
+ "pbt_replace_fraction": 0.3,
115
+ "pbt_mutation_rate": 0.15,
116
+ "pbt_replace_reward_gap": 0.1,
117
+ "pbt_replace_reward_gap_absolute": 1e-06,
118
+ "pbt_optimize_batch_size": false,
119
+ "pbt_optimize_gamma": false,
120
+ "pbt_target_objective": "true_objective",
121
+ "pbt_perturb_min": 1.05,
122
+ "pbt_perturb_max": 1.5,
123
+ "env_agents": 512,
124
+ "command_line": "--seed=1111 --experiment=20221014_2B__atari_namethisgame_1111 --env=atari_namethisgame --train_for_seconds=3600000 --algo=APPO --gamma=0.99 --num_workers=4 --num_envs_per_worker=1 --worker_num_splits=1 --env_agents=512 --benchmark=False --max_grad_norm=0.0 --decorrelate_experience_max_seconds=1 --encoder_conv_architecture=convnet_atari --encoder_conv_mlp_layers 512 --nonlinearity=relu --num_policies=1 --normalize_input=True --normalize_input_keys obs --normalize_returns=True --async_rl=True --batched_sampling=True --train_for_env_steps=2000000000 --save_milestones_sec=1200 --train_dir /gpfsscratch/rech/ajs/utv52ia/sample_factory/train_dir/atari_2b --rollout 64 --exploration_loss_coeff 0.0004677351413 --num_epochs 2 --batch_size 1024 --num_batches_per_epoch 8 --learning_rate 0.0003033891184",
125
+ "cli_args": {
126
+ "algo": "APPO",
127
+ "env": "atari_namethisgame",
128
+ "experiment": "20221014_2B__atari_namethisgame_1111",
129
+ "train_dir": "/gpfsscratch/rech/ajs/utv52ia/sample_factory/train_dir/atari_2b",
130
+ "seed": 1111,
131
+ "num_policies": 1,
132
+ "async_rl": true,
133
+ "batched_sampling": true,
134
+ "worker_num_splits": 1,
135
+ "num_workers": 4,
136
+ "num_envs_per_worker": 1,
137
+ "batch_size": 1024,
138
+ "num_batches_per_epoch": 8,
139
+ "num_epochs": 2,
140
+ "rollout": 64,
141
+ "gamma": 0.99,
142
+ "normalize_returns": true,
143
+ "exploration_loss_coeff": 0.0004677351413,
144
+ "max_grad_norm": 0.0,
145
+ "learning_rate": 0.0003033891184,
146
+ "normalize_input": true,
147
+ "normalize_input_keys": [
148
+ "obs"
149
+ ],
150
+ "decorrelate_experience_max_seconds": 1,
151
+ "train_for_env_steps": 2000000000,
152
+ "train_for_seconds": 3600000,
153
+ "save_milestones_sec": 1200,
154
+ "benchmark": false,
155
+ "encoder_conv_architecture": "convnet_atari",
156
+ "encoder_conv_mlp_layers": [
157
+ 512
158
+ ],
159
+ "nonlinearity": "relu",
160
+ "env_agents": 512
161
+ },
162
+ "git_hash": "unknown",
163
+ "git_repo_name": "not a git repository"
164
+ }