# jaxgmg2_shared_init / train.yaml
# Author: David Quarel
# Commit c5fd14a — "Add README.md and train.yaml" (990 Bytes)
# Training config: resume all runs from a shared checkpoint (checkpoint 0)
# with a fresh optimizer state, sweeping over two resume sources x ten seeds.
# NOTE(review): indentation reconstructed from a flattened web scrape —
# confirm against the original file that `sweep` is top-level (a sibling of
# `parameters`) rather than nested under it.
parameters:
  project_name: jaxgmg2_shared_init
  action: rl
  rl_action: train
  # Learning
  # NOTE(review): PyYAML (YAML 1.1) parses `5e-5` as the STRING "5e-5", not a
  # float (the 1.1 float regex requires a dot, e.g. `5.0e-5`). Confirm the
  # consumer coerces it, or rewrite as 5.0e-5.
  lr: 5e-5
  alpha: 1.0
  discount_rate: 0.98
  cheese_loc: any
  env_layout: open
  # Training scale
  # NOTE(review): `_` digit separators are a YAML 1.1 int feature (PyYAML ok);
  # a strict YAML 1.2 loader reads this as a string.
  num_total_env_steps: 1_351_680_000
  num_levels: 9600
  grad_acc_per_chunk: 4
  num_rollout_steps: 64
  # Resume from checkpoint 0 (shared initialisation, fresh optimizer)
  resume_id: 0
  resume_optim: false
  # Checkpointing
  ckpt_dir: jaxgmg2_shared_init
  f_str_ckpt: "al_1.0_g_0.98_id_{run_id}_shared_init_seed_{seed}"
  eval_schedule: "0:1,250:2,500:5,2000:10"
  log_optimizer_state: true
  # Logging
  use_wandb: true
  use_hf: true
  wandb_project: jaxgmg2_shared_init
# Sweep axes: cross product of (resume checkpoint, run_id) pairs and seeds —
# a list of two groups, each group itself a list of parameter assignments.
sweep:
  - - resume: jaxgmg2_3phase_optim_state/al_1.0_g_0.98_id_19_seed_981019
      run_id: 19
    - resume: jaxgmg2_3phase_optim_state/al_1.0_g_0.98_id_27_seed_981027
      run_id: 27
  - - seed: 30
    - seed: 31
    - seed: 32
    - seed: 33
    - seed: 34
    - seed: 35
    - seed: 36
    - seed: 37
    - seed: 38
    - seed: 39