@echo off
REM Launch DQN training for TSPTW instances; writes the trained model and a
REM training log under a directory derived from all hyperparameters below.
REM setlocal keeps every "set" below scoped to this script instead of
REM leaking into the caller's cmd session.
setlocal

REM Seed for the random generation: ensure that the validation set remains the same.
set seed=1

REM Characteristics of the training instances
set n_city=20
set grid_size=100
set max_tw_gap=10
set max_tw_size=100

REM Parameters for the training
set batch_size=32
set hidden_layer=2
set latent_dim=32
set learning_rate=0.0001
set n_step=-1
set max_softmax_beta=10

REM Others
set plot_training=1
set mode=cpu

REM Folder to save the trained model
set network_arch=hidden_layer-%hidden_layer%-latent_dim-%latent_dim%
set result_root=trained-models\dqn\tsptw\n-city-%n_city%\grid-%grid_size%\tw-%max_tw_gap%-%max_tw_size%\seed-%seed%\%network_arch%
set save_dir=%result_root%\batch_size-%batch_size%-learning_rate-%learning_rate%-n_step-%n_step%-max_softmax_beta-%max_softmax_beta%

REM Guard against re-runs: bare "mkdir" errors out (non-zero exit) if the
REM directory already exists. Quote the path in case it ever contains spaces.
if not exist "%save_dir%" mkdir "%save_dir%"

py -3 src\problem\tsptw\main_training_dqn_tsptw.py ^
    --seed %seed% ^
    --n_city %n_city% ^
    --grid_size %grid_size% ^
    --max_tw_gap %max_tw_gap% ^
    --max_tw_size %max_tw_size% ^
    --batch_size %batch_size% ^
    --hidden_layer %hidden_layer% ^
    --latent_dim %latent_dim% ^
    --max_softmax_beta %max_softmax_beta% ^
    --learning_rate %learning_rate% ^
    --save_dir %save_dir% ^
    --plot_training %plot_training% ^
    --mode %mode% ^
    --n_step %n_step% ^
     > "%save_dir%\log-training.txt"