# Run everything on GPU 1 only.
export CUDA_VISIBLE_DEVICES=1

# Model and tokenization hyperparameters.
model_name=timer
token_num=7
token_len=96
# Context length = tokens * token length (7 * 96 = 672).
# $(( ... )) replaces the deprecated $[ ... ] arithmetic syntax.
seq_len=$(( token_num * token_len ))

# Training hyperparameters.
batch_size=8
d_model=256
d_ff=2048
e_layers=3
learning_rate=0.0001

# Checkpoint directory name; must match the setting string run.py generates,
# since the evaluation loop below loads "checkpoint.pth" from this directory.
setting_name=forecast_ETTh1_${model_name}_MultivariateDatasetBenchmark_sl${seq_len}_it${token_len}_ot${token_len}_lr${learning_rate}_bt${batch_size}_wd0_el${e_layers}_dm${d_model}_dff${d_ff}_nh8_cosFalse_test_0
mkdir -p logs

# Train one model with the full context length; checkpoints land in the
# setting_name directory used by the evaluation loop below.
train_args=(
  --task_name forecast
  --is_training 1
  --root_path ./dataset/ETT-small/
  --data_path ETTh1.csv
  --model_id ETTh1
  --model "$model_name"
  --data MultivariateDatasetBenchmark
  --seq_len "$seq_len"
  --input_token_len "$token_len"
  --output_token_len "$token_len"
  --test_seq_len "$seq_len"
  --test_pred_len 96
  --batch_size "$batch_size"
  --learning_rate "$learning_rate"
  --train_epochs 10
  --d_model "$d_model"
  --d_ff "$d_ff"
  --gpu 0
  --lradj type1
  --use_norm
  --e_layers "$e_layers"
  --valid_last
)
python -u run.py "${train_args[@]}"

# Evaluate the trained checkpoint at every forecast horizon without retraining
# (is_training 0); only --test_pred_len varies between runs.
for test_pred_len in 96 192 336 720; do
  eval_args=(
    --task_name forecast
    --is_training 0
    --root_path ./dataset/ETT-small/
    --data_path ETTh1.csv
    --model_id ETTh1
    --model "$model_name"
    --data MultivariateDatasetBenchmark
    --seq_len "$seq_len"
    --input_token_len "$token_len"
    --output_token_len "$token_len"
    --test_seq_len "$seq_len"
    --test_pred_len "$test_pred_len"
    --batch_size "$batch_size"
    --learning_rate "$learning_rate"
    --train_epochs 10
    --d_model "$d_model"
    --d_ff "$d_ff"
    --gpu 0
    --lradj type1
    --use_norm
    --e_layers "$e_layers"
    --valid_last
    --test_dir "$setting_name"
    --test_file_name checkpoint.pth
  )
  python -u run.py "${eval_args[@]}"
done