#!/bin/bash
#
# Launch long-text generation/evaluation jobs (via nlprun) for the
# language-modeling-via-stochastic-processes experiments.
# Sweeps decoder seeds x generation seeds (x latent dims for the
# learned-encoder methods below) over five datasets.
#
# Requires: nlprun on PATH; ${path2repo} must contain the trained decoder
# directories and encoder .ckpt files referenced in the commands below.

# -u: abort on unset variables; pipefail: fail a pipeline on any stage.
# -e is deliberately NOT set so that one failed job submission does not
# abort the remaining sweep.
set -uo pipefail

readonly nseed=3             # number of generation seeds per decoder
readonly ndecoderseed=3      # number of trained decoder seeds
readonly project="ICLR2022_v2_long_text_evaluation"
readonly env_name="language_modeling_via_stochastic_processes"
readonly path2repo='/nlp/scr/rewang/public_language_modeling_via_stochastic_processes'
readonly tag="randDecoder"

# GPT2 baselines: one single-GPU job per (decoder seed, generation seed,
# dataset). Each job runs run_generation_with_classification.py with
# nucleus sampling (p=0.99) and num_intervals=1000; the dataset-specific
# encoder checkpoint is passed via --cl_fpath (latent_dim=8 throughout,
# label=gpt).
for decoder_seed in $(seq 1 1 $ndecoderseed); do
    for seed in $(seq 1 1 $nseed); do
        # Wikisection
        nlprun -n gpt2 -g 1 'python run_generation_with_classification.py --model_type=gpt2 --model_name_or_path="'${path2repo}'/decoder/examples/pytorch/language-modeling/LM_toyWiki_gpt2_seed'${decoder_seed}'/" --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=wikisection_filter --cl_fpath="'${path2repo}'/encoder/code/encoder_models/wikisection8_tc/checkpoints/epoch=99-step=127199.ckpt" --latent_dim=8 --project='${project}' --tag='${tag}' --p=0.99 --label=gpt --no_eos --suppress_eos --seed='${seed} -a ${env_name}

        # Wikihow
        nlprun -n gpt2 -g 1 'python run_generation_with_classification.py --model_type=gpt2 --model_name_or_path="'${path2repo}'/decoder/examples/pytorch/language-modeling/LM_wikihow_gpt232_seed'${decoder_seed}'/" --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=wikihow --cl_fpath="'${path2repo}'/encoder/code/encoder_models/wikihow8_tc/checkpoints/epoch=99-step=150899.ckpt" --latent_dim=8 --project='${project}' --tag='${tag}' --p=0.99 --label=gpt --no_eos --suppress_eos --seed='${seed} -a ${env_name}

        # Tickettalk (taskmaster movies)
        nlprun -n gpt2 -g 1 'python run_generation_with_classification.py --model_type=gpt2 --model_name_or_path="'${path2repo}'/decoder/examples/pytorch/language-modeling/LM_taskmaster_movies_gpt232_seed'${decoder_seed}'/" --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=taskmaster --cl_fpath="'${path2repo}'/encoder/code/encoder_models/taskmaster_movies8_tc/checkpoints/epoch=99-step=78099.ckpt" --latent_dim=8 --project='${project}' --tag='${tag}' --p=0.99 --label=gpt --no_eos --suppress_eos --seed='${seed} -a ${env_name}

        # Restaurant (taskmaster restaurant)
        nlprun -n gpt2 -g 1 'python run_generation_with_classification.py --model_type=gpt2 --model_name_or_path="'${path2repo}'/decoder/examples/pytorch/language-modeling/LM_taskmaster_restaurant_gpt232_seed'${decoder_seed}'/" --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=restaurant_taskmaster --cl_fpath="'${path2repo}'/encoder/code/encoder_models/tmRestaurant8_tc/checkpoints/epoch=99-step=78099.ckpt" --latent_dim=8 --project='${project}' --tag='${tag}' --p=0.99 --no_eos --suppress_eos --label=gpt --seed='${seed} -a ${env_name}

        # Recipe
        nlprun -n gpt2 -g 1 'python run_generation_with_classification.py --model_type=gpt2 --model_name_or_path="'${path2repo}'/decoder/examples/pytorch/language-modeling/LM_recipe_gpt232_seed'${decoder_seed}'/" --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=recipe --cl_fpath="'${path2repo}'/encoder/code/encoder_models/recipe8_tc/checkpoints/epoch=99-step=21999.ckpt" --latent_dim=8 --project='${project}' --tag='${tag}' --p=0.99 --label=gpt --no_eos --suppress_eos --seed='${seed} -a ${env_name}

    done
done




# VAE encoder variant: sweep latent dim {8,16,32} x dataset, with two
# decoding scripts per dataset:
#   run_cl_transition_fulldoc_generation_da_bb.py -> label suffix "_bb"
#     (presumably bridge-dynamics sampling -- TODO confirm against script)
#   run_cl_transition_fulldoc_generation_da.py    -> plain "_DASampling"
method='vae'  # selects LM_*_vae<latent_dim>_seed<k> decoders and *_vae encoder ckpts
for decoder_seed in $(seq 1 1 $ndecoderseed); do
    for seed in $(seq 1 1 $nseed); do
        for latent_dim in {8,16,32}; do
            # Wikisection
            nlprun -n vae -g 1 'python run_cl_transition_fulldoc_generation_da_bb.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_toyWiki_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=wikisection_filter --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling_bb --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/wikisection'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=127199.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}
            nlprun -n vae -g 1 'python run_cl_transition_fulldoc_generation_da.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_toyWiki_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=wikisection_filter --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/wikisection'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=127199.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}

            # Wikihow
            # Buggy decoding
            nlprun -n vae -g 1 'python run_cl_transition_fulldoc_generation_da_bb.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_wikihow_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=wikihow --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling_bb --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/wikihow'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=150899.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}
            nlprun -n vae -g 1 'python run_cl_transition_fulldoc_generation_da.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_wikihow_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=wikihow --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/wikihow'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=150899.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}

            # Tickettalk (taskmaster movies)
            nlprun -n vae -g 1 'python run_cl_transition_fulldoc_generation_da_bb.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_taskmaster_movies_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=taskmaster --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling_bb --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/taskmaster_movies'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=78099.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}
            nlprun -n vae -g 1 'python run_cl_transition_fulldoc_generation_da.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_taskmaster_movies_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=taskmaster --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/taskmaster_movies'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=78099.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}

            # Restaurant (taskmaster restaurant)
            nlprun -n vae -g 1 'python run_cl_transition_fulldoc_generation_da_bb.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_taskmaster_restaurant_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=restaurant_taskmaster --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling_bb --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/tmRestaurant'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=78099.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}
            nlprun -n vae -g 1 'python run_cl_transition_fulldoc_generation_da.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_taskmaster_restaurant_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=restaurant_taskmaster --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/tmRestaurant'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=78099.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}

            # Recipe
            nlprun -n vae -g 1 'python run_cl_transition_fulldoc_generation_da_bb.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_recipe_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=recipe --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling_bb --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/recipe'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=21999.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}
            nlprun -n vae -g 1 'python run_cl_transition_fulldoc_generation_da.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_recipe_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=recipe --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/recipe'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=21999.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}
        done
    done
done

# InfoNCE encoder variant: same sweep shape as the VAE section above
# (latent dim {8,16,32} x dataset; *_da_bb.py "_bb" jobs plus plain
# *_da.py "_DASampling" jobs), only the encoder/decoder method differs.
method='infonce'  # selects LM_*_infonce<latent_dim>_seed<k> decoders and *_infonce encoder ckpts
for decoder_seed in $(seq 1 1 $ndecoderseed); do
    for seed in $(seq 1 1 $nseed); do
        for latent_dim in {8,16,32}; do
            # Wikisection
            nlprun -n infonce -g 1 'python run_cl_transition_fulldoc_generation_da_bb.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_toyWiki_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=wikisection_filter --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling_bb --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/wikisection'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=127199.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}

            nlprun -n infonce -g 1 'python run_cl_transition_fulldoc_generation_da.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_toyWiki_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=wikisection_filter --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/wikisection'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=127199.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}

            # Wikihow
            nlprun -n infonce -g 1 'python run_cl_transition_fulldoc_generation_da_bb.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_wikihow_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=wikihow --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling_bb --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/wikihow'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=150899.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}
            nlprun -n infonce -g 1 'python run_cl_transition_fulldoc_generation_da.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_wikihow_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=wikihow --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/wikihow'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=150899.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}

            # Tickettalk (taskmaster movies)
            nlprun -n infonce  -g 1 'python run_cl_transition_fulldoc_generation_da_bb.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_taskmaster_movies_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=taskmaster --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling_bb --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/taskmaster_movies'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=78099.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}
            nlprun -n infonce  -g 1 'python run_cl_transition_fulldoc_generation_da.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_taskmaster_movies_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=taskmaster --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/taskmaster_movies'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=78099.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}

            # Restaurant (taskmaster restaurant)
            nlprun -n infonce  -g 1 'python run_cl_transition_fulldoc_generation_da_bb.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_taskmaster_restaurant_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=restaurant_taskmaster --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling_bb --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/tmRestaurant'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=78099.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}
            nlprun -n infonce  -g 1 'python run_cl_transition_fulldoc_generation_da.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_taskmaster_restaurant_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=restaurant_taskmaster --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/tmRestaurant'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=78099.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}

            # Recipe
            nlprun -n infonce -g 1 'python run_cl_transition_fulldoc_generation_da_bb.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_recipe_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=recipe --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling_bb --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/recipe'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=21999.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}
            nlprun -n infonce -g 1 'python run_cl_transition_fulldoc_generation_da.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_recipe_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=recipe --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/recipe'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=21999.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}
        done
    done
done



# Brownian-motion encoder variant: sweep latent dim {8,16,32} x dataset.
# Two decoding scripts per dataset:
#   run_cl_transition_fulldoc_generation_da_bm.py -> label suffix "_bm"
#     (presumably Brownian-motion dynamics -- TODO confirm against script)
#   run_cl_transition_fulldoc_generation_da.py    -> plain "_DASampling"
# Fix: the first Wikisection job was mis-indented (8 spaces) relative to
# its siblings inside the latent_dim loop; commands are otherwise unchanged.
method='brownian'  # selects LM_*_brownian<latent_dim>_seed<k> decoders and *_brownian encoder ckpts

for decoder_seed in $(seq 1 1 $ndecoderseed); do
    for seed in $(seq 1 1 $nseed); do
        for latent_dim in {8,16,32}; do
            # Wikisection
            nlprun -n brownian -g 1 'python run_cl_transition_fulldoc_generation_da_bm.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_toyWiki_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=wikisection_filter --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling_bm --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/wikisection'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=127199.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}
            nlprun -n brownian -g 1 'python run_cl_transition_fulldoc_generation_da.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_toyWiki_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=wikisection_filter --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/wikisection'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=127199.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}

            # Wikihow
            nlprun -n brownian -g 1 'python run_cl_transition_fulldoc_generation_da_bm.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_wikihow_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=wikihow --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling_bm --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/wikihow'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=150899.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}
            nlprun -n brownian -g 1 'python run_cl_transition_fulldoc_generation_da.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_wikihow_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=wikihow --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/wikihow'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=150899.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}

            # Tickettalk (taskmaster movies)
            nlprun -n brownian -g 1 'python run_cl_transition_fulldoc_generation_da_bm.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_taskmaster_movies_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=taskmaster --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling_bm --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/taskmaster_movies'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=78099.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}
            nlprun -n brownian -g 1 'python run_cl_transition_fulldoc_generation_da.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_taskmaster_movies_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=taskmaster --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/taskmaster_movies'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=78099.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}

            # Restaurant (taskmaster restaurant)
            nlprun -n brownian -g 1 'python run_cl_transition_fulldoc_generation_da_bm.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_taskmaster_restaurant_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=restaurant_taskmaster --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling_bm --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/tmRestaurant'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=78099.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}
            nlprun -n brownian -g 1 'python run_cl_transition_fulldoc_generation_da.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_taskmaster_restaurant_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=restaurant_taskmaster --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/tmRestaurant'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=78099.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}

            # Recipe
            nlprun -n brownian -g 1 'python run_cl_transition_fulldoc_generation_da_bm.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_recipe_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=recipe --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling_bm --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/recipe'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=21999.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}
            nlprun -n brownian -g 1 'python run_cl_transition_fulldoc_generation_da.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_recipe_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=recipe --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/recipe'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=21999.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}
        done
    done
done




# TC (time-control) encoder variant: same sweep shape as the sections
# above (latent dim {8,16,32} x dataset; *_da_bb.py "_bb" jobs plus plain
# *_da.py "_DASampling" jobs). Job names here are per-dataset (*_eval)
# instead of the method name.
# NOTE(review): the Wikisection and Wikihow _bb labels below embed an
# extra "bb" (LM_<method>bb<dim>_DASampling_bb) unlike the other three
# datasets in this section -- confirm whether that is intentional before
# normalizing, since it changes the reported run labels.
method='tc'  # selects LM_*_tc<latent_dim>_seed<k> decoders and *_tc encoder ckpts

for decoder_seed in $(seq 1 1 $ndecoderseed); do
    for seed in $(seq 1 1 $nseed); do
        for latent_dim in {8,16,32}; do
            # Wikisection
            nlprun -n wikisection_eval -g 1 'python run_cl_transition_fulldoc_generation_da_bb.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_toyWiki_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=wikisection_filter --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}'bb'${latent_dim}'_DASampling_bb --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/wikisection'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=127199.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}
            nlprun -n wikisection_eval -g 1 'python run_cl_transition_fulldoc_generation_da.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_toyWiki_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=wikisection_filter --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/wikisection'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=127199.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}

            # Wikihow
            nlprun -n wikihow_eval -g 1 'python run_cl_transition_fulldoc_generation_da_bb.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_wikihow_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=wikihow --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}'bb'${latent_dim}'_DASampling_bb --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/wikihow'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=150899.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}
            nlprun -n wikihow_eval -g 1 'python run_cl_transition_fulldoc_generation_da.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_wikihow_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=wikihow --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/wikihow'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=150899.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}

            # Tickettalk (taskmaster movies)
            nlprun -n movies_eval -g 1 'python run_cl_transition_fulldoc_generation_da_bb.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_taskmaster_movies_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=taskmaster --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling_bb --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/taskmaster_movies'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=78099.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}
            nlprun -n movies_eval -g 1 'python run_cl_transition_fulldoc_generation_da.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_taskmaster_movies_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=taskmaster --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/taskmaster_movies'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=78099.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}

            # Restaurant (taskmaster restaurant)
            nlprun -n restaurant_eval -g 1 'python run_cl_transition_fulldoc_generation_da_bb.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_taskmaster_restaurant_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=restaurant_taskmaster --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling_bb --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/tmRestaurant'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=78099.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}
            nlprun -n restaurant_eval -g 1 'python run_cl_transition_fulldoc_generation_da.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_taskmaster_restaurant_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=restaurant_taskmaster --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/tmRestaurant'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=78099.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}

            # Recipe
            nlprun -n recipe_eval -g 1 'python run_cl_transition_fulldoc_generation_da_bb.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_recipe_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=recipe --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling_bb --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/recipe'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=21999.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}
            nlprun -n recipe_eval -g 1 'python run_cl_transition_fulldoc_generation_da.py --model_type=gpt2 --model_name_or_path='${path2repo}'/decoder/examples/pytorch/language-modeling/LM_recipe_'${method}''${latent_dim}'_seed'${decoder_seed}'/ --prompt="<|endoftext|>" --num_return_sequences=1 --num_intervals=1000 --method=sample --stop_token="<|endoftext|>" --dataset_name=recipe --project='${project}' --p=0.99 --seed='${seed}' --label=LM_'${method}''${latent_dim}'_DASampling --latent_dim='${latent_dim}' --cl_fpath='${path2repo}'/encoder/code/encoder_models/recipe'${latent_dim}'_'${method}'/checkpoints/epoch=99-step=21999.ckpt --no_eos --suppress_eos --tag='${tag} -a ${env_name}
        done
    done
done
#
