#!/usr/bin/env bash
# Generation (inference) commands for the story-generation experiments.
# Each active command decodes a trained checkpoint with fairseq-generate;
# commented-out commands are kept as a record of earlier runs.

# pretrained model (earlier run, kept for reference; checkpoint path was a placeholder)
#CUDA_VISIBLE_DEVICES=0 fairseq-generate --task language_modeling_bpe data-bin/writingPrompts-event2story_bpe500 \
#  --arch transformer_lm_gpt_bpe_format --path xxxxx \
#  --batch-size 32 --beam 1 --sampling --sampling-topk 10 --temperature 0.8 --nbest 1 \
#  --results-path ../out/transformer-lm_bpe500_warmlr --quiet \
#  --skip-invalid-size-inputs-valid-test --tokens-per-sample 1024 --sample-break-mode eos \
#  --pretrained  --pretrained-checkpoint /home/rickwwang/project_research/gpt-2-master/models/117M/gpt2model.pytorch

# conv-s2s baseline: prompt -> story on the BPE-500 "_fix" data.
# Top-k sampling (k=40, beam 1), output length constrained to [150, 250] tokens;
# hypotheses are written under --results-path (--quiet suppresses per-sample logging).
CUDA_VISIBLE_DEVICES=0 fairseq-generate --task translation_bpe data-bin/writingPrompts-prompt2story_bpe500_fix \
  --arch fconv_self_att_wp --path ../fire_data/running/conv-s2s-d0.3_bpe500/checkpoint_best.pt  \
  --batch-size 32 --beam 1 --sampling --sampling-topk 40 --nbest 1 \
  --results-path ../fire_data/running/conv-s2s-d0.3_bpe500 --quiet \
  --skip-invalid-size-inputs-valid-test --min-len 150 --max-len-b 250 --exp-id 1

# conv-s2s fusion
# Same setup as the conv-s2s baseline, but with the fusion architecture;
# --model-overrides points pretrained_checkpoint at the plain conv-s2s
# checkpoint so the fusion model can load it at decode time.
CUDA_VISIBLE_DEVICES=0 fairseq-generate --task translation_bpe data-bin/writingPrompts-prompt2story_bpe500_fix \
  --arch fconv_self_att_wp_fusion --path ../fire_data/running/conv-s2s-d0.3-fusion_bpe500/checkpoint_best.pt  \
  --batch-size 32 --beam 1 --sampling --sampling-topk 40 --nbest 1 \
  --results-path ../fire_data/running/conv-s2s-d0.3-fusion_bpe500 --quiet \
  --skip-invalid-size-inputs-valid-test --min-len 150 --max-len-b 250 --exp-id 1 \
  --model-overrides "{'pretrained_checkpoint':'../fire_data/running/conv-s2s-d0.3_bpe500/checkpoint_best.pt'}"

# event to story model
# (older run with the non-"format" arch and temperature sampling, kept for reference)
#CUDA_VISIBLE_DEVICES=0 fairseq-generate --task hierstory_bpe data-bin/writingPrompts-event2story_bpe500 \
#  --arch bilstm_h_transformer_lm_gpt --path ../out/bilstm_transformer-lm_bpe500/checkpoint_best.pt  \
#  --batch-size 32 --beam 1 --sampling --sampling-topk 10 --temperature 0.8 --nbest 1 \
#  --results-path ../out/bilstm_transformer-lm_bpe500 --quiet \
#  --skip-invalid-size-inputs-valid-test --use-context True --max-len-b 1024
# event -> story with the "format"/pretrained checkpoint, top-k 40 sampling.
# NOTE(review): --use-context True presumably makes the LM condition on the
# event context -- defined by the custom hierstory_bpe task; confirm there.
CUDA_VISIBLE_DEVICES=0 fairseq-generate --task hierstory_bpe data-bin/writingPrompts-event2story_bpe500 \
  --arch bilstm_h_transformer_lm_gpt_format --path ../fire_data/running/bilstm_transformer-lm_bpe500_format_pre/checkpoint_best.pt  \
  --batch-size 32 --beam 1 --sampling --sampling-topk 40 --nbest 1 \
  --results-path ../fire_data/running/bilstm_transformer-lm_bpe500_format_pre --quiet \
  --skip-invalid-size-inputs-valid-test --use-context True --min-len 150 --max-len-b 250 --exp-id 1
# srl -> story: decode with the reference SRL events from the data-bin (exp-id 1).
CUDA_VISIBLE_DEVICES=0 fairseq-generate --task hierstory_bpe data-bin/writingPrompts-srl2story_bpe500 \
  --arch bilstm_h_transformer_lm_gpt_format --path ../fire_data/running/bilstm_transformer-lm_bpe500_format_pre_srl/checkpoint_best.pt  \
  --batch-size 32 --beam 1 --sampling --sampling-topk 40 --nbest 1 \
  --results-path ../fire_data/running/bilstm_transformer-lm_bpe500_format_pre_srl --quiet \
  --skip-invalid-size-inputs-valid-test --use-context True --min-len 150 --max-len-b 250 --exp-id 1
# srl -> story: same checkpoint, but condition on model-GENERATED events
# (a hypothesis file produced by the prompt->srl model); logged as exp-id 2
# to keep the two decodes apart in the results path.
CUDA_VISIBLE_DEVICES=0 fairseq-generate --task hierstory_bpe data-bin/writingPrompts-srl2story_bpe500 \
  --arch bilstm_h_transformer_lm_gpt_format --path ../fire_data/running/bilstm_transformer-lm_bpe500_format_pre_srl/checkpoint_best.pt  \
  --batch-size 32 --beam 1 --sampling --sampling-topk 40 --nbest 1 \
  --results-path ../fire_data/running/bilstm_transformer-lm_bpe500_format_pre_srl --quiet \
  --skip-invalid-size-inputs-valid-test --use-context True --min-len 150 --max-len-b 250 --exp-id 2 \
  --generated-event True --generated-event-path /home/rickwwang/project_research/out/prompt2srl2_500/49hyp.txt.1.topk40.t1.0
# Hierarchical BiLSTM variant (bilstmhier_... arch), top-k 5 sampling.
CUDA_VISIBLE_DEVICES=7 fairseq-generate --task hierstory_bpe data-bin/writingPrompts-srl2story_bpe500 \
  --arch bilstmhier_h_transformer_lm_gpt_format --path ../out/bilstm_transformer-lm_bpe500_format_pre_srl_hier/checkpoint_best.pt  \
  --batch-size 32 --beam 1 --sampling --sampling-topk 5 --nbest 1 \
  --results-path ../out/bilstm_transformer-lm_bpe500_format_pre_srl_hier --quiet \
  --skip-invalid-size-inputs-valid-test --use-context True --min-len 150 --max-len-b 250 --exp-id 1
# Hierarchical variant trained with positional info ("_pos" checkpoint), top-k 5.
CUDA_VISIBLE_DEVICES=5 fairseq-generate --task hierstory_bpe data-bin/writingPrompts-srl2story_bpe500 \
  --arch bilstmhier_h_transformer_lm_gpt_format --path ../out/bilstm_transformer-lm_bpe500_format_pre_srl_hier_pos/checkpoint_best.pt  \
  --batch-size 32 --beam 1 --sampling --sampling-topk 5 --nbest 1 \
  --results-path ../out/bilstm_transformer-lm_bpe500_format_pre_srl_hier_pos --quiet \
  --skip-invalid-size-inputs-valid-test --use-context True --min-len 150 --max-len-b 250 --exp-id 1
# Same "_pos" checkpoint, but top-k 40 and conditioned on generated events.
CUDA_VISIBLE_DEVICES=5 fairseq-generate --task hierstory_bpe data-bin/writingPrompts-srl2story_bpe500 \
  --arch bilstmhier_h_transformer_lm_gpt_format --path ../out/bilstm_transformer-lm_bpe500_format_pre_srl_hier_pos/checkpoint_best.pt  \
  --batch-size 32 --beam 1 --sampling --sampling-topk 40 --nbest 1 \
  --results-path ../out/bilstm_transformer-lm_bpe500_format_pre_srl_hier_pos --quiet \
  --skip-invalid-size-inputs-valid-test --use-context True --min-len 150 --max-len-b 250 --exp-id 1 \
  --generated-event True --generated-event-path /home/rickwwang/project_research/out/prompt2srl2_500/49hyp.txt.1.topk40.t1.0
# Runs on the fixed data-bin ("_fix") with the matching "_fix" checkpoint.
# The four decodes below differ only in GPU, top-k, and which generated-event
# hypothesis file they condition on (all share --exp-id 1, so later runs
# presumably overwrite earlier output in the same results path -- confirm).
CUDA_VISIBLE_DEVICES=5 fairseq-generate --task hierstory_bpe data-bin/writingPrompts-srl2story_bpe500_fix \
  --arch bilstmhier_h_transformer_lm_gpt_format --path ../out/bilstm_transformer-lm_bpe500_format_pre_srl_hier_pos_fix/checkpoint_best.pt  \
  --batch-size 32 --beam 1 --sampling --sampling-topk 40 --nbest 1 \
  --results-path ../out/bilstm_transformer-lm_bpe500_format_pre_srl_hier_pos_fix --quiet \
  --skip-invalid-size-inputs-valid-test --use-context True --min-len 150 --max-len-b 250 --exp-id 1 \
  --generated-event True --generated-event-path /home/rickwwang/project_research/out/prompt2srl2_500_wo_smooth_d1/40hyp.txt.1.topk20.t1.0.nr6
# Events from the prompt->srl model trained with the verb loss.
CUDA_VISIBLE_DEVICES=6 fairseq-generate --task hierstory_bpe data-bin/writingPrompts-srl2story_bpe500_fix \
  --arch bilstmhier_h_transformer_lm_gpt_format --path ../out/bilstm_transformer-lm_bpe500_format_pre_srl_hier_pos_fix/checkpoint_best.pt  \
  --batch-size 32 --beam 1 --sampling --sampling-topk 40 --nbest 1 \
  --results-path ../out/bilstm_transformer-lm_bpe500_format_pre_srl_hier_pos_fix --quiet \
  --skip-invalid-size-inputs-valid-test --use-context True --min-len 150 --max-len-b 250 --exp-id 1 \
  --generated-event True --generated-event-path /home/rickwwang/project_research/fire_data/running/prompt2srl2_500_wo_smooth_d1_verb_loss/29hyp.txt.1.topk20.t1.0.nr6
# Events from the "verb_loss4" variant of the prompt->srl model.
CUDA_VISIBLE_DEVICES=7 fairseq-generate --task hierstory_bpe data-bin/writingPrompts-srl2story_bpe500_fix \
  --arch bilstmhier_h_transformer_lm_gpt_format --path ../out/bilstm_transformer-lm_bpe500_format_pre_srl_hier_pos_fix/checkpoint_best.pt  \
  --batch-size 32 --beam 1 --sampling --sampling-topk 40 --nbest 1 \
  --results-path ../out/bilstm_transformer-lm_bpe500_format_pre_srl_hier_pos_fix --quiet \
  --skip-invalid-size-inputs-valid-test --use-context True --min-len 150 --max-len-b 250 --exp-id 1 \
  --generated-event True --generated-event-path /home/rickwwang/project_research/fire_data/running/prompt2srl2_500_wo_smooth_d1_verb_loss4/21hyp.txt.1.topk20.t1.0.nr6
# Same as the verb-loss run above but with top-k 5 sampling.
CUDA_VISIBLE_DEVICES=4 fairseq-generate --task hierstory_bpe data-bin/writingPrompts-srl2story_bpe500_fix \
  --arch bilstmhier_h_transformer_lm_gpt_format --path ../out/bilstm_transformer-lm_bpe500_format_pre_srl_hier_pos_fix/checkpoint_best.pt  \
  --batch-size 32 --beam 1 --sampling --sampling-topk 5 --nbest 1 \
  --results-path ../out/bilstm_transformer-lm_bpe500_format_pre_srl_hier_pos_fix --quiet \
  --skip-invalid-size-inputs-valid-test --use-context True --min-len 150 --max-len-b 250 --exp-id 1 \
  --generated-event True --generated-event-path /home/rickwwang/project_research/fire_data/running/prompt2srl2_500_wo_smooth_d1_verb_loss/29hyp.txt.1.topk20.t1.0.nr6

# gan model
# Decode with the adversarially-trained generator; --user-dir loads the custom
# coherence_story task/arch definitions. Uses a mid-training checkpoint
# (checkpoint_1_1000) rather than checkpoint_best.
# NOTE(review): --max-batch 3 presumably limits decoding to the first 3 batches
# (a quick sample) -- confirm against the custom task implementation.
CUDA_VISIBLE_DEVICES=5 fairseq-generate --user-dir coherence_story --task hierstory_bpe_gan data-bin/writingPrompts-srl2story_bpe500_fix \
  --arch fairseqgan_gpt_format --path ../out/bilstm_translm_coherence_gan/adv_gen_mg/checkpoint_1_1000.pt  \
  --batch-size 32 --beam 1 --sampling --sampling-topk 20 --nbest 1 \
  --results-path ../out/bilstm_translm_coherence_gan/adv_gen_mg --quiet \
  --skip-invalid-size-inputs-valid-test --use-context True --min-len 150 --max-len-b 250 --exp-id 1 --max-batch 3

# story language model
# (older temperature-sampling runs, kept for reference)
#CUDA_VISIBLE_DEVICES=0 fairseq-generate --task language_modeling_bpe data-bin/writingPrompts-event2story_bpe500 \
#  --arch transformer_lm_gpt_bpe_format --path ../out/transformer-lm_bpe500_warmlr_format/checkpoint_best.pt \
#  --batch-size 32 --beam 1 --sampling --sampling-topk 10 --temperature 0.8 --nbest 1 \
#  --results-path ../out/transformer-lm_bpe500_warmlr_format --quiet \
#  --skip-invalid-size-inputs-valid-test --tokens-per-sample 1024 --sample-break-mode eos \
#  --use-context True --max-len-b 1024
#CUDA_VISIBLE_DEVICES=0 fairseq-generate --task language_modeling_bpe data-bin/writingPrompts-event2story_bpe500 \
#  --arch transformer_lm_gpt_bpe_format --path ../out/transformer-lm_bpe500_warmlr_format2/checkpoint_best.pt \
#  --batch-size 32 --beam 1 --sampling --sampling-topk 10 --temperature 0.8 --nbest 1 \
#  --results-path ../out/transformer-lm_bpe500_warmlr_format2 --quiet \
#  --skip-invalid-size-inputs-valid-test --tokens-per-sample 1024 --sample-break-mode eos \
#  --use-context True --max-len-b 1024
#CUDA_VISIBLE_DEVICES=0 fairseq-generate --task language_modeling_bpe data-bin/writingPrompts-event2story_bpe500 \
#  --arch transformer_lm_gpt_bpe_format --path ../out/transformer-lm_bpe500_warmlr_format_pre2/checkpoint_best.pt \
#  --batch-size 32 --beam 1 --sampling --sampling-topk 10 --temperature 0.8 --nbest 1 \
#  --results-path ../out/transformer-lm_bpe500_warmlr_format_pre2 --quiet \
#  --skip-invalid-size-inputs-valid-test --tokens-per-sample 1024 --sample-break-mode eos \
#  --use-context True --max-len-b 1024
# LM without GPT pretraining ("format2"), top-k 40, length in [150, 250].
CUDA_VISIBLE_DEVICES=0 fairseq-generate --task language_modeling_bpe data-bin/writingPrompts-event2story_bpe500 \
  --arch transformer_lm_gpt_bpe_format --path ../fire_data/running/transformer-lm_bpe500_warmlr_format2/checkpoint_best.pt \
  --batch-size 32 --beam 1 --sampling --sampling-topk 40 --nbest 1 \
  --results-path ../fire_data/running/transformer-lm_bpe500_warmlr_format2 --quiet \
  --skip-invalid-size-inputs-valid-test --tokens-per-sample 1024 --sample-break-mode eos \
  --use-context True --min-len 150 --max-len-b 250 --exp-id 1
# LM with pretraining ("format_pre2"), same decoding settings.
CUDA_VISIBLE_DEVICES=0 fairseq-generate --task language_modeling_bpe data-bin/writingPrompts-event2story_bpe500 \
  --arch transformer_lm_gpt_bpe_format --path ../fire_data/running/transformer-lm_bpe500_warmlr_format_pre2/checkpoint_best.pt \
  --batch-size 32 --beam 1 --sampling --sampling-topk 40 --nbest 1 \
  --results-path ../fire_data/running/transformer-lm_bpe500_warmlr_format_pre2 --quiet \
  --skip-invalid-size-inputs-valid-test --tokens-per-sample 1024 --sample-break-mode eos \
  --use-context True --min-len 150 --max-len-b 250 --exp-id 1
# Memory-augmented LM variant, top-k 20.
CUDA_VISIBLE_DEVICES=4 fairseq-generate --task language_modeling_bpe data-bin/writingPrompts-event2story_bpe500 \
  --arch transformer_lm_bpe_memory_gpt_format --path ../fire_data/running/transformer-lm_bpe500_warmlr_format_pre2_memory_gpt/checkpoint_best.pt \
  --batch-size 32 --beam 1 --sampling --sampling-topk 20 --nbest 1 \
  --results-path ../fire_data/running/transformer-lm_bpe500_warmlr_format_pre2_memory_gpt --quiet \
  --skip-invalid-size-inputs-valid-test --tokens-per-sample 1024 --sample-break-mode eos \
  --use-context True --min-len 150 --max-len-b 250 --exp-id 1

# rl model
# LMs fine-tuned with a coherence-discriminator RL reward; the "_05"/"_09"
# suffixes name different reward-weight settings of the same training recipe.
# All runs decode a specific mid-training checkpoint via the custom
# coherence_story user dir and sample only the first batches (--max-batch 3).
CUDA_VISIBLE_DEVICES=1 fairseq-generate --user-dir coherence_story --task language_modeling_bpe_rl data-bin/writingPrompts-event2story_bpe500 \
  --arch fairseqrl_gpt_format --path ../out/transformer-lm_bpe500_warmlr_format_pre2_coherence_dis_rl/checkpoint_28_513000.pt  \
  --batch-size 32 --beam 1 --sampling --sampling-topk 20 --nbest 1 \
  --results-path ../out/transformer-lm_bpe500_warmlr_format_pre2_coherence_dis_rl --quiet \
  --skip-invalid-size-inputs-valid-test --tokens-per-sample 1024 --sample-break-mode eos \
  --use-context True --min-len 150 --max-len-b 250 --exp-id 1 --max-batch 3
CUDA_VISIBLE_DEVICES=0 fairseq-generate --user-dir coherence_story --task language_modeling_bpe_rl data-bin/writingPrompts-event2story_bpe500 \
  --arch fairseqrl_gpt_format --path ../out/transformer-lm_bpe500_warmlr_format_pre2_coherence_dis_rl_05/checkpoint_28_513000.pt  \
  --batch-size 32 --beam 1 --sampling --sampling-topk 20 --nbest 1 \
  --results-path ../out/transformer-lm_bpe500_warmlr_format_pre2_coherence_dis_rl_05 --quiet \
  --skip-invalid-size-inputs-valid-test --tokens-per-sample 1024 --sample-break-mode eos \
  --use-context True --min-len 150 --max-len-b 250 --exp-id 1 --max-batch 3
CUDA_VISIBLE_DEVICES=2 fairseq-generate --user-dir coherence_story --task language_modeling_bpe_rl data-bin/writingPrompts-event2story_bpe500 \
  --arch fairseqrl_gpt_format --path ../out/transformer-lm_bpe500_warmlr_format_pre2_coherence_dis_rl_09/checkpoint_28_513000.pt  \
  --batch-size 32 --beam 1 --sampling --sampling-topk 20 --nbest 1 \
  --results-path ../out/transformer-lm_bpe500_warmlr_format_pre2_coherence_dis_rl_09 --quiet \
  --skip-invalid-size-inputs-valid-test --tokens-per-sample 1024 --sample-break-mode eos \
  --use-context True --min-len 150 --max-len-b 250 --exp-id 1 --max-batch 3
# NOTE(review): --temperature 16 below is unusually high for sampling --
# possibly a typo for 1.6; confirm against the training/eval logs.
CUDA_VISIBLE_DEVICES=6 fairseq-generate --user-dir coherence_story --task language_modeling_bpe_rl data-bin/writingPrompts-event2story_bpe500 \
  --arch fairseqrl_gpt_format --path ../out/transformer-lm_bpe500_warmlr_format_pre2_coherence_dis_rl_en_05/checkpoint_29_506000.pt  \
  --batch-size 32 --beam 1 --sampling --sampling-topk 20 --nbest 1 --temperature 16 \
  --results-path ../out/transformer-lm_bpe500_warmlr_format_pre2_coherence_dis_rl_en_05 --quiet \
  --skip-invalid-size-inputs-valid-test --tokens-per-sample 1024 --sample-break-mode eos \
  --use-context True --min-len 150 --max-len-b 250 --exp-id 1 --max-batch 3
CUDA_VISIBLE_DEVICES=6 fairseq-generate --user-dir coherence_story --task language_modeling_bpe_rl data-bin/writingPrompts-event2story_bpe500 \
  --arch fairseqrl_gpt_format --path ../fire_data/running/transformer-lm_bpe500_warmlr_format_pre2_coherence_dis_rl_en_09/checkpoint_28_498000.pt  \
  --batch-size 32 --beam 1 --sampling --sampling-topk 20 --nbest 1 \
  --results-path ../fire_data/running/transformer-lm_bpe500_warmlr_format_pre2_coherence_dis_rl_en_09 --quiet \
  --skip-invalid-size-inputs-valid-test --tokens-per-sample 1024 --sample-break-mode eos \
  --use-context True --min-len 150 --max-len-b 250 --exp-id 1 --max-batch 3

# prompt to event
# (older runs with temperature sampling / beam search, kept for reference)
#CUDA_VISIBLE_DEVICES=3 fairseq-generate --task translation data-bin/writingPrompts-prompt2event_500 \
#  --arch transformer_prompt_to_event --path ../fire_data/500version/prompt2event_500/checkpoint_best.pt \
#  --batch-size 32 --beam 1 --sampling --sampling-topk 10 --temperature 0.8 --nbest 1 \
#  --results-path ../fire_data/500version/prompt2event_500 --quiet \
#  --skip-invalid-size-inputs-valid-test  --max-len-b 256
#CUDA_VISIBLE_DEVICES=2 fairseq-generate --task translation data-bin/writingPrompts-prompt2srl2_500 \
#  --arch transformer_prompt_to_event --path ../out/prompt2srl2_500/checkpoint_best.pt \
#  --batch-size 32 --beam 1 --sampling --sampling-topk 10 --temperature 0.8 --nbest 1 \
#  --results-path ../out/prompt2srl2_500 --quiet \
#  --skip-invalid-size-inputs-valid-test  --max-len-b 256
#CUDA_VISIBLE_DEVICES=3 fairseq-generate --task translation data-bin/writingPrompts-prompt2srl2_500 \
#  --arch transformer_prompt_to_event --path ../out/prompt2srl2_500/checkpoint_best.pt \
#  --batch-size 32 --beam 5 \
#  --results-path ../out/prompt2srl2_500 --quiet \
#  --skip-invalid-size-inputs-valid-test  --max-len-b 256
#CUDA_VISIBLE_DEVICES=2 fairseq-generate --task translation data-bin/writingPrompts-prompt2srl2_500 \
#  --arch transformer_prompt_to_event --path ../out/prompt2srl2_500/checkpoint_best.pt \
#  --batch-size 32 --beam 1 --sampling --sampling-topk 10 --temperature 0.8 --nbest 1 \
#  --results-path ../out/prompt2srl2_500 --quiet \
#  --skip-invalid-size-inputs-valid-test  --max-len-b 256 --no-repeat-ngram-size 6
#CUDA_VISIBLE_DEVICES=2 fairseq-generate --task translation data-bin/writingPrompts-prompt2srl2_500 \
#  --arch transformer_prompt_to_event --path ../out/prompt2srl2_500_fine/checkpoint_best.pt \
#  --batch-size 32 --beam 1 --sampling --sampling-topk 10 --temperature 0.8 --nbest 1 \
#  --results-path ../out/prompt2srl2_500_fine --quiet \
#  --skip-invalid-size-inputs-valid-test  --max-len-b 256
# prompt -> SRL event sequence; --unkpen 1 penalizes <unk> tokens in the output.
# These hypothesis files are the --generated-event-path inputs used by the
# srl -> story decodes above.
CUDA_VISIBLE_DEVICES=2 fairseq-generate --task translation data-bin/writingPrompts-prompt2srl2_500 \
  --arch transformer_prompt_to_event --path ../out/prompt2srl2_500/checkpoint_best.pt \
  --batch-size 32 --beam 1 --sampling --sampling-topk 40 --nbest 1 \
  --results-path ../out/prompt2srl2_500 --quiet \
  --skip-invalid-size-inputs-valid-test  --min-len 150 --max-len-b 250 --unkpen 1 --exp-id 1
CUDA_VISIBLE_DEVICES=5 fairseq-generate --task translation data-bin/writingPrompts-prompt2srl2_500 \
  --arch transformer_prompt_to_event --path ../fire_data/running/prompt2srl2_500_fine_fix/checkpoint_best.pt \
  --batch-size 32 --beam 1 --sampling --sampling-topk 10 --nbest 1 \
  --results-path ../fire_data/running/prompt2srl2_500_fine_fix --quiet \
  --skip-invalid-size-inputs-valid-test  --min-len 150 --max-len-b 250 --unkpen 1 --exp-id 1
CUDA_VISIBLE_DEVICES=5 fairseq-generate --task translation data-bin/writingPrompts-prompt2srl2_500 \
  --arch transformer_prompt_to_event --path ../fire_data/running/prompt2srl2_500_fine_fix2/checkpoint_best.pt \
  --batch-size 32 --beam 1 --sampling --sampling-topk 10 --nbest 1 \
  --results-path ../fire_data/running/prompt2srl2_500_fine_fix2 --quiet \
  --skip-invalid-size-inputs-valid-test  --min-len 150 --max-len-b 250 --unkpen 1 --exp-id 1
# Variant trained without label smoothing, dropout 1 ("wo_smooth_d1").
CUDA_VISIBLE_DEVICES=7 fairseq-generate --task translation data-bin/writingPrompts-prompt2srl2_500 \
  --arch transformer_prompt_to_event --path ../out/prompt2srl2_500_wo_smooth_d1/checkpoint_best.pt \
  --batch-size 32 --beam 1 --sampling --sampling-topk 10 --nbest 1 \
  --results-path ../out/prompt2srl2_500_wo_smooth_d1 --quiet \
  --skip-invalid-size-inputs-valid-test  --min-len 150 --max-len-b 250 --unkpen 1 --exp-id 1
# Convolutional (fconv) variant of the same prompt -> SRL model.
CUDA_VISIBLE_DEVICES=7 fairseq-generate --task translation data-bin/writingPrompts-prompt2srl2_500 \
  --arch fconv_self_att_wp --path ../fire_data/running/prompt2srl2_500_wo_smooth_d1_conv/checkpoint_best.pt \
  --batch-size 32 --beam 1 --sampling --sampling-topk 10 --nbest 1 \
  --results-path ../fire_data/running/prompt2srl2_500_wo_smooth_d1_conv --quiet \
  --skip-invalid-size-inputs-valid-test  --min-len 150 --max-len-b 250 --unkpen 1 --exp-id 1
# Repeat of the base decode with an n-gram repetition block (no-repeat-ngram 6).
CUDA_VISIBLE_DEVICES=3 fairseq-generate --task translation data-bin/writingPrompts-prompt2srl2_500 \
  --arch transformer_prompt_to_event --path ../out/prompt2srl2_500/checkpoint_best.pt \
  --batch-size 32 --beam 1 --sampling --sampling-topk 10 --nbest 1 \
  --results-path ../out/prompt2srl2_500 --quiet \
  --skip-invalid-size-inputs-valid-test  --min-len 150 --max-len-b 250 --unkpen 1 --exp-id 1 --no-repeat-ngram-size 6
# Verb-attention variant (custom --use-verb-attention flag of the verb arch).
CUDA_VISIBLE_DEVICES=0 fairseq-generate --task translation data-bin/writingPrompts-prompt2srl2_500 \
  --arch transformer_verb_prompt_to_event --use-verb-attention --path ../out/prompt2srl2_500_wo_smooth_d1_verb/checkpoint_best.pt \
  --batch-size 32 --beam 1 --sampling --sampling-topk 10 --nbest 1 \
  --results-path ../out/prompt2srl2_500_wo_smooth_d1_verb --quiet \
  --skip-invalid-size-inputs-valid-test  --min-len 150 --max-len-b 250 --unkpen 1 --exp-id 1
# Big arch on the "nosympol" (no-symbol) preprocessed data-bin.
CUDA_VISIBLE_DEVICES=3 fairseq-generate --task translation data-bin/writingPrompts-prompt2srl2_500_nosympol \
  --arch transformer_prompt_to_event_big  --path ../fire_data/running/prompt2srl2_500_wo_smooth_d1_gpt2_nosympol/checkpoint_best.pt \
  --batch-size 32 --beam 1 --sampling --sampling-topk 20 --nbest 1 \
  --results-path ../fire_data/running/prompt2srl2_500_wo_smooth_d1_gpt2_nosympol --quiet \
  --skip-invalid-size-inputs-valid-test  --min-len 150 --max-len-b 250 --unkpen 1 --exp-id 1

# prompt to verb
# All runs in this section are retired (commented out); kept as a record of the
# prompt -> verb / main-verb / SRL intermediate-representation experiments.
#CUDA_VISIBLE_DEVICES=2 fairseq-generate --task translation data-bin/writingPrompts-prompt2verb_500 \
#  --arch transformer_prompt_to_event --path ../out/prompt2verb_500/checkpoint_best.pt \
#  --batch-size 32 --beam 1 --sampling --sampling-topk 10 --temperature 0.8 --nbest 1 \
#  --results-path ../out/prompt2verb_500 --quiet \
#  --skip-invalid-size-inputs-valid-test  --max-len-b 1024
#CUDA_VISIBLE_DEVICES=2 fairseq-generate --task translation data-bin/writingPrompts-prompt2verb_500 \
#  --arch transformer_prompt_to_event --path ../out/prompt2verb_500/checkpoint_best.pt \
#  --batch-size 32 --beam 5 \
#  --results-path ../out/prompt2verb_500 --quiet \
#  --skip-invalid-size-inputs-valid-test  --max-len-b 1024
#CUDA_VISIBLE_DEVICES=2 fairseq-generate --task translation data-bin/writingPrompts-prompt2mainverb_500 \
#  --arch transformer_prompt_to_event --path ../out/prompt2mainverb_500/checkpoint_best.pt \
#  --batch-size 32 --beam 1 --sampling --sampling-topk 10 --temperature 0.8 --nbest 1 \
#  --results-path ../out/prompt2mainverb_500 --quiet \
#  --skip-invalid-size-inputs-valid-test  --max-len-b 1024
#CUDA_VISIBLE_DEVICES=3 fairseq-generate --task translation data-bin/writingPrompts-prompt2mainverb_500 \
#  --arch transformer_prompt_to_event --path ../out/prompt2mainverb_500/checkpoint_best.pt \
#  --batch-size 32 --beam 5 \
#  --results-path ../out/prompt2mainverb_500 --quiet \
#  --skip-invalid-size-inputs-valid-test  --max-len-b 1024
#CUDA_VISIBLE_DEVICES=2 fairseq-generate --task translation data-bin/writingPrompts-prompt2srl_500 \
#  --arch transformer_prompt_to_event --path ../out/prompt2srl_500/checkpoint_best.pt \
#  --batch-size 32 --beam 1 --sampling --sampling-topk 10 --temperature 0.8 --nbest 1 \
#  --results-path ../out/prompt2srl_500 --quiet \
#  --skip-invalid-size-inputs-valid-test  --max-len-b 256
#CUDA_VISIBLE_DEVICES=3 fairseq-generate --task translation data-bin/writingPrompts-prompt2srl_500 \
#  --arch transformer_prompt_to_event --path ../out/prompt2srl_500/checkpoint_best.pt \
#  --batch-size 32 --beam 5 \
#  --results-path ../out/prompt2srl_500 --quiet \
#  --skip-invalid-size-inputs-valid-test  --max-len-b 256
#CUDA_VISIBLE_DEVICES=2 fairseq-generate --task translation data-bin/writingPrompts-prompt2srl_500 \
#  --arch transformer_prompt_to_event_big --path ../out/prompt2srl_500_big/checkpoint_best.pt \
#  --batch-size 32 --beam 1 --sampling --sampling-topk 10 --temperature 0.8 --nbest 1 \
#  --results-path ../out/prompt2srl_500_big --quiet \
#  --skip-invalid-size-inputs-valid-test  --max-len-b 256
#CUDA_VISIBLE_DEVICES=3 fairseq-generate --task translation data-bin/writingPrompts-prompt2srl_500 \
#  --arch transformer_prompt_to_event_big --path ../out/prompt2srl_500_big/checkpoint_best.pt \
#  --batch-size 32 --beam 5 \
#  --results-path ../out/prompt2srl_500_big --quiet \
#  --skip-invalid-size-inputs-valid-test  --max-len-b 256

# prompt to story model
# All runs in this section are retired (commented out); kept as a record of the
# direct prompt -> story seq2seq baselines.
#CUDA_VISIBLE_DEVICES=3 fairseq-generate --task translation_bpe data-bin/writingPrompts-prompt2story_bpe500 \
#  --arch transformer_prompt_to_event_bpe --path ../fire_data/500version/prompt2story_bpe500/checkpoint_best.pt \
#  --batch-size 32 --beam 5 \
#  --results-path ../fire_data/500version/prompt2story_bpe500 --quiet \
#  --skip-invalid-size-inputs-valid-test  --max-len-b 1024
#CUDA_VISIBLE_DEVICES=3 fairseq-generate --task translation_bpe data-bin/writingPrompts-prompt2story_bpe500 \
#  --arch transformer_prompt_to_event_bpe --path ../fire_data/500version/prompt2story_bpe500/checkpoint_best.pt \
#  --batch-size 32 --beam 1 --sampling --sampling-topk 10 --temperature 0.8 --nbest 1 \
#  --results-path ../fire_data/500version/prompt2story_bpe500 --quiet \
#  --skip-invalid-size-inputs-valid-test  --max-len-b 1024
#CUDA_VISIBLE_DEVICES=2 fairseq-generate --task translation data-bin/writingPrompts-prompt2story_500 \
#  --arch transformer_prompt_to_event --path ../out/prompt2story_500/checkpoint_best.pt \
#  --batch-size 32 --beam 1 --sampling --sampling-topk 10 --temperature 0.8 --nbest 1 \
#  --results-path ../out/prompt2story_500 --quiet \
#  --skip-invalid-size-inputs-valid-test  --max-len-b 1024
#CUDA_VISIBLE_DEVICES=3 fairseq-generate --task translation data-bin/writingPrompts-prompt2story_500 \
#  --arch transformer_prompt_to_event --path ../out/prompt2story_500/checkpoint_best.pt \
#  --batch-size 32 --beam 5 \
#  --results-path ../out/prompt2story_500 --quiet \
#  --skip-invalid-size-inputs-valid-test  --max-len-b 1024