Batch upload part 20
- nlu/glue_exp/rte/1dr0.15,mlr5e-04,clr2e-03,ep=30.0t=18d21h28m30/RTE.tsv +3001 -0
- nlu/glue_exp/rte/1dr0.15,mlr5e-04,clr2e-03,ep=30.0t=18d21h28m30/all_results.json +9 -0
- nlu/glue_exp/rte/1dr0.15,mlr5e-04,clr2e-03,ep=30.0t=18d21h28m30/eval_results.json +9 -0
- nlu/glue_exp/rte/1dr0.15,mlr5e-04,clr2e-03,ep=30.0t=18d21h28m30/ft/adapter_config.json +26 -0
- nlu/glue_exp/rte/1dr0.15,mlr5e-04,clr2e-03,ep=30.0t=18d21h28m30/ft/added_tokens.json +3 -0
- nlu/glue_exp/rte/1dr0.15,mlr5e-04,clr2e-03,ep=30.0t=18d21h28m30/ft/special_tokens_map.json +15 -0
- nlu/glue_exp/rte/1dr0.15,mlr5e-04,clr2e-03,ep=30.0t=18d21h28m30/ft/spm.model +3 -0
- nlu/glue_exp/rte/1dr0.15,mlr5e-04,clr2e-03,ep=30.0t=18d21h28m30/ft/tokenizer.json +0 -0
- nlu/glue_exp/rte/1dr0.15,mlr5e-04,clr2e-03,ep=30.0t=18d21h28m30/ft/tokenizer_config.json +60 -0
- nlu/glue_exp/rte/1dr0.15,mlr5e-04,clr2e-03,ep=30.0t=18d21h28m30/ft2/adapter_config.json +26 -0
- nlu/glue_exp/rte/1dr0.15,mlr5e-04,clr2e-03,ep=30.0t=18d21h28m30/ft2/adapter_model.bin +3 -0
- nlu/glue_exp/rte/1dr0.15,mlr5e-04,clr2e-03,ep=30.0t=18d21h28m30/trainer_state.json +411 -0
- nlu/glue_exp/rte/2dr0.15,mlr5e-04,clr1e-03,ep=30.0t=18d21h08m36/RTE.tsv +3001 -0
- nlu/glue_exp/rte/2dr0.15,mlr5e-04,clr1e-03,ep=30.0t=18d21h08m36/all_results.json +9 -0
- nlu/glue_exp/rte/2dr0.15,mlr5e-04,clr1e-03,ep=30.0t=18d21h08m36/eval_results.json +9 -0
- nlu/glue_exp/rte/2dr0.15,mlr5e-04,clr1e-03,ep=30.0t=18d21h08m36/ft/added_tokens.json +3 -0
- nlu/glue_exp/rte/2dr0.15,mlr5e-04,clr1e-03,ep=30.0t=18d21h08m36/ft/special_tokens_map.json +15 -0
- nlu/glue_exp/rte/2dr0.15,mlr5e-04,clr1e-03,ep=30.0t=18d21h08m36/ft/spm.model +3 -0
- nlu/glue_exp/rte/2dr0.15,mlr5e-04,clr1e-03,ep=30.0t=18d21h08m36/ft/tokenizer.json +0 -0
- nlu/glue_exp/rte/2dr0.15,mlr5e-04,clr1e-03,ep=30.0t=18d21h08m36/ft/tokenizer_config.json +60 -0
- nlu/glue_exp/rte/2dr0.15,mlr5e-04,clr1e-03,ep=30.0t=18d21h08m36/trainer_state.json +411 -0
- nlu/test.sh +81 -0
- nlu/training_metrics_bs8.json +26 -0
- omini/train_flux/train_spatial_alignment.py +211 -0
- omini/train_flux/train_spatial_alignment_rotation.py +211 -0
- omini/train_flux/train_subject.py +205 -0
- omini/train_flux/train_subject_rotation.py +205 -0
- omini/train_flux/train_token_integration.py +136 -0
- omini/train_flux/trainer.py +384 -0
- omini/train_flux/trainer_rotation.py +449 -0
- train/README.md +253 -0
- train/config/compact_token_representation.yaml +65 -0
- train/config/feature_reuse.yaml +64 -0
- train/config/multi_condition.yaml +68 -0
- train/config/spatial_alignment.yaml +64 -0
- train/config/spatial_alignment_rotation.yaml +63 -0
- train/config/subject.yaml +54 -0
- train/config/subject_rotation.yaml +53 -0
- train/config/token_integration.yaml +63 -0
- train/requirements.txt +15 -0
- train/script/data_download/data_download1.sh +1 -0
- train/script/data_download/data_download2.sh +3 -0
- train/script/train_compact_token_representation.sh +13 -0
- train/script/train_feature_reuse.sh +13 -0
- train/script/train_multi_condition.sh +13 -0
- train/script/train_spatial_alignment.sh +13 -0
- train/script/train_spatial_alignment_rotation.sh +13 -0
- train/script/train_subject.sh +13 -0
- train/script/train_subject_rotation.sh +13 -0
- train/script/train_token_intergration.sh +13 -0
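
The `nlu/glue_exp/rte/` run directories encode their training configuration directly in the directory name. Below is a minimal parser sketch for that scheme; the field meanings are assumptions not confirmed by the listing (leading digit as a run/seed id, `dr` as dropout, `mlr` and `clr` as two learning rates, plausibly for the adapter modules and the classifier head, `ep` as epochs, and `t=` as elapsed wall-clock time):

```python
import re

# Hypothetical parser for run-directory names such as
# "1dr0.15,mlr5e-04,clr2e-03,ep=30.0t=18d21h28m30".
# Field meanings are assumptions: run id, dropout (dr), two learning
# rates (mlr, clr), epochs (ep), and an elapsed-time stamp (t=...).
RUN_RE = re.compile(
    r"(?P<run>\d+)"
    r"dr(?P<dr>[\d.]+)"
    r",mlr(?P<mlr>[\de.+-]+)"
    r",clr(?P<clr>[\de.+-]+)"
    r",ep=(?P<ep>[\d.]+)"
    r"t=(?P<t>.+)"
)

def parse_run_name(name: str) -> dict:
    m = RUN_RE.fullmatch(name)
    if m is None:
        raise ValueError(f"unrecognized run name: {name!r}")
    d = m.groupdict()
    return {
        "run": int(d["run"]),
        "dropout": float(d["dr"]),
        "module_lr": float(d["mlr"]),       # 5e-04 -> 0.0005
        "classifier_lr": float(d["clr"]),   # 2e-03 -> 0.002
        "epochs": float(d["ep"]),
        "elapsed": d["t"],                  # e.g. "18d21h28m30"
    }

print(parse_run_name("1dr0.15,mlr5e-04,clr2e-03,ep=30.0t=18d21h28m30"))
```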
nlu/glue_exp/rte/1dr0.15,mlr5e-04,clr2e-03,ep=30.0t=18d21h28m30/RTE.tsv
ADDED
@@ -0,0 +1,3001 @@
index	prediction
0	not_entailment
1	not_entailment
2	entailment
3	not_entailment
4	entailment
5	entailment
6	entailment
7	not_entailment
8	not_entailment
9	entailment
[rows 10 through 1340 continue in the same two-column format, each mapping a test-example index to entailment or not_entailment; the full file covers indices 0-2999]
|
| 1343 |
+
1341 entailment
|
| 1344 |
+
1342 entailment
|
| 1345 |
+
1343 entailment
|
| 1346 |
+
1344 not_entailment
|
| 1347 |
+
1345 not_entailment
|
| 1348 |
+
1346 entailment
|
| 1349 |
+
1347 entailment
|
| 1350 |
+
1348 entailment
|
| 1351 |
+
1349 entailment
|
| 1352 |
+
1350 not_entailment
|
| 1353 |
+
1351 entailment
|
| 1354 |
+
1352 entailment
|
| 1355 |
+
1353 not_entailment
|
| 1356 |
+
1354 entailment
|
| 1357 |
+
1355 entailment
|
| 1358 |
+
1356 entailment
|
| 1359 |
+
1357 entailment
|
| 1360 |
+
1358 entailment
|
| 1361 |
+
1359 entailment
|
| 1362 |
+
1360 not_entailment
|
| 1363 |
+
1361 not_entailment
|
| 1364 |
+
1362 not_entailment
|
| 1365 |
+
1363 not_entailment
|
| 1366 |
+
1364 not_entailment
|
| 1367 |
+
1365 entailment
|
| 1368 |
+
1366 entailment
|
| 1369 |
+
1367 not_entailment
|
| 1370 |
+
1368 entailment
|
| 1371 |
+
1369 not_entailment
|
| 1372 |
+
1370 not_entailment
|
| 1373 |
+
1371 entailment
|
| 1374 |
+
1372 not_entailment
|
| 1375 |
+
1373 entailment
|
| 1376 |
+
1374 entailment
|
| 1377 |
+
1375 entailment
|
| 1378 |
+
1376 not_entailment
|
| 1379 |
+
1377 not_entailment
|
| 1380 |
+
1378 not_entailment
|
| 1381 |
+
1379 not_entailment
|
| 1382 |
+
1380 not_entailment
|
| 1383 |
+
1381 entailment
|
| 1384 |
+
1382 entailment
|
| 1385 |
+
1383 not_entailment
|
| 1386 |
+
1384 not_entailment
|
| 1387 |
+
1385 not_entailment
|
| 1388 |
+
1386 entailment
|
| 1389 |
+
1387 entailment
|
| 1390 |
+
1388 not_entailment
|
| 1391 |
+
1389 not_entailment
|
| 1392 |
+
1390 entailment
|
| 1393 |
+
1391 not_entailment
|
| 1394 |
+
1392 not_entailment
|
| 1395 |
+
1393 entailment
|
| 1396 |
+
1394 entailment
|
| 1397 |
+
1395 not_entailment
|
| 1398 |
+
1396 not_entailment
|
| 1399 |
+
1397 entailment
|
| 1400 |
+
1398 not_entailment
|
| 1401 |
+
1399 entailment
|
| 1402 |
+
1400 not_entailment
|
| 1403 |
+
1401 not_entailment
|
| 1404 |
+
1402 entailment
|
| 1405 |
+
1403 entailment
|
| 1406 |
+
1404 entailment
|
| 1407 |
+
1405 entailment
|
| 1408 |
+
1406 entailment
|
| 1409 |
+
1407 entailment
|
| 1410 |
+
1408 not_entailment
|
| 1411 |
+
1409 not_entailment
|
| 1412 |
+
1410 entailment
|
| 1413 |
+
1411 entailment
|
| 1414 |
+
1412 not_entailment
|
| 1415 |
+
1413 entailment
|
| 1416 |
+
1414 not_entailment
|
| 1417 |
+
1415 entailment
|
| 1418 |
+
1416 entailment
|
| 1419 |
+
1417 entailment
|
| 1420 |
+
1418 entailment
|
| 1421 |
+
1419 entailment
|
| 1422 |
+
1420 not_entailment
|
| 1423 |
+
1421 entailment
|
| 1424 |
+
1422 entailment
|
| 1425 |
+
1423 entailment
|
| 1426 |
+
1424 entailment
|
| 1427 |
+
1425 entailment
|
| 1428 |
+
1426 entailment
|
| 1429 |
+
1427 entailment
|
| 1430 |
+
1428 entailment
|
| 1431 |
+
1429 not_entailment
|
| 1432 |
+
1430 entailment
|
| 1433 |
+
1431 entailment
|
| 1434 |
+
1432 not_entailment
|
| 1435 |
+
1433 entailment
|
| 1436 |
+
1434 entailment
|
| 1437 |
+
1435 not_entailment
|
| 1438 |
+
1436 entailment
|
| 1439 |
+
1437 not_entailment
|
| 1440 |
+
1438 entailment
|
| 1441 |
+
1439 entailment
|
| 1442 |
+
1440 entailment
|
| 1443 |
+
1441 entailment
|
| 1444 |
+
1442 entailment
|
| 1445 |
+
1443 not_entailment
|
| 1446 |
+
1444 entailment
|
| 1447 |
+
1445 entailment
|
| 1448 |
+
1446 not_entailment
|
| 1449 |
+
1447 entailment
|
| 1450 |
+
1448 not_entailment
|
| 1451 |
+
1449 entailment
|
| 1452 |
+
1450 entailment
|
| 1453 |
+
1451 not_entailment
|
| 1454 |
+
1452 not_entailment
|
| 1455 |
+
1453 not_entailment
|
| 1456 |
+
1454 entailment
|
| 1457 |
+
1455 entailment
|
| 1458 |
+
1456 entailment
|
| 1459 |
+
1457 entailment
|
| 1460 |
+
1458 entailment
|
| 1461 |
+
1459 entailment
|
| 1462 |
+
1460 not_entailment
|
| 1463 |
+
1461 entailment
|
| 1464 |
+
1462 entailment
|
| 1465 |
+
1463 entailment
|
| 1466 |
+
1464 entailment
|
| 1467 |
+
1465 entailment
|
| 1468 |
+
1466 entailment
|
| 1469 |
+
1467 entailment
|
| 1470 |
+
1468 entailment
|
| 1471 |
+
1469 not_entailment
|
| 1472 |
+
1470 entailment
|
| 1473 |
+
1471 entailment
|
| 1474 |
+
1472 not_entailment
|
| 1475 |
+
1473 not_entailment
|
| 1476 |
+
1474 entailment
|
| 1477 |
+
1475 entailment
|
| 1478 |
+
1476 entailment
|
| 1479 |
+
1477 entailment
|
| 1480 |
+
1478 entailment
|
| 1481 |
+
1479 not_entailment
|
| 1482 |
+
1480 not_entailment
|
| 1483 |
+
1481 not_entailment
|
| 1484 |
+
1482 entailment
|
| 1485 |
+
1483 entailment
|
| 1486 |
+
1484 entailment
|
| 1487 |
+
1485 entailment
|
| 1488 |
+
1486 entailment
|
| 1489 |
+
1487 entailment
|
| 1490 |
+
1488 entailment
|
| 1491 |
+
1489 entailment
|
| 1492 |
+
1490 entailment
|
| 1493 |
+
1491 entailment
|
| 1494 |
+
1492 entailment
|
| 1495 |
+
1493 entailment
|
| 1496 |
+
1494 entailment
|
| 1497 |
+
1495 entailment
|
| 1498 |
+
1496 entailment
|
| 1499 |
+
1497 not_entailment
|
| 1500 |
+
1498 not_entailment
|
| 1501 |
+
1499 not_entailment
|
| 1502 |
+
1500 not_entailment
|
| 1503 |
+
1501 entailment
|
| 1504 |
+
1502 not_entailment
|
| 1505 |
+
1503 not_entailment
|
| 1506 |
+
1504 entailment
|
| 1507 |
+
1505 entailment
|
| 1508 |
+
1506 not_entailment
|
| 1509 |
+
1507 entailment
|
| 1510 |
+
1508 not_entailment
|
| 1511 |
+
1509 not_entailment
|
| 1512 |
+
1510 entailment
|
| 1513 |
+
1511 entailment
|
| 1514 |
+
1512 entailment
|
| 1515 |
+
1513 not_entailment
|
| 1516 |
+
1514 entailment
|
| 1517 |
+
1515 entailment
|
| 1518 |
+
1516 entailment
|
| 1519 |
+
1517 entailment
|
| 1520 |
+
1518 entailment
|
| 1521 |
+
1519 entailment
|
| 1522 |
+
1520 entailment
|
| 1523 |
+
1521 entailment
|
| 1524 |
+
1522 entailment
|
| 1525 |
+
1523 entailment
|
| 1526 |
+
1524 entailment
|
| 1527 |
+
1525 entailment
|
| 1528 |
+
1526 not_entailment
|
| 1529 |
+
1527 entailment
|
| 1530 |
+
1528 not_entailment
|
| 1531 |
+
1529 entailment
|
| 1532 |
+
1530 entailment
|
| 1533 |
+
1531 entailment
|
| 1534 |
+
1532 entailment
|
| 1535 |
+
1533 entailment
|
| 1536 |
+
1534 entailment
|
| 1537 |
+
1535 entailment
|
| 1538 |
+
1536 entailment
|
| 1539 |
+
1537 not_entailment
|
| 1540 |
+
1538 entailment
|
| 1541 |
+
1539 not_entailment
|
| 1542 |
+
1540 not_entailment
|
| 1543 |
+
1541 not_entailment
|
| 1544 |
+
1542 not_entailment
|
| 1545 |
+
1543 entailment
|
| 1546 |
+
1544 entailment
|
| 1547 |
+
1545 entailment
|
| 1548 |
+
1546 not_entailment
|
| 1549 |
+
1547 entailment
|
| 1550 |
+
1548 entailment
|
| 1551 |
+
1549 entailment
|
| 1552 |
+
1550 entailment
|
| 1553 |
+
1551 entailment
|
| 1554 |
+
1552 entailment
|
| 1555 |
+
1553 entailment
|
| 1556 |
+
1554 not_entailment
|
| 1557 |
+
1555 entailment
|
| 1558 |
+
1556 entailment
|
| 1559 |
+
1557 entailment
|
| 1560 |
+
1558 not_entailment
|
| 1561 |
+
1559 not_entailment
|
| 1562 |
+
1560 entailment
|
| 1563 |
+
1561 not_entailment
|
| 1564 |
+
1562 entailment
|
| 1565 |
+
1563 entailment
|
| 1566 |
+
1564 not_entailment
|
| 1567 |
+
1565 entailment
|
| 1568 |
+
1566 not_entailment
|
| 1569 |
+
1567 entailment
|
| 1570 |
+
1568 not_entailment
|
| 1571 |
+
1569 not_entailment
|
| 1572 |
+
1570 entailment
|
| 1573 |
+
1571 not_entailment
|
| 1574 |
+
1572 entailment
|
| 1575 |
+
1573 entailment
|
| 1576 |
+
1574 not_entailment
|
| 1577 |
+
1575 not_entailment
|
| 1578 |
+
1576 entailment
|
| 1579 |
+
1577 entailment
|
| 1580 |
+
1578 entailment
|
| 1581 |
+
1579 not_entailment
|
| 1582 |
+
1580 entailment
|
| 1583 |
+
1581 entailment
|
| 1584 |
+
1582 not_entailment
|
| 1585 |
+
1583 not_entailment
|
| 1586 |
+
1584 not_entailment
|
| 1587 |
+
1585 entailment
|
| 1588 |
+
1586 entailment
|
| 1589 |
+
1587 not_entailment
|
| 1590 |
+
1588 not_entailment
|
| 1591 |
+
1589 entailment
|
| 1592 |
+
1590 entailment
|
| 1593 |
+
1591 not_entailment
|
| 1594 |
+
1592 entailment
|
| 1595 |
+
1593 not_entailment
|
| 1596 |
+
1594 entailment
|
| 1597 |
+
1595 entailment
|
| 1598 |
+
1596 not_entailment
|
| 1599 |
+
1597 entailment
|
| 1600 |
+
1598 entailment
|
| 1601 |
+
1599 not_entailment
|
| 1602 |
+
1600 entailment
|
| 1603 |
+
1601 entailment
|
| 1604 |
+
1602 entailment
|
| 1605 |
+
1603 not_entailment
|
| 1606 |
+
1604 not_entailment
|
| 1607 |
+
1605 entailment
|
| 1608 |
+
1606 entailment
|
| 1609 |
+
1607 not_entailment
|
| 1610 |
+
1608 entailment
|
| 1611 |
+
1609 entailment
|
| 1612 |
+
1610 entailment
|
| 1613 |
+
1611 not_entailment
|
| 1614 |
+
1612 entailment
|
| 1615 |
+
1613 entailment
|
| 1616 |
+
1614 not_entailment
|
| 1617 |
+
1615 not_entailment
|
| 1618 |
+
1616 not_entailment
|
| 1619 |
+
1617 entailment
|
| 1620 |
+
1618 not_entailment
|
| 1621 |
+
1619 not_entailment
|
| 1622 |
+
1620 not_entailment
|
| 1623 |
+
1621 entailment
|
| 1624 |
+
1622 not_entailment
|
| 1625 |
+
1623 entailment
|
| 1626 |
+
1624 entailment
|
| 1627 |
+
1625 not_entailment
|
| 1628 |
+
1626 not_entailment
|
| 1629 |
+
1627 entailment
|
| 1630 |
+
1628 not_entailment
|
| 1631 |
+
1629 entailment
|
| 1632 |
+
1630 not_entailment
|
| 1633 |
+
1631 entailment
|
| 1634 |
+
1632 entailment
|
| 1635 |
+
1633 not_entailment
|
| 1636 |
+
1634 entailment
|
| 1637 |
+
1635 entailment
|
| 1638 |
+
1636 not_entailment
|
| 1639 |
+
1637 not_entailment
|
| 1640 |
+
1638 not_entailment
|
| 1641 |
+
1639 not_entailment
|
| 1642 |
+
1640 entailment
|
| 1643 |
+
1641 not_entailment
|
| 1644 |
+
1642 not_entailment
|
| 1645 |
+
1643 entailment
|
| 1646 |
+
1644 not_entailment
|
| 1647 |
+
1645 not_entailment
|
| 1648 |
+
1646 not_entailment
|
| 1649 |
+
1647 not_entailment
|
| 1650 |
+
1648 not_entailment
|
| 1651 |
+
1649 not_entailment
|
| 1652 |
+
1650 entailment
|
| 1653 |
+
1651 entailment
|
| 1654 |
+
1652 not_entailment
|
| 1655 |
+
1653 not_entailment
|
| 1656 |
+
1654 entailment
|
| 1657 |
+
1655 entailment
|
| 1658 |
+
1656 entailment
|
| 1659 |
+
1657 entailment
|
| 1660 |
+
1658 entailment
|
| 1661 |
+
1659 entailment
|
| 1662 |
+
1660 not_entailment
|
| 1663 |
+
1661 entailment
|
| 1664 |
+
1662 entailment
|
| 1665 |
+
1663 not_entailment
|
| 1666 |
+
1664 entailment
|
| 1667 |
+
1665 not_entailment
|
| 1668 |
+
1666 entailment
|
| 1669 |
+
1667 not_entailment
|
| 1670 |
+
1668 entailment
|
| 1671 |
+
1669 entailment
|
| 1672 |
+
1670 entailment
|
| 1673 |
+
1671 entailment
|
| 1674 |
+
1672 not_entailment
|
| 1675 |
+
1673 not_entailment
|
| 1676 |
+
1674 entailment
|
| 1677 |
+
1675 not_entailment
|
| 1678 |
+
1676 entailment
|
| 1679 |
+
1677 entailment
|
| 1680 |
+
1678 not_entailment
|
| 1681 |
+
1679 entailment
|
| 1682 |
+
1680 entailment
|
| 1683 |
+
1681 entailment
|
| 1684 |
+
1682 not_entailment
|
| 1685 |
+
1683 entailment
|
| 1686 |
+
1684 entailment
|
| 1687 |
+
1685 not_entailment
|
| 1688 |
+
1686 entailment
|
| 1689 |
+
1687 entailment
|
| 1690 |
+
1688 not_entailment
|
| 1691 |
+
1689 not_entailment
|
| 1692 |
+
1690 entailment
|
| 1693 |
+
1691 entailment
|
| 1694 |
+
1692 not_entailment
|
| 1695 |
+
1693 not_entailment
|
| 1696 |
+
1694 not_entailment
|
| 1697 |
+
1695 entailment
|
| 1698 |
+
1696 entailment
|
| 1699 |
+
1697 not_entailment
|
| 1700 |
+
1698 entailment
|
| 1701 |
+
1699 not_entailment
|
| 1702 |
+
1700 entailment
|
| 1703 |
+
1701 entailment
|
| 1704 |
+
1702 not_entailment
|
| 1705 |
+
1703 entailment
|
| 1706 |
+
1704 entailment
|
| 1707 |
+
1705 not_entailment
|
| 1708 |
+
1706 not_entailment
|
| 1709 |
+
1707 entailment
|
| 1710 |
+
1708 not_entailment
|
| 1711 |
+
1709 not_entailment
|
| 1712 |
+
1710 not_entailment
|
| 1713 |
+
1711 not_entailment
|
| 1714 |
+
1712 entailment
|
| 1715 |
+
1713 entailment
|
| 1716 |
+
1714 not_entailment
|
| 1717 |
+
1715 entailment
|
| 1718 |
+
1716 entailment
|
| 1719 |
+
1717 entailment
|
| 1720 |
+
1718 not_entailment
|
| 1721 |
+
1719 not_entailment
|
| 1722 |
+
1720 not_entailment
|
| 1723 |
+
1721 entailment
|
| 1724 |
+
1722 entailment
|
| 1725 |
+
1723 not_entailment
|
| 1726 |
+
1724 entailment
|
| 1727 |
+
1725 entailment
|
| 1728 |
+
1726 entailment
|
| 1729 |
+
1727 entailment
|
| 1730 |
+
1728 entailment
|
| 1731 |
+
1729 entailment
|
| 1732 |
+
1730 not_entailment
|
| 1733 |
+
1731 entailment
|
| 1734 |
+
1732 not_entailment
|
| 1735 |
+
1733 entailment
|
| 1736 |
+
1734 entailment
|
| 1737 |
+
1735 not_entailment
|
| 1738 |
+
1736 entailment
|
| 1739 |
+
1737 entailment
|
| 1740 |
+
1738 not_entailment
|
| 1741 |
+
1739 not_entailment
|
| 1742 |
+
1740 entailment
|
| 1743 |
+
1741 entailment
|
| 1744 |
+
1742 not_entailment
|
| 1745 |
+
1743 not_entailment
|
| 1746 |
+
1744 entailment
|
| 1747 |
+
1745 not_entailment
|
| 1748 |
+
1746 entailment
|
| 1749 |
+
1747 not_entailment
|
| 1750 |
+
1748 entailment
|
| 1751 |
+
1749 not_entailment
|
| 1752 |
+
1750 not_entailment
|
| 1753 |
+
1751 not_entailment
|
| 1754 |
+
1752 not_entailment
|
| 1755 |
+
1753 entailment
|
| 1756 |
+
1754 not_entailment
|
| 1757 |
+
1755 not_entailment
|
| 1758 |
+
1756 not_entailment
|
| 1759 |
+
1757 not_entailment
|
| 1760 |
+
1758 entailment
|
| 1761 |
+
1759 entailment
|
| 1762 |
+
1760 entailment
|
| 1763 |
+
1761 entailment
|
| 1764 |
+
1762 entailment
|
| 1765 |
+
1763 not_entailment
|
| 1766 |
+
1764 entailment
|
| 1767 |
+
1765 entailment
|
| 1768 |
+
1766 not_entailment
|
| 1769 |
+
1767 not_entailment
|
| 1770 |
+
1768 entailment
|
| 1771 |
+
1769 not_entailment
|
| 1772 |
+
1770 entailment
|
| 1773 |
+
1771 not_entailment
|
| 1774 |
+
1772 not_entailment
|
| 1775 |
+
1773 entailment
|
| 1776 |
+
1774 not_entailment
|
| 1777 |
+
1775 entailment
|
| 1778 |
+
1776 not_entailment
|
| 1779 |
+
1777 entailment
|
| 1780 |
+
1778 not_entailment
|
| 1781 |
+
1779 entailment
|
| 1782 |
+
1780 not_entailment
|
| 1783 |
+
1781 not_entailment
|
| 1784 |
+
1782 not_entailment
|
| 1785 |
+
1783 entailment
|
| 1786 |
+
1784 not_entailment
|
| 1787 |
+
1785 entailment
|
| 1788 |
+
1786 entailment
|
| 1789 |
+
1787 not_entailment
|
| 1790 |
+
1788 entailment
|
| 1791 |
+
1789 entailment
|
| 1792 |
+
1790 entailment
|
| 1793 |
+
1791 entailment
|
| 1794 |
+
1792 entailment
|
| 1795 |
+
1793 entailment
|
| 1796 |
+
1794 entailment
|
| 1797 |
+
1795 entailment
|
| 1798 |
+
1796 not_entailment
|
| 1799 |
+
1797 not_entailment
|
| 1800 |
+
1798 entailment
|
| 1801 |
+
1799 not_entailment
|
| 1802 |
+
1800 entailment
|
| 1803 |
+
1801 not_entailment
|
| 1804 |
+
1802 entailment
|
| 1805 |
+
1803 entailment
|
| 1806 |
+
1804 not_entailment
|
| 1807 |
+
1805 entailment
|
| 1808 |
+
1806 not_entailment
|
| 1809 |
+
1807 not_entailment
|
| 1810 |
+
1808 not_entailment
|
| 1811 |
+
1809 entailment
|
| 1812 |
+
1810 entailment
|
| 1813 |
+
1811 entailment
|
| 1814 |
+
1812 not_entailment
|
| 1815 |
+
1813 entailment
|
| 1816 |
+
1814 entailment
|
| 1817 |
+
1815 entailment
|
| 1818 |
+
1816 entailment
|
| 1819 |
+
1817 not_entailment
|
| 1820 |
+
1818 entailment
|
| 1821 |
+
1819 entailment
|
| 1822 |
+
1820 not_entailment
|
| 1823 |
+
1821 not_entailment
|
| 1824 |
+
1822 entailment
|
| 1825 |
+
1823 not_entailment
|
| 1826 |
+
1824 entailment
|
| 1827 |
+
1825 entailment
|
| 1828 |
+
1826 entailment
|
| 1829 |
+
1827 not_entailment
|
| 1830 |
+
1828 entailment
|
| 1831 |
+
1829 entailment
|
| 1832 |
+
1830 entailment
|
| 1833 |
+
1831 not_entailment
|
| 1834 |
+
1832 not_entailment
|
| 1835 |
+
1833 not_entailment
|
| 1836 |
+
1834 entailment
|
| 1837 |
+
1835 entailment
|
| 1838 |
+
1836 entailment
|
| 1839 |
+
1837 entailment
|
| 1840 |
+
1838 not_entailment
|
| 1841 |
+
1839 not_entailment
|
| 1842 |
+
1840 entailment
|
| 1843 |
+
1841 entailment
|
| 1844 |
+
1842 not_entailment
|
| 1845 |
+
1843 entailment
|
| 1846 |
+
1844 not_entailment
|
| 1847 |
+
1845 not_entailment
|
| 1848 |
+
1846 not_entailment
|
| 1849 |
+
1847 not_entailment
|
| 1850 |
+
1848 not_entailment
|
| 1851 |
+
1849 not_entailment
|
| 1852 |
+
1850 not_entailment
|
| 1853 |
+
1851 not_entailment
|
| 1854 |
+
1852 entailment
|
| 1855 |
+
1853 not_entailment
|
| 1856 |
+
1854 entailment
|
| 1857 |
+
1855 not_entailment
|
| 1858 |
+
1856 not_entailment
|
| 1859 |
+
1857 entailment
|
| 1860 |
+
1858 entailment
|
| 1861 |
+
1859 not_entailment
|
| 1862 |
+
1860 not_entailment
|
| 1863 |
+
1861 entailment
|
| 1864 |
+
1862 not_entailment
|
| 1865 |
+
1863 entailment
|
| 1866 |
+
1864 entailment
|
| 1867 |
+
1865 not_entailment
|
| 1868 |
+
1866 entailment
|
| 1869 |
+
1867 not_entailment
|
| 1870 |
+
1868 entailment
|
| 1871 |
+
1869 entailment
|
| 1872 |
+
1870 not_entailment
|
| 1873 |
+
1871 entailment
|
| 1874 |
+
1872 entailment
|
| 1875 |
+
1873 not_entailment
|
| 1876 |
+
1874 entailment
|
| 1877 |
+
1875 entailment
|
| 1878 |
+
1876 not_entailment
|
| 1879 |
+
1877 not_entailment
|
| 1880 |
+
1878 not_entailment
|
| 1881 |
+
1879 not_entailment
|
| 1882 |
+
1880 not_entailment
|
| 1883 |
+
1881 entailment
|
| 1884 |
+
1882 not_entailment
|
| 1885 |
+
1883 not_entailment
|
| 1886 |
+
1884 entailment
|
| 1887 |
+
1885 entailment
|
| 1888 |
+
1886 entailment
|
| 1889 |
+
1887 entailment
|
| 1890 |
+
1888 entailment
|
| 1891 |
+
1889 entailment
|
| 1892 |
+
1890 not_entailment
|
| 1893 |
+
1891 not_entailment
|
| 1894 |
+
1892 entailment
|
| 1895 |
+
1893 entailment
|
| 1896 |
+
1894 entailment
|
| 1897 |
+
1895 not_entailment
|
| 1898 |
+
1896 entailment
|
| 1899 |
+
1897 entailment
|
| 1900 |
+
1898 not_entailment
|
| 1901 |
+
1899 entailment
|
| 1902 |
+
1900 not_entailment
|
| 1903 |
+
1901 entailment
|
| 1904 |
+
1902 not_entailment
|
| 1905 |
+
1903 not_entailment
|
| 1906 |
+
1904 not_entailment
|
| 1907 |
+
1905 not_entailment
|
| 1908 |
+
1906 entailment
|
| 1909 |
+
1907 not_entailment
|
| 1910 |
+
1908 not_entailment
|
| 1911 |
+
1909 entailment
|
| 1912 |
+
1910 entailment
|
| 1913 |
+
1911 entailment
|
| 1914 |
+
1912 not_entailment
|
| 1915 |
+
1913 not_entailment
|
| 1916 |
+
1914 entailment
|
| 1917 |
+
1915 not_entailment
|
| 1918 |
+
1916 not_entailment
|
| 1919 |
+
1917 not_entailment
|
| 1920 |
+
1918 not_entailment
|
| 1921 |
+
1919 entailment
|
| 1922 |
+
1920 not_entailment
|
| 1923 |
+
1921 not_entailment
|
| 1924 |
+
1922 entailment
|
| 1925 |
+
1923 entailment
|
| 1926 |
+
1924 entailment
|
| 1927 |
+
1925 entailment
|
| 1928 |
+
1926 not_entailment
|
| 1929 |
+
1927 not_entailment
|
| 1930 |
+
1928 not_entailment
|
| 1931 |
+
1929 entailment
|
| 1932 |
+
1930 not_entailment
|
| 1933 |
+
1931 entailment
|
| 1934 |
+
1932 entailment
|
| 1935 |
+
1933 not_entailment
|
| 1936 |
+
1934 not_entailment
|
| 1937 |
+
1935 entailment
|
| 1938 |
+
1936 not_entailment
|
| 1939 |
+
1937 not_entailment
|
| 1940 |
+
1938 not_entailment
|
| 1941 |
+
1939 entailment
|
| 1942 |
+
1940 entailment
|
| 1943 |
+
1941 not_entailment
|
| 1944 |
+
1942 not_entailment
|
| 1945 |
+
1943 entailment
|
| 1946 |
+
1944 entailment
|
| 1947 |
+
1945 not_entailment
|
| 1948 |
+
1946 entailment
|
| 1949 |
+
1947 entailment
|
| 1950 |
+
1948 entailment
|
| 1951 |
+
1949 not_entailment
|
| 1952 |
+
1950 not_entailment
|
| 1953 |
+
1951 not_entailment
|
| 1954 |
+
1952 not_entailment
|
| 1955 |
+
1953 not_entailment
|
| 1956 |
+
1954 not_entailment
|
| 1957 |
+
1955 not_entailment
|
| 1958 |
+
1956 entailment
|
| 1959 |
+
1957 entailment
|
| 1960 |
+
1958 not_entailment
|
| 1961 |
+
1959 entailment
|
| 1962 |
+
1960 entailment
|
| 1963 |
+
1961 not_entailment
|
| 1964 |
+
1962 entailment
|
| 1965 |
+
1963 not_entailment
|
| 1966 |
+
1964 not_entailment
|
| 1967 |
+
1965 entailment
|
| 1968 |
+
1966 not_entailment
|
| 1969 |
+
1967 entailment
|
| 1970 |
+
1968 not_entailment
|
| 1971 |
+
1969 not_entailment
|
| 1972 |
+
1970 entailment
|
| 1973 |
+
1971 entailment
|
| 1974 |
+
1972 not_entailment
|
| 1975 |
+
1973 entailment
|
| 1976 |
+
1974 entailment
|
| 1977 |
+
1975 not_entailment
|
| 1978 |
+
1976 entailment
|
| 1979 |
+
1977 not_entailment
|
| 1980 |
+
1978 entailment
|
| 1981 |
+
1979 entailment
|
| 1982 |
+
1980 entailment
|
| 1983 |
+
1981 entailment
|
| 1984 |
+
1982 not_entailment
|
| 1985 |
+
1983 entailment
|
| 1986 |
+
1984 entailment
|
| 1987 |
+
1985 entailment
|
| 1988 |
+
1986 entailment
|
| 1989 |
+
1987 not_entailment
|
| 1990 |
+
1988 not_entailment
|
| 1991 |
+
1989 entailment
|
| 1992 |
+
1990 entailment
|
| 1993 |
+
1991 entailment
|
| 1994 |
+
1992 not_entailment
|
| 1995 |
+
1993 not_entailment
|
| 1996 |
+
1994 entailment
|
| 1997 |
+
1995 entailment
|
| 1998 |
+
1996 entailment
|
| 1999 |
+
1997 not_entailment
|
| 2000 |
+
1998 entailment
|
| 2001 |
+
1999 entailment
|
| 2002 |
+
2000 not_entailment
|
| 2003 |
+
2001 not_entailment
|
| 2004 |
+
2002 entailment
|
| 2005 |
+
2003 entailment
|
| 2006 |
+
2004 not_entailment
|
| 2007 |
+
2005 entailment
|
| 2008 |
+
2006 not_entailment
|
| 2009 |
+
2007 not_entailment
|
| 2010 |
+
2008 not_entailment
|
| 2011 |
+
2009 not_entailment
|
| 2012 |
+
2010 entailment
|
| 2013 |
+
2011 entailment
|
| 2014 |
+
2012 not_entailment
|
| 2015 |
+
2013 entailment
|
| 2016 |
+
2014 entailment
|
| 2017 |
+
2015 entailment
|
| 2018 |
+
2016 entailment
|
| 2019 |
+
2017 entailment
|
| 2020 |
+
2018 not_entailment
|
| 2021 |
+
2019 entailment
|
| 2022 |
+
2020 not_entailment
|
| 2023 |
+
2021 not_entailment
|
| 2024 |
+
2022 entailment
|
| 2025 |
+
2023 entailment
|
| 2026 |
+
2024 entailment
|
| 2027 |
+
2025 entailment
|
| 2028 |
+
2026 entailment
|
| 2029 |
+
2027 not_entailment
|
| 2030 |
+
2028 not_entailment
|
| 2031 |
+
2029 not_entailment
|
| 2032 |
+
2030 not_entailment
|
| 2033 |
+
2031 entailment
|
| 2034 |
+
2032 entailment
|
| 2035 |
+
2033 entailment
|
| 2036 |
+
2034 entailment
|
| 2037 |
+
2035 not_entailment
|
| 2038 |
+
2036 entailment
|
| 2039 |
+
2037 entailment
|
| 2040 |
+
2038 entailment
|
| 2041 |
+
2039 not_entailment
|
| 2042 |
+
2040 entailment
|
| 2043 |
+
2041 entailment
|
| 2044 |
+
2042 entailment
|
| 2045 |
+
2043 not_entailment
|
| 2046 |
+
2044 entailment
|
| 2047 |
+
2045 entailment
|
| 2048 |
+
2046 entailment
|
| 2049 |
+
2047 not_entailment
|
| 2050 |
+
2048 not_entailment
|
| 2051 |
+
2049 entailment
|
| 2052 |
+
2050 entailment
|
| 2053 |
+
2051 entailment
|
| 2054 |
+
2052 not_entailment
|
| 2055 |
+
2053 not_entailment
|
| 2056 |
+
2054 not_entailment
|
| 2057 |
+
2055 not_entailment
|
| 2058 |
+
2056 not_entailment
|
| 2059 |
+
2057 entailment
|
| 2060 |
+
2058 not_entailment
|
| 2061 |
+
2059 not_entailment
|
| 2062 |
+
2060 not_entailment
|
| 2063 |
+
2061 not_entailment
|
| 2064 |
+
2062 not_entailment
|
| 2065 |
+
2063 entailment
|
| 2066 |
+
2064 entailment
|
| 2067 |
+
2065 entailment
|
| 2068 |
+
2066 not_entailment
|
| 2069 |
+
2067 entailment
|
| 2070 |
+
2068 not_entailment
|
| 2071 |
+
2069 entailment
|
| 2072 |
+
2070 entailment
|
| 2073 |
+
2071 not_entailment
|
| 2074 |
+
2072 entailment
|
| 2075 |
+
2073 entailment
|
| 2076 |
+
2074 not_entailment
|
| 2077 |
+
2075 entailment
|
| 2078 |
+
2076 not_entailment
|
| 2079 |
+
2077 entailment
|
| 2080 |
+
2078 entailment
|
| 2081 |
+
2079 entailment
|
| 2082 |
+
2080 entailment
|
| 2083 |
+
2081 entailment
|
| 2084 |
+
2082 not_entailment
|
| 2085 |
+
2083 entailment
|
| 2086 |
+
2084 entailment
|
| 2087 |
+
2085 not_entailment
|
| 2088 |
+
2086 entailment
|
| 2089 |
+
2087 entailment
|
| 2090 |
+
2088 entailment
|
| 2091 |
+
2089 entailment
|
| 2092 |
+
2090 not_entailment
|
| 2093 |
+
2091 entailment
|
| 2094 |
+
2092 not_entailment
|
| 2095 |
+
2093 not_entailment
|
| 2096 |
+
2094 entailment
|
| 2097 |
+
2095 entailment
|
| 2098 |
+
2096 not_entailment
|
| 2099 |
+
2097 entailment
|
| 2100 |
+
2098 not_entailment
|
| 2101 |
+
2099 not_entailment
|
| 2102 |
+
2100 entailment
|
| 2103 |
+
2101 not_entailment
|
| 2104 |
+
2102 entailment
|
| 2105 |
+
2103 not_entailment
|
| 2106 |
+
2104 entailment
|
| 2107 |
+
2105 not_entailment
|
| 2108 |
+
2106 entailment
|
| 2109 |
+
2107 entailment
|
| 2110 |
+
2108 entailment
|
| 2111 |
+
2109 not_entailment
|
| 2112 |
+
2110 entailment
|
| 2113 |
+
2111 entailment
|
| 2114 |
+
2112 entailment
|
| 2115 |
+
2113 not_entailment
|
| 2116 |
+
2114 not_entailment
|
| 2117 |
+
2115 entailment
|
| 2118 |
+
2116 not_entailment
|
| 2119 |
+
2117 entailment
|
| 2120 |
+
2118 not_entailment
|
| 2121 |
+
2119 entailment
|
| 2122 |
+
2120 entailment
|
| 2123 |
+
2121 not_entailment
|
| 2124 |
+
2122 entailment
|
| 2125 |
+
2123 entailment
|
| 2126 |
+
2124 not_entailment
|
| 2127 |
+
2125 not_entailment
|
| 2128 |
+
2126 entailment
|
| 2129 |
+
2127 entailment
|
| 2130 |
+
2128 entailment
|
| 2131 |
+
2129 not_entailment
|
| 2132 |
+
2130 entailment
|
| 2133 |
+
2131 not_entailment
|
| 2134 |
+
2132 not_entailment
|
| 2135 |
+
2133 not_entailment
|
| 2136 |
+
2134 entailment
|
| 2137 |
+
2135 entailment
|
| 2138 |
+
2136 not_entailment
|
| 2139 |
+
2137 not_entailment
|
| 2140 |
+
2138 not_entailment
|
| 2141 |
+
2139 not_entailment
|
| 2142 |
+
2140 entailment
|
| 2143 |
+
2141 not_entailment
|
| 2144 |
+
2142 entailment
|
| 2145 |
+
2143 not_entailment
|
| 2146 |
+
2144 not_entailment
|
| 2147 |
+
2145 entailment
|
| 2148 |
+
2146 entailment
|
| 2149 |
+
2147 not_entailment
|
| 2150 |
+
2148 not_entailment
|
| 2151 |
+
2149 entailment
|
| 2152 |
+
2150 not_entailment
|
| 2153 |
+
2151 not_entailment
|
| 2154 |
+
2152 entailment
|
| 2155 |
+
2153 entailment
|
| 2156 |
+
2154 entailment
|
| 2157 |
+
2155 entailment
|
| 2158 |
+
2156 entailment
|
| 2159 |
+
2157 entailment
|
| 2160 |
+
2158 not_entailment
|
| 2161 |
+
2159 entailment
|
| 2162 |
+
2160 not_entailment
|
| 2163 |
+
2161 entailment
|
| 2164 |
+
2162 entailment
|
| 2165 |
+
2163 not_entailment
|
| 2166 |
+
2164 not_entailment
|
| 2167 |
+
2165 not_entailment
|
| 2168 |
+
2166 not_entailment
|
| 2169 |
+
2167 entailment
|
| 2170 |
+
2168 entailment
|
| 2171 |
+
2169 not_entailment
|
| 2172 |
+
2170 not_entailment
|
| 2173 |
+
2171 entailment
|
| 2174 |
+
2172 entailment
|
| 2175 |
+
2173 not_entailment
|
| 2176 |
+
2174 entailment
|
| 2177 |
+
2175 not_entailment
|
| 2178 |
+
2176 not_entailment
|
| 2179 |
+
2177 entailment
|
| 2180 |
+
2178 not_entailment
|
| 2181 |
+
2179 entailment
|
| 2182 |
+
2180 not_entailment
|
| 2183 |
+
2181 not_entailment
|
| 2184 |
+
2182 entailment
|
| 2185 |
+
2183 not_entailment
|
| 2186 |
+
2184 not_entailment
|
| 2187 |
+
2185 entailment
|
| 2188 |
+
2186 entailment
|
| 2189 |
+
2187 entailment
|
| 2190 |
+
2188 entailment
|
| 2191 |
+
2189 not_entailment
|
| 2192 |
+
2190 not_entailment
|
| 2193 |
+
2191 entailment
|
| 2194 |
+
2192 entailment
|
| 2195 |
+
2193 not_entailment
|
| 2196 |
+
2194 not_entailment
|
| 2197 |
+
2195 not_entailment
|
| 2198 |
+
2196 entailment
|
| 2199 |
+
2197 entailment
|
| 2200 |
+
2198 not_entailment
|
| 2201 |
+
2199 entailment
|
| 2202 |
+
2200 entailment
|
| 2203 |
+
2201 entailment
|
| 2204 |
+
2202 not_entailment
|
| 2205 |
+
2203 not_entailment
|
| 2206 |
+
2204 not_entailment
|
| 2207 |
+
2205 entailment
|
| 2208 |
+
2206 not_entailment
|
| 2209 |
+
2207 entailment
|
| 2210 |
+
2208 not_entailment
|
| 2211 |
+
2209 entailment
|
| 2212 |
+
2210 not_entailment
|
| 2213 |
+
2211 entailment
|
| 2214 |
+
2212 entailment
|
| 2215 |
+
2213 entailment
|
| 2216 |
+
2214 entailment
|
| 2217 |
+
2215 entailment
|
| 2218 |
+
2216 not_entailment
|
| 2219 |
+
2217 not_entailment
|
| 2220 |
+
2218 not_entailment
|
| 2221 |
+
2219 not_entailment
|
| 2222 |
+
2220 not_entailment
|
| 2223 |
+
2221 entailment
|
| 2224 |
+
2222 not_entailment
|
| 2225 |
+
2223 not_entailment
|
| 2226 |
+
2224 not_entailment
|
| 2227 |
+
2225 not_entailment
|
| 2228 |
+
2226 not_entailment
|
| 2229 |
+
2227 not_entailment
|
| 2230 |
+
2228 not_entailment
|
| 2231 |
+
2229 not_entailment
|
| 2232 |
+
2230 not_entailment
|
| 2233 |
+
2231 not_entailment
|
| 2234 |
+
2232 not_entailment
|
| 2235 |
+
2233 not_entailment
|
| 2236 |
+
2234 entailment
|
| 2237 |
+
2235 not_entailment
|
| 2238 |
+
2236 entailment
|
| 2239 |
+
2237 not_entailment
|
| 2240 |
+
2238 entailment
|
| 2241 |
+
2239 not_entailment
|
| 2242 |
+
2240 not_entailment
|
| 2243 |
+
2241 not_entailment
|
| 2244 |
+
2242 entailment
|
| 2245 |
+
2243 not_entailment
|
| 2246 |
+
2244 entailment
|
| 2247 |
+
2245 entailment
|
| 2248 |
+
2246 entailment
|
| 2249 |
+
2247 not_entailment
|
| 2250 |
+
2248 entailment
|
| 2251 |
+
2249 not_entailment
|
| 2252 |
+
2250 entailment
|
| 2253 |
+
2251 not_entailment
|
| 2254 |
+
2252 entailment
|
| 2255 |
+
2253 entailment
|
| 2256 |
+
2254 entailment
|
| 2257 |
+
2255 not_entailment
|
| 2258 |
+
2256 not_entailment
|
| 2259 |
+
2257 not_entailment
|
| 2260 |
+
2258 not_entailment
|
| 2261 |
+
2259 not_entailment
|
| 2262 |
+
2260 entailment
|
| 2263 |
+
2261 entailment
|
| 2264 |
+
2262 not_entailment
|
| 2265 |
+
2263 not_entailment
|
| 2266 |
+
2264 not_entailment
|
| 2267 |
+
2265 entailment
|
| 2268 |
+
2266 not_entailment
|
| 2269 |
+
2267 entailment
|
| 2270 |
+
2268 not_entailment
|
| 2271 |
+
2269 not_entailment
|
| 2272 |
+
2270 not_entailment
|
| 2273 |
+
2271 entailment
|
| 2274 |
+
2272 entailment
|
| 2275 |
+
2273 entailment
|
| 2276 |
+
2274 entailment
|
| 2277 |
+
2275 entailment
|
| 2278 |
+
2276 entailment
|
| 2279 |
+
2277 entailment
|
| 2280 |
+
2278 not_entailment
|
| 2281 |
+
2279 entailment
|
| 2282 |
+
2280 not_entailment
|
| 2283 |
+
2281 not_entailment
|
| 2284 |
+
2282 entailment
|
| 2285 |
+
2283 not_entailment
|
| 2286 |
+
2284 not_entailment
|
| 2287 |
+
2285 entailment
|
| 2288 |
+
2286 not_entailment
|
| 2289 |
+
2287 not_entailment
|
| 2290 |
+
2288 entailment
|
| 2291 |
+
2289 entailment
|
| 2292 |
+
2290 entailment
|
| 2293 |
+
2291 entailment
|
| 2294 |
+
2292 entailment
|
| 2295 |
+
2293 entailment
|
| 2296 |
+
2294 entailment
|
| 2297 |
+
2295 entailment
|
| 2298 |
+
2296 entailment
|
| 2299 |
+
2297 entailment
|
| 2300 |
+
2298 entailment
|
| 2301 |
+
2299 entailment
|
| 2302 |
+
2300 not_entailment
|
| 2303 |
+
2301 entailment
|
| 2304 |
+
2302 not_entailment
|
| 2305 |
+
2303 entailment
|
| 2306 |
+
2304 entailment
|
| 2307 |
+
2305 entailment
|
| 2308 |
+
2306 entailment
|
| 2309 |
+
2307 not_entailment
|
| 2310 |
+
2308 not_entailment
|
| 2311 |
+
2309 not_entailment
|
| 2312 |
+
2310 entailment
|
| 2313 |
+
2311 not_entailment
|
| 2314 |
+
2312 not_entailment
|
| 2315 |
+
2313 entailment
|
| 2316 |
+
2314 not_entailment
|
| 2317 |
+
2315 entailment
|
| 2318 |
+
2316 entailment
|
| 2319 |
+
2317 not_entailment
|
| 2320 |
+
2318 entailment
|
| 2321 |
+
2319 not_entailment
|
| 2322 |
+
2320 not_entailment
|
| 2323 |
+
2321 entailment
|
| 2324 |
+
2322 entailment
|
| 2325 |
+
2323 not_entailment
|
| 2326 |
+
2324 entailment
|
| 2327 |
+
2325 not_entailment
|
| 2328 |
+
2326 not_entailment
|
| 2329 |
+
2327 not_entailment
|
| 2330 |
+
2328 not_entailment
|
| 2331 |
+
2329 entailment
|
| 2332 |
+
2330 entailment
|
| 2333 |
+
2331 entailment
|
| 2334 |
+
2332 entailment
|
| 2335 |
+
2333 entailment
|
| 2336 |
+
2334 not_entailment
|
| 2337 |
+
2335 not_entailment
|
| 2338 |
+
2336 entailment
|
| 2339 |
+
2337 entailment
|
| 2340 |
+
2338 entailment
|
| 2341 |
+
2339 not_entailment
|
| 2342 |
+
2340 not_entailment
|
| 2343 |
+
2341 entailment
|
| 2344 |
+
2342 not_entailment
|
| 2345 |
+
2343 entailment
|
| 2346 |
+
2344 not_entailment
|
| 2347 |
+
2345 entailment
|
| 2348 |
+
2346 entailment
|
| 2349 |
+
2347 entailment
|
| 2350 |
+
2348 not_entailment
|
| 2351 |
+
2349 entailment
|
| 2352 |
+
2350 entailment
|
| 2353 |
+
2351 not_entailment
|
| 2354 |
+
2352 entailment
|
| 2355 |
+
2353 entailment
|
| 2356 |
+
2354 not_entailment
|
| 2357 |
+
2355 entailment
|
| 2358 |
+
2356 entailment
|
| 2359 |
+
2357 not_entailment
|
| 2360 |
+
2358 entailment
|
| 2361 |
+
2359 entailment
|
| 2362 |
+
2360 entailment
|
| 2363 |
+
2361 not_entailment
|
| 2364 |
+
2362 not_entailment
|
| 2365 |
+
2363 not_entailment
|
| 2366 |
+
2364 not_entailment
|
| 2367 |
+
2365 not_entailment
|
| 2368 |
+
2366 not_entailment
|
| 2369 |
+
2367 entailment
|
| 2370 |
+
2368 not_entailment
|
| 2371 |
+
2369 entailment
|
| 2372 |
+
2370 entailment
|
| 2373 |
+
2371 entailment
|
| 2374 |
+
2372 entailment
|
| 2375 |
+
2373 entailment
|
| 2376 |
+
2374 entailment
|
| 2377 |
+
2375 entailment
|
| 2378 |
+
2376 not_entailment
|
| 2379 |
+
2377 entailment
|
| 2380 |
+
2378 not_entailment
|
| 2381 |
+
2379 not_entailment
|
| 2382 |
+
2380 not_entailment
|
| 2383 |
+
2381 entailment
|
| 2384 |
+
2382 entailment
|
| 2385 |
+
2383 entailment
|
| 2386 |
+
2384 entailment
|
| 2387 |
+
2385 not_entailment
|
| 2388 |
+
2386 entailment
|
| 2389 |
+
2387 entailment
|
| 2390 |
+
2388 entailment
|
| 2391 |
+
2389 entailment
|
| 2392 |
+
2390 entailment
|
| 2393 |
+
2391 entailment
|
| 2394 |
+
2392 entailment
|
| 2395 |
+
2393 entailment
|
| 2396 |
+
2394 entailment
|
| 2397 |
+
2395 entailment
|
| 2398 |
+
2396 not_entailment
|
| 2399 |
+
2397 not_entailment
|
| 2400 |
+
2398 not_entailment
|
| 2401 |
+
2399 entailment
|
| 2402 |
+
2400 not_entailment
|
| 2403 |
+
2401 not_entailment
|
| 2404 |
+
2402 not_entailment
|
| 2405 |
+
2403 not_entailment
|
| 2406 |
+
2404 not_entailment
|
| 2407 |
+
2405 entailment
|
| 2408 |
+
2406 entailment
|
| 2409 |
+
2407 entailment
|
| 2410 |
+
2408 entailment
|
| 2411 |
+
2409 not_entailment
|
| 2412 |
+
2410 not_entailment
|
| 2413 |
+
2411 not_entailment
|
| 2414 |
+
2412 not_entailment
|
| 2415 |
+
2413 entailment
|
| 2416 |
+
2414 entailment
|
| 2417 |
+
2415 entailment
|
| 2418 |
+
2416 not_entailment
|
| 2419 |
+
2417 not_entailment
|
| 2420 |
+
2418 not_entailment
|
| 2421 |
+
2419 entailment
|
| 2422 |
+
2420 entailment
|
| 2423 |
+
2421 entailment
|
| 2424 |
+
2422 entailment
|
| 2425 |
+
2423 not_entailment
|
| 2426 |
+
2424 not_entailment
|
| 2427 |
+
2425 entailment
|
| 2428 |
+
2426 entailment
|
| 2429 |
+
2427 entailment
|
| 2430 |
+
2428 not_entailment
|
| 2431 |
+
2429 not_entailment
|
| 2432 |
+
2430 entailment
|
| 2433 |
+
2431 not_entailment
|
| 2434 |
+
2432 entailment
|
| 2435 |
+
2433 not_entailment
|
| 2436 |
+
2434 not_entailment
|
| 2437 |
+
2435 entailment
|
| 2438 |
+
2436 entailment
|
| 2439 |
+
2437 entailment
|
| 2440 |
+
2438 not_entailment
|
| 2441 |
+
2439 not_entailment
|
| 2442 |
+
2440 entailment
|
| 2443 |
+
2441 entailment
|
| 2444 |
+
2442 not_entailment
|
| 2445 |
+
2443 not_entailment
|
| 2446 |
+
2444 entailment
|
| 2447 |
+
2445 entailment
|
| 2448 |
+
2446 not_entailment
|
| 2449 |
+
2447 entailment
|
| 2450 |
+
2448 not_entailment
|
| 2451 |
+
2449 entailment
|
| 2452 |
+
2450 entailment
|
| 2453 |
+
2451 entailment
|
| 2454 |
+
2452 entailment
|
| 2455 |
+
2453 entailment
|
| 2456 |
+
2454 not_entailment
|
| 2457 |
+
2455 not_entailment
|
| 2458 |
+
2456 entailment
|
| 2459 |
+
2457 entailment
|
| 2460 |
+
2458 not_entailment
|
| 2461 |
+
2459 entailment
|
| 2462 |
+
2460 entailment
|
| 2463 |
+
2461 not_entailment
|
| 2464 |
+
2462 entailment
|
| 2465 |
+
2463 not_entailment
|
| 2466 |
+
2464 entailment
|
| 2467 |
+
2465 entailment
|
| 2468 |
+
2466 not_entailment
|
| 2469 |
+
2467 entailment
|
| 2470 |
+
2468 entailment
|
| 2471 |
+
2469 entailment
|
| 2472 |
+
2470 entailment
|
| 2473 |
+
2471 not_entailment
|
| 2474 |
+
2472 not_entailment
|
| 2475 |
+
2473 not_entailment
|
| 2476 |
+
2474 not_entailment
|
| 2477 |
+
2475 not_entailment
|
| 2478 |
+
2476 not_entailment
|
| 2479 |
+
2477 entailment
|
| 2480 |
+
2478 entailment
|
| 2481 |
+
2479 entailment
|
| 2482 |
+
2480 entailment
|
| 2483 |
+
2481 entailment
|
| 2484 |
+
2482 entailment
|
| 2485 |
+
2483 not_entailment
|
| 2486 |
+
2484 not_entailment
|
| 2487 |
+
2485 entailment
|
| 2488 |
+
2486 not_entailment
|
| 2489 |
+
2487 not_entailment
|
| 2490 |
+
2488 entailment
|
| 2491 |
+
2489 not_entailment
|
| 2492 |
+
2490 entailment
|
| 2493 |
+
2491 not_entailment
|
| 2494 |
+
2492 not_entailment
|
| 2495 |
+
2493 entailment
|
| 2496 |
+
2494 not_entailment
|
| 2497 |
+
2495 entailment
|
| 2498 |
+
2496 not_entailment
|
| 2499 |
+
2497 entailment
|
| 2500 |
+
2498 not_entailment
|
| 2501 |
+
2499 not_entailment
|
| 2502 |
+
2500 entailment
|
| 2503 |
+
2501 not_entailment
|
| 2504 |
+
2502 entailment
|
| 2505 |
+
2503 entailment
|
| 2506 |
+
2504 entailment
|
| 2507 |
+
2505 entailment
|
| 2508 |
+
2506 entailment
|
| 2509 |
+
2507 entailment
|
| 2510 |
+
2508 not_entailment
|
| 2511 |
+
2509 not_entailment
|
| 2512 |
+
2510 not_entailment
|
| 2513 |
+
2511 entailment
|
| 2514 |
+
2512 entailment
|
| 2515 |
+
2513 not_entailment
|
| 2516 |
+
2514 not_entailment
|
| 2517 |
+
2515 entailment
|
| 2518 |
+
2516 not_entailment
|
| 2519 |
+
2517 not_entailment
|
| 2520 |
+
2518 not_entailment
|
| 2521 |
+
2519 entailment
|
| 2522 |
+
2520 not_entailment
|
| 2523 |
+
2521 not_entailment
|
| 2524 |
+
2522 entailment
|
| 2525 |
+
2523 entailment
|
| 2526 |
+
2524 not_entailment
|
| 2527 |
+
2525 entailment
|
| 2528 |
+
2526 not_entailment
|
| 2529 |
+
2527 not_entailment
|
| 2530 |
+
2528 not_entailment
|
| 2531 |
+
2529 entailment
|
| 2532 |
+
2530 not_entailment
|
| 2533 |
+
2531 not_entailment
|
| 2534 |
+
2532 not_entailment
|
| 2535 |
+
2533 entailment
|
| 2536 |
+
2534 not_entailment
|
| 2537 |
+
2535 not_entailment
|
| 2538 |
+
2536 entailment
|
| 2539 |
+
2537 not_entailment
|
| 2540 |
+
2538 not_entailment
|
| 2541 |
+
2539 entailment
|
| 2542 |
+
2540 not_entailment
|
| 2543 |
+
2541 entailment
|
| 2544 |
+
2542 entailment
|
| 2545 |
+
2543 entailment
|
| 2546 |
+
2544 entailment
|
| 2547 |
+
2545 entailment
|
| 2548 |
+
2546 not_entailment
|
| 2549 |
+
2547 entailment
|
| 2550 |
+
2548 not_entailment
|
| 2551 |
+
2549 not_entailment
|
| 2552 |
+
2550 entailment
|
| 2553 |
+
2551 entailment
|
| 2554 |
+
2552 entailment
|
| 2555 |
+
2553 entailment
|
| 2556 |
+
2554 entailment
|
| 2557 |
+
2555 entailment
|
| 2558 |
+
2556 entailment
|
| 2559 |
+
2557 entailment
|
| 2560 |
+
2558 entailment
|
| 2561 |
+
2559 entailment
|
| 2562 |
+
2560 not_entailment
|
| 2563 |
+
2561 not_entailment
|
| 2564 |
+
2562 entailment
|
| 2565 |
+
2563 entailment
|
| 2566 |
+
2564 entailment
|
| 2567 |
+
2565 entailment
|
| 2568 |
+
2566 not_entailment
|
| 2569 |
+
2567 entailment
|
| 2570 |
+
2568 entailment
|
| 2571 |
+
2569 entailment
|
| 2572 |
+
2570 entailment
|
| 2573 |
+
2571 not_entailment
|
| 2574 |
+
2572 not_entailment
|
| 2575 |
+
2573 entailment
|
| 2576 |
+
2574 not_entailment
|
| 2577 |
+
2575 entailment
|
| 2578 |
+
2576 not_entailment
|
| 2579 |
+
2577 entailment
|
| 2580 |
+
2578 not_entailment
|
| 2581 |
+
2579 not_entailment
|
| 2582 |
+
2580 entailment
|
| 2583 |
+
2581 not_entailment
|
| 2584 |
+
2582 not_entailment
|
| 2585 |
+
2583 entailment
|
| 2586 |
+
2584 entailment
|
| 2587 |
+
2585 not_entailment
|
| 2588 |
+
2586 entailment
|
| 2589 |
+
2587 entailment
|
| 2590 |
+
2588 not_entailment
|
| 2591 |
+
2589 entailment
|
| 2592 |
+
2590 entailment
|
| 2593 |
+
2591 not_entailment
|
| 2594 |
+
2592 not_entailment
|
| 2595 |
+
2593 entailment
|
| 2596 |
+
2594 not_entailment
|
| 2597 |
+
2595 not_entailment
|
| 2598 |
+
2596 entailment
|
| 2599 |
+
2597 not_entailment
|
| 2600 |
+
2598 entailment
|
| 2601 |
+
2599 not_entailment
|
| 2602 |
+
2600 entailment
|
| 2603 |
+
2601 entailment
|
| 2604 |
+
2602 not_entailment
|
| 2605 |
+
2603 entailment
|
| 2606 |
+
2604 entailment
|
| 2607 |
+
2605 not_entailment
|
| 2608 |
+
2606 entailment
|
| 2609 |
+
2607 not_entailment
|
| 2610 |
+
2608 not_entailment
|
| 2611 |
+
2609 not_entailment
|
| 2612 |
+
2610 not_entailment
|
| 2613 |
+
2611 entailment
|
| 2614 |
+
2612 not_entailment
|
| 2615 |
+
2613 entailment
|
| 2616 |
+
2614 not_entailment
|
| 2617 |
+
2615 entailment
|
| 2618 |
+
2616 not_entailment
|
| 2619 |
+
2617 not_entailment
|
| 2620 |
+
2618 entailment
|
| 2621 |
+
2619 entailment
|
| 2622 |
+
2620 not_entailment
|
| 2623 |
+
2621 entailment
|
| 2624 |
+
2622 entailment
|
| 2625 |
+
2623 not_entailment
|
| 2626 |
+
2624 entailment
|
| 2627 |
+
2625 entailment
|
| 2628 |
+
2626 entailment
|
| 2629 |
+
2627 entailment
|
| 2630 |
+
2628 not_entailment
|
| 2631 |
+
2629 not_entailment
|
| 2632 |
+
2630 entailment
|
| 2633 |
+
2631 entailment
|
| 2634 |
+
2632 entailment
|
| 2635 |
+
2633 not_entailment
|
| 2636 |
+
2634 not_entailment
|
| 2637 |
+
2635 entailment
|
| 2638 |
+
2636 not_entailment
|
| 2639 |
+
2637 not_entailment
|
| 2640 |
+
2638 not_entailment
|
| 2641 |
+
2639 not_entailment
|
| 2642 |
+
2640 not_entailment
|
| 2643 |
+
2641 entailment
|
| 2644 |
+
2642 not_entailment
|
| 2645 |
+
2643 not_entailment
|
| 2646 |
+
2644 entailment
|
| 2647 |
+
2645 entailment
|
| 2648 |
+
2646 entailment
|
| 2649 |
+
2647 not_entailment
|
| 2650 |
+
2648 entailment
|
| 2651 |
+
2649 entailment
|
| 2652 |
+
2650 not_entailment
|
| 2653 |
+
2651 not_entailment
|
| 2654 |
+
2652 not_entailment
|
| 2655 |
+
2653 entailment
|
| 2656 |
+
2654 entailment
|
| 2657 |
+
2655 not_entailment
|
| 2658 |
+
2656 entailment
|
| 2659 |
+
2657 not_entailment
|
| 2660 |
+
2658 not_entailment
|
| 2661 |
+
2659 entailment
|
| 2662 |
+
2660 entailment
|
| 2663 |
+
2661 not_entailment
|
| 2664 |
+
2662 not_entailment
|
| 2665 |
+
2663 entailment
|
| 2666 |
+
2664 not_entailment
|
| 2667 |
+
2665 not_entailment
|
| 2668 |
+
2666 not_entailment
|
| 2669 |
+
2667 not_entailment
|
| 2670 |
+
2668 entailment
|
| 2671 |
+
2669 entailment
|
| 2672 |
+
2670 not_entailment
|
| 2673 |
+
2671 entailment
|
| 2674 |
+
2672 entailment
|
| 2675 |
+
2673 not_entailment
|
| 2676 |
+
2674 not_entailment
|
| 2677 |
+
2675 not_entailment
|
| 2678 |
+
2676 not_entailment
|
| 2679 |
+
2677 entailment
|
| 2680 |
+
2678 not_entailment
|
| 2681 |
+
2679 not_entailment
|
| 2682 |
+
2680 not_entailment
|
| 2683 |
+
2681 not_entailment
|
| 2684 |
+
2682 not_entailment
|
| 2685 |
+
2683 not_entailment
|
| 2686 |
+
2684 entailment
|
| 2687 |
+
2685 entailment
|
| 2688 |
+
2686 not_entailment
|
| 2689 |
+
2687 entailment
|
| 2690 |
+
2688 not_entailment
|
| 2691 |
+
2689 not_entailment
|
| 2692 |
+
2690 not_entailment
|
| 2693 |
+
2691 entailment
|
| 2694 |
+
2692 entailment
|
| 2695 |
+
2693 not_entailment
|
| 2696 |
+
2694 not_entailment
|
| 2697 |
+
2695 entailment
|
| 2698 |
+
2696 entailment
|
| 2699 |
+
2697 entailment
|
| 2700 |
+
2698 entailment
|
| 2701 |
+
2699 entailment
|
| 2702 |
+
2700 entailment
|
| 2703 |
+
2701 not_entailment
|
| 2704 |
+
2702 entailment
|
| 2705 |
+
2703 not_entailment
|
| 2706 |
+
2704 entailment
|
| 2707 |
+
2705 entailment
|
| 2708 |
+
2706 not_entailment
|
| 2709 |
+
2707 not_entailment
|
| 2710 |
+
2708 entailment
|
| 2711 |
+
2709 not_entailment
|
| 2712 |
+
2710 not_entailment
|
| 2713 |
+
2711 entailment
|
| 2714 |
+
2712 entailment
|
| 2715 |
+
2713 entailment
|
| 2716 |
+
2714 entailment
|
| 2717 |
+
2715 entailment
|
| 2718 |
+
2716 entailment
|
| 2719 |
+
2717 entailment
|
| 2720 |
+
2718 entailment
|
| 2721 |
+
2719 not_entailment
|
| 2722 |
+
2720 not_entailment
|
| 2723 |
+
2721 entailment
|
| 2724 |
+
2722 not_entailment
|
| 2725 |
+
2723 not_entailment
|
| 2726 |
+
2724 not_entailment
|
| 2727 |
+
2725 entailment
|
| 2728 |
+
2726 not_entailment
|
| 2729 |
+
2727 entailment
|
| 2730 |
+
2728 not_entailment
|
| 2731 |
+
2729 entailment
|
| 2732 |
+
2730 entailment
|
| 2733 |
+
2731 not_entailment
|
| 2734 |
+
2732 entailment
|
| 2735 |
+
2733 not_entailment
|
| 2736 |
+
2734 not_entailment
|
| 2737 |
+
2735 not_entailment
|
| 2738 |
+
2736 entailment
|
| 2739 |
+
2737 entailment
|
| 2740 |
+
2738 not_entailment
|
| 2741 |
+
2739 entailment
|
| 2742 |
+
2740 not_entailment
|
| 2743 |
+
2741 not_entailment
|
| 2744 |
+
2742 entailment
|
| 2745 |
+
2743 not_entailment
|
| 2746 |
+
2744 entailment
|
| 2747 |
+
2745 entailment
|
| 2748 |
+
2746 entailment
|
| 2749 |
+
2747 entailment
|
| 2750 |
+
2748 entailment
|
| 2751 |
+
2749 not_entailment
|
| 2752 |
+
2750 not_entailment
|
| 2753 |
+
2751 entailment
|
| 2754 |
+
2752 entailment
|
| 2755 |
+
2753 entailment
|
| 2756 |
+
2754 not_entailment
|
| 2757 |
+
2755 not_entailment
|
| 2758 |
+
2756 not_entailment
|
| 2759 |
+
2757 not_entailment
|
| 2760 |
+
2758 not_entailment
|
| 2761 |
+
2759 not_entailment
|
| 2762 |
+
2760 entailment
|
| 2763 |
+
2761 not_entailment
|
| 2764 |
+
2762 entailment
|
| 2765 |
+
2763 entailment
|
| 2766 |
+
2764 not_entailment
|
| 2767 |
+
2765 entailment
|
| 2768 |
+
2766 not_entailment
|
| 2769 |
+
2767 not_entailment
|
| 2770 |
+
2768 not_entailment
|
| 2771 |
+
2769 entailment
|
| 2772 |
+
2770 entailment
|
| 2773 |
+
2771 not_entailment
|
| 2774 |
+
2772 entailment
|
| 2775 |
+
2773 entailment
|
| 2776 |
+
2774 not_entailment
|
| 2777 |
+
2775 not_entailment
|
| 2778 |
+
2776 entailment
|
| 2779 |
+
2777 entailment
|
| 2780 |
+
2778 not_entailment
|
| 2781 |
+
2779 not_entailment
|
| 2782 |
+
2780 entailment
|
| 2783 |
+
2781 not_entailment
|
| 2784 |
+
2782 entailment
|
| 2785 |
+
2783 entailment
|
| 2786 |
+
2784 not_entailment
|
| 2787 |
+
2785 not_entailment
|
| 2788 |
+
2786 not_entailment
|
| 2789 |
+
2787 entailment
|
| 2790 |
+
2788 not_entailment
|
| 2791 |
+
2789 entailment
|
| 2792 |
+
2790 not_entailment
|
| 2793 |
+
2791 not_entailment
|
| 2794 |
+
2792 not_entailment
|
| 2795 |
+
2793 entailment
|
| 2796 |
+
2794 entailment
|
| 2797 |
+
2795 not_entailment
|
| 2798 |
+
2796 entailment
|
| 2799 |
+
2797 entailment
|
| 2800 |
+
2798 not_entailment
|
| 2801 |
+
2799 not_entailment
|
| 2802 |
+
2800 entailment
|
| 2803 |
+
2801 not_entailment
|
| 2804 |
+
2802 not_entailment
|
| 2805 |
+
2803 entailment
|
| 2806 |
+
2804 not_entailment
|
| 2807 |
+
2805 entailment
|
| 2808 |
+
2806 not_entailment
|
| 2809 |
+
2807 entailment
|
| 2810 |
+
2808 not_entailment
|
| 2811 |
+
2809 entailment
|
| 2812 |
+
2810 entailment
|
| 2813 |
+
2811 entailment
|
| 2814 |
+
2812 entailment
|
| 2815 |
+
2813 entailment
|
| 2816 |
+
2814 entailment
|
| 2817 |
+
2815 entailment
|
| 2818 |
+
2816 not_entailment
|
| 2819 |
+
2817 not_entailment
|
| 2820 |
+
2818 entailment
|
| 2821 |
+
2819 not_entailment
|
| 2822 |
+
2820 not_entailment
|
| 2823 |
+
2821 entailment
|
| 2824 |
+
2822 not_entailment
|
| 2825 |
+
2823 entailment
|
| 2826 |
+
2824 entailment
|
| 2827 |
+
2825 entailment
|
| 2828 |
+
2826 entailment
|
| 2829 |
+
2827 entailment
|
| 2830 |
+
2828 entailment
|
| 2831 |
+
2829 not_entailment
|
| 2832 |
+
2830 not_entailment
|
| 2833 |
+
2831 not_entailment
|
| 2834 |
+
2832 not_entailment
|
| 2835 |
+
2833 not_entailment
|
| 2836 |
+
2834 not_entailment
|
| 2837 |
+
2835 entailment
|
| 2838 |
+
2836 not_entailment
|
| 2839 |
+
2837 not_entailment
|
| 2840 |
+
2838 not_entailment
|
| 2841 |
+
2839 not_entailment
|
| 2842 |
+
2840 not_entailment
|
| 2843 |
+
2841 entailment
|
| 2844 |
+
2842 entailment
|
| 2845 |
+
2843 entailment
|
| 2846 |
+
2844 not_entailment
|
| 2847 |
+
2845 entailment
|
| 2848 |
+
2846 entailment
|
| 2849 |
+
2847 not_entailment
|
| 2850 |
+
2848 not_entailment
|
| 2851 |
+
2849 not_entailment
|
| 2852 |
+
2850 not_entailment
|
| 2853 |
+
2851 not_entailment
|
| 2854 |
+
2852 entailment
|
| 2855 |
+
2853 entailment
|
| 2856 |
+
2854 not_entailment
|
| 2857 |
+
2855 entailment
|
| 2858 |
+
2856 not_entailment
|
| 2859 |
+
2857 entailment
|
| 2860 |
+
2858 entailment
|
| 2861 |
+
2859 not_entailment
|
| 2862 |
+
2860 entailment
|
| 2863 |
+
2861 not_entailment
|
| 2864 |
+
2862 entailment
|
| 2865 |
+
2863 not_entailment
|
| 2866 |
+
2864 not_entailment
|
| 2867 |
+
2865 entailment
|
| 2868 |
+
2866 entailment
|
| 2869 |
+
2867 not_entailment
|
| 2870 |
+
2868 entailment
|
| 2871 |
+
2869 entailment
|
| 2872 |
+
2870 not_entailment
|
| 2873 |
+
2871 entailment
|
| 2874 |
+
2872 entailment
|
| 2875 |
+
2873 entailment
|
| 2876 |
+
2874 entailment
|
| 2877 |
+
2875 not_entailment
|
| 2878 |
+
2876 entailment
|
| 2879 |
+
2877 entailment
|
| 2880 |
+
2878 not_entailment
|
| 2881 |
+
2879 entailment
|
| 2882 |
+
2880 entailment
|
| 2883 |
+
2881 not_entailment
|
| 2884 |
+
2882 not_entailment
|
| 2885 |
+
2883 not_entailment
|
| 2886 |
+
2884 entailment
|
| 2887 |
+
2885 entailment
|
| 2888 |
+
2886 not_entailment
|
| 2889 |
+
2887 entailment
|
| 2890 |
+
2888 not_entailment
|
| 2891 |
+
2889 entailment
|
| 2892 |
+
2890 entailment
|
| 2893 |
+
2891 entailment
|
| 2894 |
+
2892 not_entailment
|
| 2895 |
+
2893 entailment
|
| 2896 |
+
2894 not_entailment
|
| 2897 |
+
2895 entailment
|
| 2898 |
+
2896 not_entailment
|
| 2899 |
+
2897 not_entailment
|
| 2900 |
+
2898 not_entailment
|
| 2901 |
+
2899 entailment
|
| 2902 |
+
2900 not_entailment
|
| 2903 |
+
2901 entailment
|
| 2904 |
+
2902 entailment
|
| 2905 |
+
2903 entailment
|
| 2906 |
+
2904 not_entailment
|
| 2907 |
+
2905 not_entailment
|
| 2908 |
+
2906 not_entailment
|
| 2909 |
+
2907 not_entailment
|
| 2910 |
+
2908 entailment
|
| 2911 |
+
2909 entailment
|
| 2912 |
+
2910 entailment
|
| 2913 |
+
2911 entailment
|
| 2914 |
+
2912 not_entailment
|
| 2915 |
+
2913 entailment
|
| 2916 |
+
2914 entailment
|
| 2917 |
+
2915 not_entailment
|
| 2918 |
+
2916 not_entailment
|
| 2919 |
+
2917 not_entailment
|
| 2920 |
+
2918 entailment
|
| 2921 |
+
2919 entailment
|
| 2922 |
+
2920 not_entailment
|
| 2923 |
+
2921 not_entailment
|
| 2924 |
+
2922 entailment
|
| 2925 |
+
2923 entailment
|
| 2926 |
+
2924 not_entailment
|
| 2927 |
+
2925 not_entailment
|
| 2928 |
+
2926 not_entailment
|
| 2929 |
+
2927 not_entailment
|
| 2930 |
+
2928 not_entailment
|
| 2931 |
+
2929 not_entailment
|
| 2932 |
+
2930 not_entailment
|
| 2933 |
+
2931 entailment
|
| 2934 |
+
2932 not_entailment
|
| 2935 |
+
2933 entailment
|
| 2936 |
+
2934 entailment
|
| 2937 |
+
2935 not_entailment
|
| 2938 |
+
2936 entailment
|
| 2939 |
+
2937 not_entailment
|
| 2940 |
+
2938 not_entailment
|
| 2941 |
+
2939 entailment
|
| 2942 |
+
2940 entailment
|
| 2943 |
+
2941 not_entailment
|
| 2944 |
+
2942 not_entailment
|
| 2945 |
+
2943 entailment
|
| 2946 |
+
2944 entailment
|
| 2947 |
+
2945 entailment
|
| 2948 |
+
2946 not_entailment
|
| 2949 |
+
2947 entailment
|
| 2950 |
+
2948 entailment
|
| 2951 |
+
2949 entailment
|
| 2952 |
+
2950 not_entailment
|
| 2953 |
+
2951 entailment
|
| 2954 |
+
2952 not_entailment
|
| 2955 |
+
2953 not_entailment
|
| 2956 |
+
2954 not_entailment
|
| 2957 |
+
2955 entailment
|
| 2958 |
+
2956 not_entailment
|
| 2959 |
+
2957 not_entailment
|
| 2960 |
+
2958 not_entailment
|
| 2961 |
+
2959 not_entailment
|
| 2962 |
+
2960 entailment
|
| 2963 |
+
2961 not_entailment
|
| 2964 |
+
2962 entailment
|
| 2965 |
+
2963 not_entailment
|
| 2966 |
+
2964 not_entailment
|
| 2967 |
+
2965 not_entailment
|
| 2968 |
+
2966 not_entailment
|
| 2969 |
+
2967 entailment
|
| 2970 |
+
2968 not_entailment
|
| 2971 |
+
2969 entailment
|
| 2972 |
+
2970 entailment
|
| 2973 |
+
2971 entailment
|
| 2974 |
+
2972 not_entailment
|
| 2975 |
+
2973 entailment
|
| 2976 |
+
2974 entailment
|
| 2977 |
+
2975 not_entailment
|
| 2978 |
+
2976 entailment
|
| 2979 |
+
2977 entailment
|
| 2980 |
+
2978 entailment
|
| 2981 |
+
2979 not_entailment
|
| 2982 |
+
2980 entailment
|
| 2983 |
+
2981 entailment
|
| 2984 |
+
2982 not_entailment
|
| 2985 |
+
2983 entailment
|
| 2986 |
+
2984 entailment
|
| 2987 |
+
2985 entailment
|
| 2988 |
+
2986 not_entailment
|
| 2989 |
+
2987 not_entailment
|
| 2990 |
+
2988 entailment
|
| 2991 |
+
2989 entailment
|
| 2992 |
+
2990 not_entailment
|
| 2993 |
+
2991 entailment
|
| 2994 |
+
2992 entailment
|
| 2995 |
+
2993 not_entailment
|
| 2996 |
+
2994 entailment
|
| 2997 |
+
2995 entailment
|
| 2998 |
+
2996 not_entailment
|
| 2999 |
+
2997 entailment
|
| 3000 |
+
2998 not_entailment
|
| 3001 |
+
2999 not_entailment
|
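RTE.tsv above is a standard GLUE test-submission file: a header row followed by one tab-separated index/prediction pair per line. As a minimal sketch of how such a file can be inspected (assuming Python with only the standard library; the header row is an assumption, since the first line of the file is not rendered in this part of the diff):

```python
import csv
from collections import Counter

path = "nlu/glue_exp/rte/1dr0.15,mlr5e-04,clr2e-03,ep=30.0t=18d21h28m30/RTE.tsv"

with open(path, newline="") as f:
    reader = csv.reader(f, delimiter="\t")
    next(reader)  # skip the assumed "index <TAB> prediction" header row
    # Count how often each label was predicted on the RTE test set.
    counts = Counter(label for _, label in reader)

print(counts)  # e.g. Counter({'not_entailment': ..., 'entailment': ...})
```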
nlu/glue_exp/rte/1dr0.15,mlr5e-04,clr2e-03,ep=30.0t=18d21h28m30/all_results.json
ADDED
@@ -0,0 +1,9 @@
+{
+    "epoch": 30.0,
+    "eval_accuracy": 0.9025270758122743,
+    "eval_loss": 0.8071204423904419,
+    "eval_runtime": 0.4563,
+    "eval_samples": 277,
+    "eval_samples_per_second": 607.117,
+    "eval_steps_per_second": 2.192
+}
nlu/glue_exp/rte/1dr0.15,mlr5e-04,clr2e-03,ep=30.0t=18d21h28m30/eval_results.json
ADDED
@@ -0,0 +1,9 @@
+{
+    "epoch": 30.0,
+    "eval_accuracy": 0.9025270758122743,
+    "eval_loss": 0.8071204423904419,
+    "eval_runtime": 0.4563,
+    "eval_samples": 277,
+    "eval_samples_per_second": 607.117,
+    "eval_steps_per_second": 2.192
+}
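all_results.json and eval_results.json hold the same end-of-training evaluation summary (this duplication is standard Hugging Face Trainer output). A minimal sketch for reading the metrics back, assuming Python and a local copy of the run directory:

```python
import json

run_dir = "nlu/glue_exp/rte/1dr0.15,mlr5e-04,clr2e-03,ep=30.0t=18d21h28m30"

with open(f"{run_dir}/eval_results.json") as f:
    metrics = json.load(f)

# 0.9025 accuracy on the 277-example RTE validation split after 30 epochs.
print(f"accuracy={metrics['eval_accuracy']:.4f} loss={metrics['eval_loss']:.4f}")
```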
nlu/glue_exp/rte/1dr0.15,mlr5e-04,clr2e-03,ep=30.0t=18d21h28m30/ft/adapter_config.json
ADDED
@@ -0,0 +1,26 @@
+{
+    "T": 1.0,
+    "base_model_name_or_path": "microsoft/deberta-v3-base",
+    "bias": "none",
+    "drop_out": 0.15,
+    "inference_mode": false,
+    "layers_to_transform": null,
+    "modules_to_save": [
+        "classifier",
+        "pooler"
+    ],
+    "num_rotations": 1,
+    "peft_type": "ROTATION",
+    "r": 4,
+    "revision": null,
+    "target_modules": [
+        "key_proj",
+        "output.dense",
+        "intermediate.dense",
+        "query_proj",
+        "attention.output.dense",
+        "value_proj"
+    ],
+    "target_modules_to_skip": null,
+    "task_type": "SEQ_CLS"
+}
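The peft_type of ROTATION (with num_rotations and rank r of 4) points to a custom PEFT method rather than stock LoRA, so the config is best read as plain JSON. The sketch below only parses it and mirrors the usual PEFT convention that a module is adapted when its dotted name ends with one of the target_modules suffixes; the helper and the example module names are illustrative assumptions, not part of any released API:

```python
import json

run_dir = "nlu/glue_exp/rte/1dr0.15,mlr5e-04,clr2e-03,ep=30.0t=18d21h28m30"

with open(f"{run_dir}/ft/adapter_config.json") as f:
    cfg = json.load(f)

def is_target(module_name: str, targets: list[str]) -> bool:
    # Suffix matching, as PEFT-style target_modules lists are usually interpreted.
    return any(module_name == t or module_name.endswith("." + t) for t in targets)

print(is_target("encoder.layer.0.attention.self.query_proj", cfg["target_modules"]))  # True
print(is_target("embeddings.word_embeddings", cfg["target_modules"]))                 # False
```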
nlu/glue_exp/rte/1dr0.15,mlr5e-04,clr2e-03,ep=30.0t=18d21h28m30/ft/added_tokens.json
ADDED
@@ -0,0 +1,3 @@
+{
+    "[MASK]": 128000
+}
nlu/glue_exp/rte/1dr0.15,mlr5e-04,clr2e-03,ep=30.0t=18d21h28m30/ft/special_tokens_map.json
ADDED
@@ -0,0 +1,15 @@
+{
+  "bos_token": "[CLS]",
+  "cls_token": "[CLS]",
+  "eos_token": "[SEP]",
+  "mask_token": "[MASK]",
+  "pad_token": "[PAD]",
+  "sep_token": "[SEP]",
+  "unk_token": {
+    "content": "[UNK]",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  }
+}
nlu/glue_exp/rte/1dr0.15,mlr5e-04,clr2e-03,ep=30.0t=18d21h28m30/ft/spm.model
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c679fbf93643d19aab7ee10c0b99e460bdbc02fedf34b92b05af343b4af586fd
+size 2464616
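spm.model (like adapter_model.bin further down) is stored as a Git LFS pointer: three `key value` lines giving the spec version, the SHA-256 of the actual blob, and its size in bytes. A small stdlib sketch that parses such a pointer:

```python
# Parse a git-lfs pointer file of the form shown above.
def parse_lfs_pointer(text: str) -> dict:
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {
        "version": fields["version"],
        "sha256": fields["oid"].removeprefix("sha256:"),
        "size_bytes": int(fields["size"]),
    }

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:c679fbf93643d19aab7ee10c0b99e460bdbc02fedf34b92b05af343b4af586fd
size 2464616"""

print(parse_lfs_pointer(pointer))  # spm.model is ~2.5 MB
```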
nlu/glue_exp/rte/1dr0.15,mlr5e-04,clr2e-03,ep=30.0t=18d21h28m30/ft/tokenizer.json
ADDED
The diff for this file is too large to render. See raw diff.
nlu/glue_exp/rte/1dr0.15,mlr5e-04,clr2e-03,ep=30.0t=18d21h28m30/ft/tokenizer_config.json
ADDED
@@ -0,0 +1,60 @@
+{
+  "added_tokens_decoder": {
+    "0": {
+      "content": "[PAD]",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "[CLS]",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "[SEP]",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "3": {
+      "content": "[UNK]",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "128000": {
+      "content": "[MASK]",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "bos_token": "[CLS]",
+  "clean_up_tokenization_spaces": false,
+  "cls_token": "[CLS]",
+  "do_lower_case": false,
+  "eos_token": "[SEP]",
+  "extra_special_tokens": {},
+  "mask_token": "[MASK]",
+  "model_max_length": 512,
+  "pad_token": "[PAD]",
+  "padding_side": "right",
+  "sep_token": "[SEP]",
+  "sp_model_kwargs": {},
+  "split_by_punct": false,
+  "tokenizer_class": "DebertaV2Tokenizer",
+  "unk_token": "[UNK]",
+  "vocab_type": "spm"
+}
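With `tokenizer_class` set to DebertaV2Tokenizer and `vocab_type` spm, the ft/ directory is a self-contained tokenizer folder. A minimal sketch loading it via transformers' AutoTokenizer (a real API; the local path is this commit's, the sentence pair is illustrative):

```python
from transformers import AutoTokenizer

# The ft/ folder holds spm.model, tokenizer.json, and the configs above.
tok = AutoTokenizer.from_pretrained(
    "nlu/glue_exp/rte/1dr0.15,mlr5e-04,clr2e-03,ep=30.0t=18d21h28m30/ft"
)

# RTE inputs are premise/hypothesis pairs joined with [SEP].
enc = tok("A man is playing a guitar.", "A man is playing an instrument.")
print(tok.convert_ids_to_tokens(enc["input_ids"]))
```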
nlu/glue_exp/rte/1dr0.15,mlr5e-04,clr2e-03,ep=30.0t=18d21h28m30/ft2/adapter_config.json
ADDED
@@ -0,0 +1,26 @@
+{
+  "T": 1.0,
+  "base_model_name_or_path": "microsoft/deberta-v3-base",
+  "bias": "none",
+  "drop_out": 0.15,
+  "inference_mode": true,
+  "layers_to_transform": null,
+  "modules_to_save": [
+    "classifier",
+    "pooler"
+  ],
+  "num_rotations": 1,
+  "peft_type": "ROTATION",
+  "r": 4,
+  "revision": null,
+  "target_modules": [
+    "key_proj",
+    "output.dense",
+    "intermediate.dense",
+    "query_proj",
+    "attention.output.dense",
+    "value_proj"
+  ],
+  "target_modules_to_skip": null,
+  "task_type": "SEQ_CLS"
+}
nlu/glue_exp/rte/1dr0.15,mlr5e-04,clr2e-03,ep=30.0t=18d21h28m30/ft2/adapter_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:38f0ec60401ea6883447e8300f883d980e31d46eb9ee6edfc58bee5349bb3c6d
+size 7449859
nlu/glue_exp/rte/1dr0.15,mlr5e-04,clr2e-03,ep=30.0t=18d21h28m30/trainer_state.json
ADDED
@@ -0,0 +1,411 @@
+{
+  "best_global_step": 1600,
+  "best_metric": 0.9025270758122743,
+  "best_model_checkpoint": "./glue_exp/rte/dr0.15,mlr5e-04,clr2e-03,ep=30.0t=18d21h28m30/checkpoint-1600",
+  "epoch": 30.0,
+  "eval_steps": 100,
+  "global_step": 2340,
+  "is_hyper_param_search": false,
+  "is_local_process_zero": true,
+  "is_world_process_zero": true,
+  "log_history": [
+    {
+      "epoch": 1.282051282051282,
+      "grad_norm": 1.9696931838989258,
+      "learning_rate": 0.00198,
+      "loss": 0.6596,
+      "step": 100
+    },
+    {
+      "epoch": 1.282051282051282,
+      "eval_accuracy": 0.7833935018050542,
+      "eval_loss": 0.5189762115478516,
+      "eval_runtime": 0.5489,
+      "eval_samples_per_second": 504.682,
+      "eval_steps_per_second": 1.822,
+      "step": 100
+    },
+    {
+      "epoch": 2.564102564102564,
+      "grad_norm": 4.750543117523193,
+      "learning_rate": 0.0019901135883728755,
+      "loss": 0.4184,
+      "step": 200
+    },
+    {
+      "epoch": 2.564102564102564,
+      "eval_accuracy": 0.8050541516245487,
+      "eval_loss": 0.486526221036911,
+      "eval_runtime": 0.4628,
+      "eval_samples_per_second": 598.492,
+      "eval_steps_per_second": 2.161,
+      "step": 200
+    },
+    {
+      "epoch": 3.8461538461538463,
+      "grad_norm": 6.372544765472412,
+      "learning_rate": 0.001960254016408426,
+      "loss": 0.3039,
+      "step": 300
+    },
+    {
+      "epoch": 3.8461538461538463,
+      "eval_accuracy": 0.8158844765342961,
+      "eval_loss": 0.45899271965026855,
+      "eval_runtime": 0.4548,
+      "eval_samples_per_second": 609.084,
+      "eval_steps_per_second": 2.199,
+      "step": 300
+    },
+    {
+      "epoch": 5.128205128205128,
+      "grad_norm": 4.390738010406494,
+      "learning_rate": 0.0019110226492460884,
+      "loss": 0.1707,
+      "step": 400
+    },
+    {
+      "epoch": 5.128205128205128,
+      "eval_accuracy": 0.855595667870036,
+      "eval_loss": 0.46550631523132324,
+      "eval_runtime": 0.4618,
+      "eval_samples_per_second": 599.885,
+      "eval_steps_per_second": 2.166,
+      "step": 400
+    },
+    {
+      "epoch": 6.410256410256411,
+      "grad_norm": 4.9435834884643555,
+      "learning_rate": 0.0018434126615518196,
+      "loss": 0.1205,
+      "step": 500
+    },
+    {
+      "epoch": 6.410256410256411,
+      "eval_accuracy": 0.8664259927797834,
+      "eval_loss": 0.580657422542572,
+      "eval_runtime": 0.4664,
+      "eval_samples_per_second": 593.97,
+      "eval_steps_per_second": 2.144,
+      "step": 500
+    },
+    {
+      "epoch": 7.6923076923076925,
+      "grad_norm": 10.373971939086914,
+      "learning_rate": 0.0017587879912134756,
+      "loss": 0.0955,
+      "step": 600
+    },
+    {
+      "epoch": 7.6923076923076925,
+      "eval_accuracy": 0.8447653429602888,
+      "eval_loss": 0.9098978638648987,
+      "eval_runtime": 0.4594,
+      "eval_samples_per_second": 602.925,
+      "eval_steps_per_second": 2.177,
+      "step": 600
+    },
+    {
+      "epoch": 8.974358974358974,
+      "grad_norm": 0.25669166445732117,
+      "learning_rate": 0.0016588558237824534,
+      "loss": 0.0601,
+      "step": 700
+    },
+    {
+      "epoch": 8.974358974358974,
+      "eval_accuracy": 0.8592057761732852,
+      "eval_loss": 0.7891349792480469,
+      "eval_runtime": 0.46,
+      "eval_samples_per_second": 602.238,
+      "eval_steps_per_second": 2.174,
+      "step": 700
+    },
+    {
+      "epoch": 10.256410256410255,
+      "grad_norm": 0.056794606149196625,
+      "learning_rate": 0.0015456321523689876,
+      "loss": 0.0391,
+      "step": 800
+    },
+    {
+      "epoch": 10.256410256410255,
+      "eval_accuracy": 0.8700361010830325,
+      "eval_loss": 0.8846717476844788,
+      "eval_runtime": 0.4637,
+      "eval_samples_per_second": 597.326,
+      "eval_steps_per_second": 2.156,
+      "step": 800
+    },
+    {
+      "epoch": 11.538461538461538,
+      "grad_norm": 0.22149260342121124,
+      "learning_rate": 0.0014214011077725293,
+      "loss": 0.0627,
+      "step": 900
+    },
+    {
+      "epoch": 11.538461538461538,
+      "eval_accuracy": 0.8736462093862816,
+      "eval_loss": 0.5859602093696594,
+      "eval_runtime": 0.4545,
+      "eval_samples_per_second": 609.489,
+      "eval_steps_per_second": 2.2,
+      "step": 900
+    },
+    {
+      "epoch": 12.820512820512821,
+      "grad_norm": 14.073284149169922,
+      "learning_rate": 0.0012886688793055097,
+      "loss": 0.0429,
+      "step": 1000
+    },
+    {
+      "epoch": 12.820512820512821,
+      "eval_accuracy": 0.8592057761732852,
+      "eval_loss": 0.7581544518470764,
+      "eval_runtime": 0.4614,
+      "eval_samples_per_second": 600.386,
+      "eval_steps_per_second": 2.167,
+      "step": 1000
+    },
+    {
+      "epoch": 14.102564102564102,
+      "grad_norm": 0.21496029198169708,
+      "learning_rate": 0.0011501131558940736,
+      "loss": 0.0353,
+      "step": 1100
+    },
+    {
+      "epoch": 14.102564102564102,
+      "eval_accuracy": 0.8700361010830325,
+      "eval_loss": 0.6726717352867126,
+      "eval_runtime": 0.4603,
+      "eval_samples_per_second": 601.72,
+      "eval_steps_per_second": 2.172,
+      "step": 1100
+    },
+    {
+      "epoch": 15.384615384615385,
+      "grad_norm": 0.010951673611998558,
+      "learning_rate": 0.0010085291074115638,
+      "loss": 0.0226,
+      "step": 1200
+    },
+    {
+      "epoch": 15.384615384615385,
+      "eval_accuracy": 0.8880866425992779,
+      "eval_loss": 0.6311701536178589,
+      "eval_runtime": 0.458,
+      "eval_samples_per_second": 604.781,
+      "eval_steps_per_second": 2.183,
+      "step": 1200
+    },
+    {
+      "epoch": 16.666666666666668,
+      "grad_norm": 0.48148414492607117,
+      "learning_rate": 0.0008667729959965649,
+      "loss": 0.011,
+      "step": 1300
+    },
+    {
+      "epoch": 16.666666666666668,
+      "eval_accuracy": 0.8880866425992779,
+      "eval_loss": 0.8177604675292969,
+      "eval_runtime": 0.4634,
+      "eval_samples_per_second": 597.743,
+      "eval_steps_per_second": 2.158,
+      "step": 1300
+    },
+    {
+      "epoch": 17.94871794871795,
+      "grad_norm": 0.05897713080048561,
+      "learning_rate": 0.0007277045549190833,
+      "loss": 0.0239,
+      "step": 1400
+    },
+    {
+      "epoch": 17.94871794871795,
+      "eval_accuracy": 0.8664259927797834,
+      "eval_loss": 0.9213047623634338,
+      "eval_runtime": 0.4514,
+      "eval_samples_per_second": 613.595,
+      "eval_steps_per_second": 2.215,
+      "step": 1400
+    },
+    {
+      "epoch": 19.23076923076923,
+      "grad_norm": 0.000974228314589709,
+      "learning_rate": 0.0005941292974214578,
+      "loss": 0.0046,
+      "step": 1500
+    },
+    {
+      "epoch": 19.23076923076923,
+      "eval_accuracy": 0.8664259927797834,
+      "eval_loss": 0.9644713401794434,
+      "eval_runtime": 0.4582,
+      "eval_samples_per_second": 604.586,
+      "eval_steps_per_second": 2.183,
+      "step": 1500
+    },
+    {
+      "epoch": 20.51282051282051,
+      "grad_norm": 0.0010532273445278406,
+      "learning_rate": 0.00046874191937325337,
+      "loss": 0.0045,
+      "step": 1600
+    },
+    {
+      "epoch": 20.51282051282051,
+      "eval_accuracy": 0.9025270758122743,
+      "eval_loss": 0.8071204423904419,
+      "eval_runtime": 0.451,
+      "eval_samples_per_second": 614.137,
+      "eval_steps_per_second": 2.217,
+      "step": 1600
+    },
+    {
+      "epoch": 21.794871794871796,
+      "grad_norm": 0.0004142747784499079,
+      "learning_rate": 0.00035407193751321286,
+      "loss": 0.0032,
+      "step": 1700
+    },
+    {
+      "epoch": 21.794871794871796,
+      "eval_accuracy": 0.8916967509025271,
+      "eval_loss": 0.8693183064460754,
+      "eval_runtime": 0.464,
+      "eval_samples_per_second": 596.937,
+      "eval_steps_per_second": 2.155,
+      "step": 1700
+    },
+    {
+      "epoch": 23.076923076923077,
+      "grad_norm": 0.0005826250999234617,
+      "learning_rate": 0.00025243265995146156,
+      "loss": 0.0043,
+      "step": 1800
+    },
+    {
+      "epoch": 23.076923076923077,
+      "eval_accuracy": 0.8736462093862816,
+      "eval_loss": 1.0998587608337402,
+      "eval_runtime": 0.452,
+      "eval_samples_per_second": 612.852,
+      "eval_steps_per_second": 2.212,
+      "step": 1800
+    },
+    {
+      "epoch": 24.358974358974358,
+      "grad_norm": 0.0007387926452793181,
+      "learning_rate": 0.0001658745183814303,
+      "loss": 0.0034,
+      "step": 1900
+    },
+    {
+      "epoch": 24.358974358974358,
+      "eval_accuracy": 0.8916967509025271,
+      "eval_loss": 0.9102649092674255,
+      "eval_runtime": 0.4611,
+      "eval_samples_per_second": 600.729,
+      "eval_steps_per_second": 2.169,
+      "step": 1900
+    },
+    {
+      "epoch": 25.641025641025642,
+      "grad_norm": 0.0003036344132851809,
+      "learning_rate": 9.614370345949952e-05,
+      "loss": 0.0046,
+      "step": 2000
+    },
+    {
+      "epoch": 25.641025641025642,
+      "eval_accuracy": 0.8844765342960289,
+      "eval_loss": 0.9732220768928528,
+      "eval_runtime": 0.4493,
+      "eval_samples_per_second": 616.515,
+      "eval_steps_per_second": 2.226,
+      "step": 2000
+    },
+    {
+      "epoch": 26.923076923076923,
+      "grad_norm": 0.000459953211247921,
+      "learning_rate": 4.4646937826311684e-05,
+      "loss": 0.0059,
+      "step": 2100
+    },
+    {
+      "epoch": 26.923076923076923,
+      "eval_accuracy": 0.8916967509025271,
+      "eval_loss": 0.9090432524681091,
+      "eval_runtime": 0.4561,
+      "eval_samples_per_second": 607.359,
+      "eval_steps_per_second": 2.193,
+      "step": 2100
+    },
+    {
+      "epoch": 28.205128205128204,
+      "grad_norm": 0.0002905978763010353,
+      "learning_rate": 1.2423097425266594e-05,
+      "loss": 0.0009,
+      "step": 2200
+    },
+    {
+      "epoch": 28.205128205128204,
+      "eval_accuracy": 0.8880866425992779,
+      "eval_loss": 0.8938511610031128,
+      "eval_runtime": 0.4591,
+      "eval_samples_per_second": 603.397,
+      "eval_steps_per_second": 2.178,
+      "step": 2200
+    },
+    {
+      "epoch": 29.487179487179485,
+      "grad_norm": 0.054287414997816086,
+      "learning_rate": 1.2225361880779938e-07,
+      "loss": 0.0009,
+      "step": 2300
+    },
+    {
+      "epoch": 29.487179487179485,
+      "eval_accuracy": 0.8880866425992779,
+      "eval_loss": 0.8920240998268127,
+      "eval_runtime": 0.4639,
+      "eval_samples_per_second": 597.164,
+      "eval_steps_per_second": 2.156,
+      "step": 2300
+    },
+    {
+      "epoch": 30.0,
+      "step": 2340,
+      "total_flos": 1.2464312631552e+16,
+      "train_loss": 0.08967419704850206,
+      "train_runtime": 576.0074,
+      "train_samples_per_second": 129.686,
+      "train_steps_per_second": 4.062
+    }
+  ],
+  "logging_steps": 100,
+  "max_steps": 2340,
+  "num_input_tokens_seen": 0,
+  "num_train_epochs": 30,
+  "save_steps": 100,
+  "stateful_callbacks": {
+    "TrainerControl": {
+      "args": {
+        "should_epoch_stop": false,
+        "should_evaluate": false,
+        "should_log": false,
+        "should_save": true,
+        "should_training_stop": true
+      },
+      "attributes": {}
+    }
+  },
+  "total_flos": 1.2464312631552e+16,
+  "train_batch_size": 32,
+  "trial_name": null,
+  "trial_params": null
+}
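trainer_state.json records a train/eval pair every 100 steps; the best dev accuracy (0.9025 at step 1600, epoch ≈ 20.5) is what the checkpoint and the results files above report, even though training ran to step 2340. A short stdlib sketch recovering that from log_history:

```python
import json

state_path = ("nlu/glue_exp/rte/1dr0.15,mlr5e-04,clr2e-03,ep=30.0t=18d21h28m30/"
              "trainer_state.json")
with open(state_path) as f:
    state = json.load(f)

# Keep only the eval entries; train entries log loss/grad_norm instead.
evals = [e for e in state["log_history"] if "eval_accuracy" in e]
best = max(evals, key=lambda e: e["eval_accuracy"])
print(best["step"], best["eval_accuracy"])  # 1600 0.9025270758122743
```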
nlu/glue_exp/rte/2dr0.15,mlr5e-04,clr1e-03,ep=30.0t=18d21h08m36/RTE.tsv
ADDED
@@ -0,0 +1,3001 @@
+index	prediction
+0	not_entailment
+1	not_entailment
+2	entailment
+3	not_entailment
+4	entailment
+5	entailment
+6	entailment
+7	not_entailment
+8	not_entailment
+9	entailment
+10	not_entailment
+11	entailment
+12	not_entailment
+13	entailment
+14	not_entailment
+15	not_entailment
+16	not_entailment
+17	entailment
+18	entailment
+19	not_entailment
+20	entailment
+21	entailment
+22	not_entailment
+23	not_entailment
+24	not_entailment
+25	not_entailment
+26	entailment
+27	entailment
+28	entailment
+29	entailment
+30	not_entailment
+31	not_entailment
+32	not_entailment
+33	not_entailment
+34	not_entailment
+35	entailment
+36	not_entailment
+37	entailment
+38	entailment
+39	entailment
+40	not_entailment
+41	not_entailment
+42	entailment
+43	not_entailment
+44	not_entailment
+45	entailment
+46	entailment
+47	not_entailment
+48	entailment
+49	not_entailment
+50	not_entailment
+51	entailment
+52	not_entailment
+53	entailment
+54	entailment
+55	not_entailment
+56	entailment
+57	not_entailment
+58	not_entailment
+59	entailment
+60	entailment
+61	entailment
+62	not_entailment
+63	entailment
+64	entailment
+65	not_entailment
+66	not_entailment
+67	entailment
+68	not_entailment
+69	entailment
+70	entailment
+71	entailment
+72	entailment
+73	entailment
+74	entailment
+75	entailment
+76	not_entailment
+77	entailment
+78	entailment
+79	not_entailment
+80	not_entailment
+81	entailment
+82	not_entailment
+83	not_entailment
+84	entailment
+85	not_entailment
+86	entailment
+87	not_entailment
+88	entailment
+89	entailment
+90	not_entailment
+91	entailment
+92	not_entailment
+93	entailment
+94	entailment
+95	not_entailment
+96	entailment
+97	not_entailment
+98	not_entailment
+99	entailment
+100	entailment
+101	entailment
+102	not_entailment
+103	entailment
+104	entailment
+105	not_entailment
+106	entailment
+107	not_entailment
+108	not_entailment
+109	entailment
+110	entailment
+111	not_entailment
+112	not_entailment
+113	not_entailment
+114	entailment
+115	not_entailment
+116	entailment
+117	not_entailment
+118	not_entailment
+119	not_entailment
+120	not_entailment
+121	not_entailment
+122	not_entailment
+123	not_entailment
+124	entailment
+125	entailment
+126	entailment
+127	entailment
+128	entailment
+129	entailment
+130	not_entailment
+131	entailment
+132	not_entailment
+133	not_entailment
+134	entailment
+135	not_entailment
+136	not_entailment
+137	not_entailment
+138	entailment
+139	not_entailment
+140	entailment
+141	not_entailment
+142	not_entailment
+143	entailment
+144	entailment
+145	not_entailment
+146	not_entailment
+147	not_entailment
+148	not_entailment
+149	entailment
+150	entailment
+151	entailment
+152	not_entailment
+153	entailment
+154	entailment
+155	not_entailment
+156	not_entailment
+157	entailment
+158	not_entailment
+159	not_entailment
+160	entailment
+161	not_entailment
+162	entailment
+163	not_entailment
+164	not_entailment
+165	entailment
+166	entailment
+167	not_entailment
+168	entailment
+169	not_entailment
+170	entailment
+171	entailment
+172	not_entailment
+173	entailment
+174	entailment
+175	not_entailment
+176	not_entailment
+177	not_entailment
+178	entailment
+179	not_entailment
+180	not_entailment
+181	not_entailment
+182	entailment
+183	not_entailment
+184	entailment
+185	not_entailment
+186	entailment
+187	entailment
+188	not_entailment
+189	not_entailment
+190	entailment
+191	entailment
+192	entailment
+193	not_entailment
+194	entailment
+195	entailment
+196	entailment
+197	not_entailment
+198	not_entailment
+199	not_entailment
+200	entailment
+201	entailment
+202	entailment
+203	not_entailment
+204	not_entailment
+205	entailment
+206	not_entailment
+207	not_entailment
+208	not_entailment
+209	entailment
+210	not_entailment
+211	not_entailment
+212	entailment
+213	entailment
+214	entailment
+215	entailment
+216	not_entailment
+217	not_entailment
+218	entailment
+219	not_entailment
+220	entailment
+221	entailment
+222	entailment
+223	not_entailment
+224	entailment
+225	not_entailment
+226	entailment
+227	entailment
+228	not_entailment
+229	not_entailment
+230	entailment
+231	entailment
+232	not_entailment
+233	not_entailment
+234	entailment
+235	not_entailment
+236	entailment
+237	entailment
+238	not_entailment
+239	entailment
+240	entailment
+241	entailment
+242	entailment
+243	entailment
+244	not_entailment
+245	not_entailment
+246	entailment
+247	not_entailment
+248	entailment
+249	entailment
+250	not_entailment
+251	not_entailment
+252	entailment
+253	not_entailment
+254	entailment
+255	entailment
+256	not_entailment
+257	not_entailment
+258	entailment
+259	entailment
+260	not_entailment
+261	not_entailment
+262	entailment
+263	entailment
+264	entailment
+265	entailment
+266	entailment
+267	not_entailment
+268	entailment
+269	not_entailment
+270	not_entailment
+271	entailment
+272	not_entailment
+273	entailment
+274	entailment
+275	not_entailment
+276	not_entailment
+277	entailment
+278	not_entailment
+279	not_entailment
+280	not_entailment
+281	entailment
+282	entailment
+283	entailment
+284	not_entailment
+285	not_entailment
+286	entailment
+287	not_entailment
+288	not_entailment
+289	entailment
+290	not_entailment
+291	not_entailment
+292	not_entailment
+293	entailment
+294	entailment
+295	not_entailment
+296	not_entailment
+297	not_entailment
+298	entailment
+299	entailment
+300	not_entailment
+301	entailment
+302	entailment
+303	entailment
+304	entailment
+305	entailment
+306	entailment
+307	not_entailment
+308	not_entailment
+309	entailment
+310	not_entailment
+311	entailment
+312	not_entailment
+313	not_entailment
+314	entailment
+315	not_entailment
+316	entailment
+317	not_entailment
+318	entailment
+319	entailment
+320	not_entailment
+321	not_entailment
+322	entailment
+323	entailment
+324	entailment
+325	not_entailment
+326	entailment
+327	entailment
+328	entailment
+329	entailment
+330	not_entailment
+331	not_entailment
+332	not_entailment
+333	not_entailment
+334	not_entailment
+335	entailment
+336	entailment
+337	entailment
+338	entailment
+339	entailment
+340	entailment
+341	entailment
+342	entailment
+343	entailment
+344	not_entailment
+345	entailment
+346	entailment
+347	entailment
+348	not_entailment
+349	not_entailment
+350	entailment
+351	entailment
+352	not_entailment
+353	entailment
+354	entailment
+355	not_entailment
+356	entailment
+357	not_entailment
+358	entailment
+359	entailment
+360	entailment
+361	entailment
+362	entailment
+363	not_entailment
+364	entailment
+365	entailment
+366	not_entailment
+367	entailment
+368	entailment
+369	not_entailment
+370	not_entailment
+371	entailment
+372	not_entailment
+373	entailment
+374	not_entailment
+375	entailment
+376	not_entailment
+377	entailment
+378	not_entailment
+379	not_entailment
+380	not_entailment
+381	not_entailment
+382	entailment
+383	entailment
+384	not_entailment
+385	entailment
+386	not_entailment
+387	entailment
+388	entailment
+389	not_entailment
+390	not_entailment
+391	entailment
+392	entailment
+393	entailment
+394	entailment
+395	entailment
+396	entailment
+397	entailment
+398	entailment
+399	not_entailment
+400	entailment
+401	entailment
+402	entailment
+403	entailment
+404	entailment
+405	entailment
+406	not_entailment
+407	entailment
+408	not_entailment
+409	entailment
+410	not_entailment
+411	entailment
+412	entailment
+413	entailment
+414	not_entailment
+415	not_entailment
+416	not_entailment
+417	not_entailment
+418	not_entailment
+419	not_entailment
+420	entailment
+421	entailment
+422	not_entailment
+423	not_entailment
+424	entailment
+425	entailment
+426	entailment
+427	not_entailment
+428	entailment
+429	entailment
+430	not_entailment
+431	not_entailment
+432	entailment
+433	entailment
+434	not_entailment
+435	not_entailment
+436	not_entailment
+437	entailment
+438	entailment
+439	not_entailment
+440	not_entailment
+441	entailment
+442	entailment
+443	entailment
+444	not_entailment
+445	entailment
+446	entailment
+447	not_entailment
+448	entailment
+449	entailment
+450	entailment
+451	entailment
+452	not_entailment
+453	entailment
+454	not_entailment
+455	not_entailment
+456	entailment
+457	entailment
+458	entailment
+459	entailment
+460	entailment
+461	entailment
+462	entailment
+463	entailment
+464	entailment
+465	entailment
+466	entailment
+467	not_entailment
+468	entailment
+469	entailment
+470	not_entailment
+471	entailment
+472	not_entailment
+473	entailment
+474	entailment
+475	not_entailment
+476	not_entailment
+477	not_entailment
+478	not_entailment
+479	entailment
+480	not_entailment
+481	entailment
+482	entailment
+483	entailment
+484	entailment
+485	entailment
+486	entailment
+487	entailment
+488	not_entailment
+489	entailment
+490	entailment
+491	entailment
+492	not_entailment
+493	entailment
+494	entailment
+495	not_entailment
+496	not_entailment
+497	entailment
+498	entailment
+499	entailment
+500	not_entailment
+501	entailment
+502	entailment
+503	entailment
+504	not_entailment
+505	entailment
+506	entailment
+507	not_entailment
+508	entailment
+509	not_entailment
+510	not_entailment
+511	entailment
+512	not_entailment
+513	entailment
+514	entailment
+515	entailment
+516	not_entailment
+517	entailment
+518	not_entailment
+519	entailment
+520	not_entailment
+521	not_entailment
+522	not_entailment
+523	entailment
+524	entailment
+525	not_entailment
+526	not_entailment
+527	entailment
+528	entailment
+529	not_entailment
+530	entailment
+531	not_entailment
+532	not_entailment
+533	entailment
+534	entailment
+535	entailment
+536	not_entailment
+537	entailment
+538	not_entailment
+539	not_entailment
+540	entailment
+541	not_entailment
+542	not_entailment
+543	entailment
+544	entailment
+545	not_entailment
+546	entailment
+547	entailment
+548	entailment
+549	entailment
+550	not_entailment
+551	entailment
+552	entailment
+553	not_entailment
+554	entailment
+555	not_entailment
+556	not_entailment
+557	entailment
+558	not_entailment
+559	not_entailment
+560	entailment
+561	entailment
+562	not_entailment
+563	entailment
+564	entailment
+565	entailment
+566	entailment
+567	entailment
+568	not_entailment
+569	entailment
+570	not_entailment
+571	entailment
+572	entailment
+573	not_entailment
+574	entailment
+575	entailment
+576	entailment
+577	entailment
+578	not_entailment
+579	entailment
+580	entailment
+581	not_entailment
+582	not_entailment
+583	entailment
+584	entailment
+585	entailment
+586	entailment
+587	not_entailment
+588	entailment
+589	not_entailment
+590	entailment
+591	not_entailment
+592	not_entailment
+593	not_entailment
+594	entailment
+595	entailment
+596	entailment
+597	not_entailment
+598	entailment
+599	not_entailment
+600	entailment
+601	entailment
+602	entailment
+603	not_entailment
+604	not_entailment
+605	entailment
+606	entailment
+607	entailment
+608	not_entailment
+609	entailment
+610	entailment
+611	not_entailment
+612	entailment
+613	entailment
+614	not_entailment
+615	entailment
+616	not_entailment
+617	entailment
+618	not_entailment
+619	entailment
+620	entailment
+621	entailment
+622	entailment
+623	not_entailment
+624	entailment
+625	entailment
+626	entailment
+627	entailment
+628	not_entailment
+629	not_entailment
+630	entailment
+631	entailment
+632	not_entailment
+633	not_entailment
+634	not_entailment
+635	entailment
+636	entailment
+637	entailment
+638	not_entailment
+639	entailment
+640	entailment
+641	not_entailment
+642	entailment
+643	not_entailment
+644	entailment
+645	not_entailment
+646	entailment
+647	entailment
+648	entailment
+649	not_entailment
+650	not_entailment
+651	entailment
+652	entailment
+653	not_entailment
+654	entailment
+655	entailment
+656	entailment
+657	entailment
+658	not_entailment
+659	not_entailment
+660	not_entailment
+661	entailment
+662	entailment
+663	entailment
+664	not_entailment
+665	not_entailment
+666	entailment
+667	entailment
+668	entailment
+669	not_entailment
+670	entailment
+671	entailment
+672	not_entailment
+673	not_entailment
+674	not_entailment
+675	not_entailment
+676	entailment
+677	entailment
+678	entailment
+679	entailment
+680	entailment
+681	entailment
+682	entailment
+683	not_entailment
+684	not_entailment
+685	entailment
+686	entailment
+687	entailment
+688	not_entailment
+689	entailment
+690	entailment
+691	not_entailment
+692	entailment
+693	entailment
+694	entailment
+695	not_entailment
+696	not_entailment
+697	entailment
+698	entailment
+699	entailment
+700	entailment
+701	not_entailment
+702	entailment
+703	not_entailment
+704	not_entailment
+705	not_entailment
+706	not_entailment
+707	entailment
+708	entailment
+709	entailment
+710	not_entailment
+711	entailment
+712	not_entailment
+713	entailment
+714	not_entailment
+715	not_entailment
+716	not_entailment
+717	entailment
+718	not_entailment
+719	not_entailment
+720	entailment
+721	entailment
+722	entailment
+723	not_entailment
+724	entailment
+725	entailment
+726	not_entailment
+727	entailment
+728	entailment
+729	entailment
+730	entailment
+731	entailment
+732	not_entailment
+733	entailment
+734	entailment
+735	entailment
+736	not_entailment
+737	not_entailment
+738	entailment
+739	entailment
+740	not_entailment
+741	entailment
+742	not_entailment
+743	entailment
+744	entailment
+745	entailment
+746	not_entailment
+747	not_entailment
+748	not_entailment
+749	not_entailment
+750	entailment
+751	entailment
+752	not_entailment
+753	not_entailment
+754	entailment
+755	entailment
+756	entailment
+757	entailment
+
758 entailment
|
| 761 |
+
759 entailment
|
| 762 |
+
760 entailment
|
| 763 |
+
761 entailment
|
| 764 |
+
762 entailment
|
| 765 |
+
763 entailment
|
| 766 |
+
764 not_entailment
|
| 767 |
+
765 not_entailment
|
| 768 |
+
766 entailment
|
| 769 |
+
767 not_entailment
|
| 770 |
+
768 entailment
|
| 771 |
+
769 not_entailment
|
| 772 |
+
770 entailment
|
| 773 |
+
771 not_entailment
|
| 774 |
+
772 not_entailment
|
| 775 |
+
773 not_entailment
|
| 776 |
+
774 not_entailment
|
| 777 |
+
775 entailment
|
| 778 |
+
776 entailment
|
| 779 |
+
777 entailment
|
| 780 |
+
778 entailment
|
| 781 |
+
779 not_entailment
|
| 782 |
+
780 entailment
|
| 783 |
+
781 entailment
|
| 784 |
+
782 entailment
|
| 785 |
+
783 entailment
|
| 786 |
+
784 entailment
|
| 787 |
+
785 entailment
|
| 788 |
+
786 entailment
|
| 789 |
+
787 entailment
|
| 790 |
+
788 entailment
|
| 791 |
+
789 entailment
|
| 792 |
+
790 not_entailment
|
| 793 |
+
791 entailment
|
| 794 |
+
792 not_entailment
|
| 795 |
+
793 entailment
|
| 796 |
+
794 not_entailment
|
| 797 |
+
795 entailment
|
| 798 |
+
796 not_entailment
|
| 799 |
+
797 entailment
|
| 800 |
+
798 not_entailment
|
| 801 |
+
799 not_entailment
|
| 802 |
+
800 entailment
|
| 803 |
+
801 entailment
|
| 804 |
+
802 not_entailment
|
| 805 |
+
803 entailment
|
| 806 |
+
804 not_entailment
|
| 807 |
+
805 entailment
|
| 808 |
+
806 not_entailment
|
| 809 |
+
807 not_entailment
|
| 810 |
+
808 entailment
|
| 811 |
+
809 not_entailment
|
| 812 |
+
810 not_entailment
|
| 813 |
+
811 entailment
|
| 814 |
+
812 not_entailment
|
| 815 |
+
813 entailment
|
| 816 |
+
814 entailment
|
| 817 |
+
815 entailment
|
| 818 |
+
816 not_entailment
|
| 819 |
+
817 entailment
|
| 820 |
+
818 entailment
|
| 821 |
+
819 not_entailment
|
| 822 |
+
820 entailment
|
| 823 |
+
821 not_entailment
|
| 824 |
+
822 entailment
|
| 825 |
+
823 entailment
|
| 826 |
+
824 entailment
|
| 827 |
+
825 entailment
|
| 828 |
+
826 entailment
|
| 829 |
+
827 entailment
|
| 830 |
+
828 entailment
|
| 831 |
+
829 entailment
|
| 832 |
+
830 entailment
|
| 833 |
+
831 entailment
|
| 834 |
+
832 entailment
|
| 835 |
+
833 not_entailment
|
| 836 |
+
834 entailment
|
| 837 |
+
835 entailment
|
| 838 |
+
836 entailment
|
| 839 |
+
837 not_entailment
|
| 840 |
+
838 not_entailment
|
| 841 |
+
839 entailment
|
| 842 |
+
840 entailment
|
| 843 |
+
841 entailment
|
| 844 |
+
842 entailment
|
| 845 |
+
843 entailment
|
| 846 |
+
844 entailment
|
| 847 |
+
845 entailment
|
| 848 |
+
846 entailment
|
| 849 |
+
847 not_entailment
|
| 850 |
+
848 entailment
|
| 851 |
+
849 not_entailment
|
| 852 |
+
850 not_entailment
|
| 853 |
+
851 entailment
|
| 854 |
+
852 entailment
|
| 855 |
+
853 not_entailment
|
| 856 |
+
854 not_entailment
|
| 857 |
+
855 not_entailment
|
| 858 |
+
856 entailment
|
| 859 |
+
857 not_entailment
|
| 860 |
+
858 entailment
|
| 861 |
+
859 entailment
|
| 862 |
+
860 entailment
|
| 863 |
+
861 not_entailment
|
| 864 |
+
862 entailment
|
| 865 |
+
863 not_entailment
|
| 866 |
+
864 entailment
|
| 867 |
+
865 entailment
|
| 868 |
+
866 entailment
|
| 869 |
+
867 entailment
|
| 870 |
+
868 not_entailment
|
| 871 |
+
869 entailment
|
| 872 |
+
870 entailment
|
| 873 |
+
871 entailment
|
| 874 |
+
872 entailment
|
| 875 |
+
873 entailment
|
| 876 |
+
874 entailment
|
| 877 |
+
875 entailment
|
| 878 |
+
876 not_entailment
|
| 879 |
+
877 entailment
|
| 880 |
+
878 entailment
|
| 881 |
+
879 not_entailment
|
| 882 |
+
880 entailment
|
| 883 |
+
881 entailment
|
| 884 |
+
882 entailment
|
| 885 |
+
883 entailment
|
| 886 |
+
884 entailment
|
| 887 |
+
885 entailment
|
| 888 |
+
886 entailment
|
| 889 |
+
887 entailment
|
| 890 |
+
888 not_entailment
|
| 891 |
+
889 entailment
|
| 892 |
+
890 entailment
|
| 893 |
+
891 entailment
|
| 894 |
+
892 entailment
|
| 895 |
+
893 entailment
|
| 896 |
+
894 entailment
|
| 897 |
+
895 entailment
|
| 898 |
+
896 entailment
|
| 899 |
+
897 not_entailment
|
| 900 |
+
898 not_entailment
|
| 901 |
+
899 not_entailment
|
| 902 |
+
900 not_entailment
|
| 903 |
+
901 not_entailment
|
| 904 |
+
902 entailment
|
| 905 |
+
903 entailment
|
| 906 |
+
904 entailment
|
| 907 |
+
905 entailment
|
| 908 |
+
906 entailment
|
| 909 |
+
907 not_entailment
|
| 910 |
+
908 entailment
|
| 911 |
+
909 entailment
|
| 912 |
+
910 not_entailment
|
| 913 |
+
911 entailment
|
| 914 |
+
912 entailment
|
| 915 |
+
913 entailment
|
| 916 |
+
914 entailment
|
| 917 |
+
915 entailment
|
| 918 |
+
916 entailment
|
| 919 |
+
917 entailment
|
| 920 |
+
918 entailment
|
| 921 |
+
919 not_entailment
|
| 922 |
+
920 entailment
|
| 923 |
+
921 entailment
|
| 924 |
+
922 entailment
|
| 925 |
+
923 not_entailment
|
| 926 |
+
924 not_entailment
|
| 927 |
+
925 entailment
|
| 928 |
+
926 entailment
|
| 929 |
+
927 entailment
|
| 930 |
+
928 entailment
|
| 931 |
+
929 not_entailment
|
| 932 |
+
930 not_entailment
|
| 933 |
+
931 entailment
|
| 934 |
+
932 not_entailment
|
| 935 |
+
933 not_entailment
|
| 936 |
+
934 entailment
|
| 937 |
+
935 entailment
|
| 938 |
+
936 entailment
|
| 939 |
+
937 not_entailment
|
| 940 |
+
938 not_entailment
|
| 941 |
+
939 not_entailment
|
| 942 |
+
940 entailment
|
| 943 |
+
941 entailment
|
| 944 |
+
942 not_entailment
|
| 945 |
+
943 entailment
|
| 946 |
+
944 not_entailment
|
| 947 |
+
945 not_entailment
|
| 948 |
+
946 not_entailment
|
| 949 |
+
947 entailment
|
| 950 |
+
948 not_entailment
|
| 951 |
+
949 not_entailment
|
| 952 |
+
950 entailment
|
| 953 |
+
951 entailment
|
| 954 |
+
952 entailment
|
| 955 |
+
953 not_entailment
|
| 956 |
+
954 entailment
|
| 957 |
+
955 not_entailment
|
| 958 |
+
956 entailment
|
| 959 |
+
957 entailment
|
| 960 |
+
958 not_entailment
|
| 961 |
+
959 entailment
|
| 962 |
+
960 entailment
|
| 963 |
+
961 entailment
|
| 964 |
+
962 entailment
|
| 965 |
+
963 entailment
|
| 966 |
+
964 entailment
|
| 967 |
+
965 entailment
|
| 968 |
+
966 entailment
|
| 969 |
+
967 entailment
|
| 970 |
+
968 entailment
|
| 971 |
+
969 not_entailment
|
| 972 |
+
970 entailment
|
| 973 |
+
971 entailment
|
| 974 |
+
972 not_entailment
|
| 975 |
+
973 entailment
|
| 976 |
+
974 entailment
|
| 977 |
+
975 entailment
|
| 978 |
+
976 entailment
|
| 979 |
+
977 entailment
|
| 980 |
+
978 not_entailment
|
| 981 |
+
979 not_entailment
|
| 982 |
+
980 not_entailment
|
| 983 |
+
981 not_entailment
|
| 984 |
+
982 entailment
|
| 985 |
+
983 entailment
|
| 986 |
+
984 not_entailment
|
| 987 |
+
985 entailment
|
| 988 |
+
986 entailment
|
| 989 |
+
987 entailment
|
| 990 |
+
988 not_entailment
|
| 991 |
+
989 entailment
|
| 992 |
+
990 entailment
|
| 993 |
+
991 not_entailment
|
| 994 |
+
992 not_entailment
|
| 995 |
+
993 not_entailment
|
| 996 |
+
994 entailment
|
| 997 |
+
995 entailment
|
| 998 |
+
996 entailment
|
| 999 |
+
997 not_entailment
|
| 1000 |
+
998 entailment
|
| 1001 |
+
999 not_entailment
|
| 1002 |
+
1000 entailment
|
| 1003 |
+
1001 not_entailment
|
| 1004 |
+
1002 entailment
|
| 1005 |
+
1003 not_entailment
|
| 1006 |
+
1004 not_entailment
|
| 1007 |
+
1005 entailment
|
| 1008 |
+
1006 entailment
|
| 1009 |
+
1007 entailment
|
| 1010 |
+
1008 entailment
|
| 1011 |
+
1009 entailment
|
| 1012 |
+
1010 entailment
|
| 1013 |
+
1011 not_entailment
|
| 1014 |
+
1012 not_entailment
|
| 1015 |
+
1013 not_entailment
|
| 1016 |
+
1014 not_entailment
|
| 1017 |
+
1015 entailment
|
| 1018 |
+
1016 entailment
|
| 1019 |
+
1017 entailment
|
| 1020 |
+
1018 entailment
|
| 1021 |
+
1019 entailment
|
| 1022 |
+
1020 entailment
|
| 1023 |
+
1021 entailment
|
| 1024 |
+
1022 not_entailment
|
| 1025 |
+
1023 entailment
|
| 1026 |
+
1024 entailment
|
| 1027 |
+
1025 not_entailment
|
| 1028 |
+
1026 not_entailment
|
| 1029 |
+
1027 entailment
|
| 1030 |
+
1028 entailment
|
| 1031 |
+
1029 not_entailment
|
| 1032 |
+
1030 not_entailment
|
| 1033 |
+
1031 not_entailment
|
| 1034 |
+
1032 entailment
|
| 1035 |
+
1033 entailment
|
| 1036 |
+
1034 entailment
|
| 1037 |
+
1035 entailment
|
| 1038 |
+
1036 not_entailment
|
| 1039 |
+
1037 not_entailment
|
| 1040 |
+
1038 entailment
|
| 1041 |
+
1039 entailment
|
| 1042 |
+
1040 not_entailment
|
| 1043 |
+
1041 not_entailment
|
| 1044 |
+
1042 entailment
|
| 1045 |
+
1043 not_entailment
|
| 1046 |
+
1044 not_entailment
|
| 1047 |
+
1045 entailment
|
| 1048 |
+
1046 not_entailment
|
| 1049 |
+
1047 entailment
|
| 1050 |
+
1048 entailment
|
| 1051 |
+
1049 entailment
|
| 1052 |
+
1050 not_entailment
|
| 1053 |
+
1051 entailment
|
| 1054 |
+
1052 not_entailment
|
| 1055 |
+
1053 not_entailment
|
| 1056 |
+
1054 entailment
|
| 1057 |
+
1055 entailment
|
| 1058 |
+
1056 not_entailment
|
| 1059 |
+
1057 not_entailment
|
| 1060 |
+
1058 not_entailment
|
| 1061 |
+
1059 entailment
|
| 1062 |
+
1060 entailment
|
| 1063 |
+
1061 not_entailment
|
| 1064 |
+
1062 not_entailment
|
| 1065 |
+
1063 entailment
|
| 1066 |
+
1064 not_entailment
|
| 1067 |
+
1065 entailment
|
| 1068 |
+
1066 entailment
|
| 1069 |
+
1067 entailment
|
| 1070 |
+
1068 entailment
|
| 1071 |
+
1069 not_entailment
|
| 1072 |
+
1070 entailment
|
| 1073 |
+
1071 entailment
|
| 1074 |
+
1072 entailment
|
| 1075 |
+
1073 entailment
|
| 1076 |
+
1074 not_entailment
|
| 1077 |
+
1075 not_entailment
|
| 1078 |
+
1076 not_entailment
|
| 1079 |
+
1077 entailment
|
| 1080 |
+
1078 not_entailment
|
| 1081 |
+
1079 not_entailment
|
| 1082 |
+
1080 entailment
|
| 1083 |
+
1081 not_entailment
|
| 1084 |
+
1082 not_entailment
|
| 1085 |
+
1083 entailment
|
| 1086 |
+
1084 entailment
|
| 1087 |
+
1085 entailment
|
| 1088 |
+
1086 entailment
|
| 1089 |
+
1087 not_entailment
|
| 1090 |
+
1088 entailment
|
| 1091 |
+
1089 not_entailment
|
| 1092 |
+
1090 not_entailment
|
| 1093 |
+
1091 entailment
|
| 1094 |
+
1092 not_entailment
|
| 1095 |
+
1093 not_entailment
|
| 1096 |
+
1094 not_entailment
|
| 1097 |
+
1095 not_entailment
|
| 1098 |
+
1096 entailment
|
| 1099 |
+
1097 entailment
|
| 1100 |
+
1098 not_entailment
|
| 1101 |
+
1099 not_entailment
|
| 1102 |
+
1100 not_entailment
|
| 1103 |
+
1101 entailment
|
| 1104 |
+
1102 not_entailment
|
| 1105 |
+
1103 not_entailment
|
| 1106 |
+
1104 not_entailment
|
| 1107 |
+
1105 not_entailment
|
| 1108 |
+
1106 not_entailment
|
| 1109 |
+
1107 not_entailment
|
| 1110 |
+
1108 not_entailment
|
| 1111 |
+
1109 not_entailment
|
| 1112 |
+
1110 not_entailment
|
| 1113 |
+
1111 not_entailment
|
| 1114 |
+
1112 not_entailment
|
| 1115 |
+
1113 entailment
|
| 1116 |
+
1114 entailment
|
| 1117 |
+
1115 not_entailment
|
| 1118 |
+
1116 not_entailment
|
| 1119 |
+
1117 not_entailment
|
| 1120 |
+
1118 not_entailment
|
| 1121 |
+
1119 entailment
|
| 1122 |
+
1120 not_entailment
|
| 1123 |
+
1121 not_entailment
|
| 1124 |
+
1122 not_entailment
|
| 1125 |
+
1123 entailment
|
| 1126 |
+
1124 entailment
|
| 1127 |
+
1125 not_entailment
|
| 1128 |
+
1126 entailment
|
| 1129 |
+
1127 entailment
|
| 1130 |
+
1128 not_entailment
|
| 1131 |
+
1129 not_entailment
|
| 1132 |
+
1130 not_entailment
|
| 1133 |
+
1131 not_entailment
|
| 1134 |
+
1132 not_entailment
|
| 1135 |
+
1133 not_entailment
|
| 1136 |
+
1134 not_entailment
|
| 1137 |
+
1135 not_entailment
|
| 1138 |
+
1136 not_entailment
|
| 1139 |
+
1137 not_entailment
|
| 1140 |
+
1138 entailment
|
| 1141 |
+
1139 entailment
|
| 1142 |
+
1140 not_entailment
|
| 1143 |
+
1141 entailment
|
| 1144 |
+
1142 entailment
|
| 1145 |
+
1143 not_entailment
|
| 1146 |
+
1144 not_entailment
|
| 1147 |
+
1145 entailment
|
| 1148 |
+
1146 entailment
|
| 1149 |
+
1147 entailment
|
| 1150 |
+
1148 not_entailment
|
| 1151 |
+
1149 not_entailment
|
| 1152 |
+
1150 not_entailment
|
| 1153 |
+
1151 not_entailment
|
| 1154 |
+
1152 not_entailment
|
| 1155 |
+
1153 entailment
|
| 1156 |
+
1154 entailment
|
| 1157 |
+
1155 entailment
|
| 1158 |
+
1156 entailment
|
| 1159 |
+
1157 entailment
|
| 1160 |
+
1158 not_entailment
|
| 1161 |
+
1159 not_entailment
|
| 1162 |
+
1160 entailment
|
| 1163 |
+
1161 not_entailment
|
| 1164 |
+
1162 not_entailment
|
| 1165 |
+
1163 not_entailment
|
| 1166 |
+
1164 not_entailment
|
| 1167 |
+
1165 entailment
|
| 1168 |
+
1166 entailment
|
| 1169 |
+
1167 entailment
|
| 1170 |
+
1168 entailment
|
| 1171 |
+
1169 not_entailment
|
| 1172 |
+
1170 entailment
|
| 1173 |
+
1171 entailment
|
| 1174 |
+
1172 entailment
|
| 1175 |
+
1173 entailment
|
| 1176 |
+
1174 not_entailment
|
| 1177 |
+
1175 entailment
|
| 1178 |
+
1176 entailment
|
| 1179 |
+
1177 entailment
|
| 1180 |
+
1178 not_entailment
|
| 1181 |
+
1179 not_entailment
|
| 1182 |
+
1180 not_entailment
|
| 1183 |
+
1181 entailment
|
| 1184 |
+
1182 not_entailment
|
| 1185 |
+
1183 not_entailment
|
| 1186 |
+
1184 not_entailment
|
| 1187 |
+
1185 not_entailment
|
| 1188 |
+
1186 not_entailment
|
| 1189 |
+
1187 not_entailment
|
| 1190 |
+
1188 not_entailment
|
| 1191 |
+
1189 not_entailment
|
| 1192 |
+
1190 not_entailment
|
| 1193 |
+
1191 not_entailment
|
| 1194 |
+
1192 not_entailment
|
| 1195 |
+
1193 not_entailment
|
| 1196 |
+
1194 not_entailment
|
| 1197 |
+
1195 entailment
|
| 1198 |
+
1196 not_entailment
|
| 1199 |
+
1197 not_entailment
|
| 1200 |
+
1198 not_entailment
|
| 1201 |
+
1199 not_entailment
|
| 1202 |
+
1200 entailment
|
| 1203 |
+
1201 entailment
|
| 1204 |
+
1202 not_entailment
|
| 1205 |
+
1203 not_entailment
|
| 1206 |
+
1204 entailment
|
| 1207 |
+
1205 entailment
|
| 1208 |
+
1206 entailment
|
| 1209 |
+
1207 entailment
|
| 1210 |
+
1208 not_entailment
|
| 1211 |
+
1209 not_entailment
|
| 1212 |
+
1210 entailment
|
| 1213 |
+
1211 entailment
|
| 1214 |
+
1212 not_entailment
|
| 1215 |
+
1213 entailment
|
| 1216 |
+
1214 entailment
|
| 1217 |
+
1215 not_entailment
|
| 1218 |
+
1216 entailment
|
| 1219 |
+
1217 not_entailment
|
| 1220 |
+
1218 entailment
|
| 1221 |
+
1219 entailment
|
| 1222 |
+
1220 entailment
|
| 1223 |
+
1221 entailment
|
| 1224 |
+
1222 entailment
|
| 1225 |
+
1223 entailment
|
| 1226 |
+
1224 not_entailment
|
| 1227 |
+
1225 entailment
|
| 1228 |
+
1226 entailment
|
| 1229 |
+
1227 not_entailment
|
| 1230 |
+
1228 entailment
|
| 1231 |
+
1229 entailment
|
| 1232 |
+
1230 entailment
|
| 1233 |
+
1231 entailment
|
| 1234 |
+
1232 entailment
|
| 1235 |
+
1233 not_entailment
|
| 1236 |
+
1234 entailment
|
| 1237 |
+
1235 entailment
|
| 1238 |
+
1236 entailment
|
| 1239 |
+
1237 entailment
|
| 1240 |
+
1238 entailment
|
| 1241 |
+
1239 not_entailment
|
| 1242 |
+
1240 not_entailment
|
| 1243 |
+
1241 entailment
|
| 1244 |
+
1242 entailment
|
| 1245 |
+
1243 entailment
|
| 1246 |
+
1244 entailment
|
| 1247 |
+
1245 entailment
|
| 1248 |
+
1246 entailment
|
| 1249 |
+
1247 not_entailment
|
| 1250 |
+
1248 entailment
|
| 1251 |
+
1249 entailment
|
| 1252 |
+
1250 not_entailment
|
| 1253 |
+
1251 entailment
|
| 1254 |
+
1252 entailment
|
| 1255 |
+
1253 entailment
|
| 1256 |
+
1254 not_entailment
|
| 1257 |
+
1255 entailment
|
| 1258 |
+
1256 not_entailment
|
| 1259 |
+
1257 not_entailment
|
| 1260 |
+
1258 entailment
|
| 1261 |
+
1259 entailment
|
| 1262 |
+
1260 not_entailment
|
| 1263 |
+
1261 entailment
|
| 1264 |
+
1262 not_entailment
|
| 1265 |
+
1263 not_entailment
|
| 1266 |
+
1264 not_entailment
|
| 1267 |
+
1265 entailment
|
| 1268 |
+
1266 entailment
|
| 1269 |
+
1267 entailment
|
| 1270 |
+
1268 not_entailment
|
| 1271 |
+
1269 not_entailment
|
| 1272 |
+
1270 entailment
|
| 1273 |
+
1271 entailment
|
| 1274 |
+
1272 entailment
|
| 1275 |
+
1273 entailment
|
| 1276 |
+
1274 entailment
|
| 1277 |
+
1275 entailment
|
| 1278 |
+
1276 not_entailment
|
| 1279 |
+
1277 entailment
|
| 1280 |
+
1278 entailment
|
| 1281 |
+
1279 entailment
|
| 1282 |
+
1280 entailment
|
| 1283 |
+
1281 not_entailment
|
| 1284 |
+
1282 entailment
|
| 1285 |
+
1283 entailment
|
| 1286 |
+
1284 not_entailment
|
| 1287 |
+
1285 entailment
|
| 1288 |
+
1286 not_entailment
|
| 1289 |
+
1287 entailment
|
| 1290 |
+
1288 entailment
|
| 1291 |
+
1289 not_entailment
|
| 1292 |
+
1290 entailment
|
| 1293 |
+
1291 entailment
|
| 1294 |
+
1292 entailment
|
| 1295 |
+
1293 not_entailment
|
| 1296 |
+
1294 not_entailment
|
| 1297 |
+
1295 not_entailment
|
| 1298 |
+
1296 not_entailment
|
| 1299 |
+
1297 not_entailment
|
| 1300 |
+
1298 entailment
|
| 1301 |
+
1299 not_entailment
|
| 1302 |
+
1300 entailment
|
| 1303 |
+
1301 not_entailment
|
| 1304 |
+
1302 entailment
|
| 1305 |
+
1303 not_entailment
|
| 1306 |
+
1304 entailment
|
| 1307 |
+
1305 entailment
|
| 1308 |
+
1306 not_entailment
|
| 1309 |
+
1307 not_entailment
|
| 1310 |
+
1308 entailment
|
| 1311 |
+
1309 not_entailment
|
| 1312 |
+
1310 not_entailment
|
| 1313 |
+
1311 not_entailment
|
| 1314 |
+
1312 not_entailment
|
| 1315 |
+
1313 entailment
|
| 1316 |
+
1314 entailment
|
| 1317 |
+
1315 not_entailment
|
| 1318 |
+
1316 not_entailment
|
| 1319 |
+
1317 entailment
|
| 1320 |
+
1318 not_entailment
|
| 1321 |
+
1319 not_entailment
|
| 1322 |
+
1320 entailment
|
| 1323 |
+
1321 entailment
|
| 1324 |
+
1322 not_entailment
|
| 1325 |
+
1323 not_entailment
|
| 1326 |
+
1324 not_entailment
|
| 1327 |
+
1325 not_entailment
|
| 1328 |
+
1326 entailment
|
| 1329 |
+
1327 entailment
|
| 1330 |
+
1328 not_entailment
|
| 1331 |
+
1329 entailment
|
| 1332 |
+
1330 entailment
|
| 1333 |
+
1331 not_entailment
|
| 1334 |
+
1332 not_entailment
|
| 1335 |
+
1333 entailment
|
| 1336 |
+
1334 entailment
|
| 1337 |
+
1335 entailment
|
| 1338 |
+
1336 not_entailment
|
| 1339 |
+
1337 entailment
|
| 1340 |
+
1338 entailment
|
| 1341 |
+
1339 entailment
|
| 1342 |
+
1340 entailment
|
| 1343 |
+
1341 entailment
|
| 1344 |
+
1342 entailment
|
| 1345 |
+
1343 entailment
|
| 1346 |
+
1344 not_entailment
|
| 1347 |
+
1345 not_entailment
|
| 1348 |
+
1346 entailment
|
| 1349 |
+
1347 entailment
|
| 1350 |
+
1348 entailment
|
| 1351 |
+
1349 entailment
|
| 1352 |
+
1350 not_entailment
|
| 1353 |
+
1351 entailment
|
| 1354 |
+
1352 entailment
|
| 1355 |
+
1353 not_entailment
|
| 1356 |
+
1354 entailment
|
| 1357 |
+
1355 entailment
|
| 1358 |
+
1356 entailment
|
| 1359 |
+
1357 entailment
|
| 1360 |
+
1358 entailment
|
| 1361 |
+
1359 entailment
|
| 1362 |
+
1360 not_entailment
|
| 1363 |
+
1361 not_entailment
|
| 1364 |
+
1362 not_entailment
|
| 1365 |
+
1363 not_entailment
|
| 1366 |
+
1364 not_entailment
|
| 1367 |
+
1365 entailment
|
| 1368 |
+
1366 entailment
|
| 1369 |
+
1367 not_entailment
|
| 1370 |
+
1368 not_entailment
|
| 1371 |
+
1369 not_entailment
|
| 1372 |
+
1370 not_entailment
|
| 1373 |
+
1371 entailment
|
| 1374 |
+
1372 not_entailment
|
| 1375 |
+
1373 entailment
|
| 1376 |
+
1374 entailment
|
| 1377 |
+
1375 entailment
|
| 1378 |
+
1376 not_entailment
|
| 1379 |
+
1377 not_entailment
|
| 1380 |
+
1378 not_entailment
|
| 1381 |
+
1379 not_entailment
|
| 1382 |
+
1380 not_entailment
|
| 1383 |
+
1381 entailment
|
| 1384 |
+
1382 entailment
|
| 1385 |
+
1383 not_entailment
|
| 1386 |
+
1384 not_entailment
|
| 1387 |
+
1385 not_entailment
|
| 1388 |
+
1386 entailment
|
| 1389 |
+
1387 entailment
|
| 1390 |
+
1388 not_entailment
|
| 1391 |
+
1389 not_entailment
|
| 1392 |
+
1390 entailment
|
| 1393 |
+
1391 not_entailment
|
| 1394 |
+
1392 not_entailment
|
| 1395 |
+
1393 entailment
|
| 1396 |
+
1394 entailment
|
| 1397 |
+
1395 not_entailment
|
| 1398 |
+
1396 not_entailment
|
| 1399 |
+
1397 entailment
|
| 1400 |
+
1398 not_entailment
|
| 1401 |
+
1399 entailment
|
| 1402 |
+
1400 entailment
|
| 1403 |
+
1401 not_entailment
|
| 1404 |
+
1402 entailment
|
| 1405 |
+
1403 entailment
|
| 1406 |
+
1404 entailment
|
| 1407 |
+
1405 entailment
|
| 1408 |
+
1406 entailment
|
| 1409 |
+
1407 entailment
|
| 1410 |
+
1408 not_entailment
|
| 1411 |
+
1409 not_entailment
|
| 1412 |
+
1410 entailment
|
| 1413 |
+
1411 entailment
|
| 1414 |
+
1412 not_entailment
|
| 1415 |
+
1413 entailment
|
| 1416 |
+
1414 entailment
|
| 1417 |
+
1415 entailment
|
| 1418 |
+
1416 entailment
|
| 1419 |
+
1417 entailment
|
| 1420 |
+
1418 entailment
|
| 1421 |
+
1419 entailment
|
| 1422 |
+
1420 not_entailment
|
| 1423 |
+
1421 entailment
|
| 1424 |
+
1422 entailment
|
| 1425 |
+
1423 entailment
|
| 1426 |
+
1424 entailment
|
| 1427 |
+
1425 entailment
|
| 1428 |
+
1426 entailment
|
| 1429 |
+
1427 entailment
|
| 1430 |
+
1428 entailment
|
| 1431 |
+
1429 not_entailment
|
| 1432 |
+
1430 entailment
|
| 1433 |
+
1431 entailment
|
| 1434 |
+
1432 entailment
|
| 1435 |
+
1433 entailment
|
| 1436 |
+
1434 entailment
|
| 1437 |
+
1435 not_entailment
|
| 1438 |
+
1436 entailment
|
| 1439 |
+
1437 not_entailment
|
| 1440 |
+
1438 entailment
|
| 1441 |
+
1439 entailment
|
| 1442 |
+
1440 entailment
|
| 1443 |
+
1441 entailment
|
| 1444 |
+
1442 entailment
|
| 1445 |
+
1443 entailment
|
| 1446 |
+
1444 entailment
|
| 1447 |
+
1445 entailment
|
| 1448 |
+
1446 not_entailment
|
| 1449 |
+
1447 entailment
|
| 1450 |
+
1448 not_entailment
|
| 1451 |
+
1449 entailment
|
| 1452 |
+
1450 entailment
|
| 1453 |
+
1451 not_entailment
|
| 1454 |
+
1452 not_entailment
|
| 1455 |
+
1453 not_entailment
|
| 1456 |
+
1454 not_entailment
|
| 1457 |
+
1455 entailment
|
| 1458 |
+
1456 entailment
|
| 1459 |
+
1457 entailment
|
| 1460 |
+
1458 entailment
|
| 1461 |
+
1459 entailment
|
| 1462 |
+
1460 not_entailment
|
| 1463 |
+
1461 entailment
|
| 1464 |
+
1462 entailment
|
| 1465 |
+
1463 entailment
|
| 1466 |
+
1464 not_entailment
|
| 1467 |
+
1465 entailment
|
| 1468 |
+
1466 entailment
|
| 1469 |
+
1467 entailment
|
| 1470 |
+
1468 entailment
|
| 1471 |
+
1469 not_entailment
|
| 1472 |
+
1470 not_entailment
|
| 1473 |
+
1471 not_entailment
|
| 1474 |
+
1472 not_entailment
|
| 1475 |
+
1473 entailment
|
| 1476 |
+
1474 entailment
|
| 1477 |
+
1475 entailment
|
| 1478 |
+
1476 entailment
|
| 1479 |
+
1477 entailment
|
| 1480 |
+
1478 entailment
|
| 1481 |
+
1479 not_entailment
|
| 1482 |
+
1480 entailment
|
| 1483 |
+
1481 not_entailment
|
| 1484 |
+
1482 entailment
|
| 1485 |
+
1483 entailment
|
| 1486 |
+
1484 entailment
|
| 1487 |
+
1485 entailment
|
| 1488 |
+
1486 entailment
|
| 1489 |
+
1487 entailment
|
| 1490 |
+
1488 entailment
|
| 1491 |
+
1489 entailment
|
| 1492 |
+
1490 entailment
|
| 1493 |
+
1491 entailment
|
| 1494 |
+
1492 entailment
|
| 1495 |
+
1493 entailment
|
| 1496 |
+
1494 entailment
|
| 1497 |
+
1495 not_entailment
|
| 1498 |
+
1496 entailment
|
| 1499 |
+
1497 not_entailment
|
| 1500 |
+
1498 entailment
|
| 1501 |
+
1499 not_entailment
|
| 1502 |
+
1500 not_entailment
|
| 1503 |
+
1501 entailment
|
| 1504 |
+
1502 not_entailment
|
| 1505 |
+
1503 not_entailment
|
| 1506 |
+
1504 entailment
|
| 1507 |
+
1505 entailment
|
| 1508 |
+
1506 not_entailment
|
| 1509 |
+
1507 entailment
|
| 1510 |
+
1508 not_entailment
|
| 1511 |
+
1509 not_entailment
|
| 1512 |
+
1510 not_entailment
|
| 1513 |
+
1511 entailment
|
| 1514 |
+
1512 entailment
|
| 1515 |
+
1513 not_entailment
|
| 1516 |
+
1514 entailment
|
| 1517 |
+
1515 entailment
|
| 1518 |
+
1516 entailment
|
| 1519 |
+
1517 entailment
|
| 1520 |
+
1518 entailment
|
| 1521 |
+
1519 entailment
|
| 1522 |
+
1520 entailment
|
| 1523 |
+
1521 entailment
|
| 1524 |
+
1522 entailment
|
| 1525 |
+
1523 entailment
|
| 1526 |
+
1524 entailment
|
| 1527 |
+
1525 entailment
|
| 1528 |
+
1526 not_entailment
|
| 1529 |
+
1527 entailment
|
| 1530 |
+
1528 not_entailment
|
| 1531 |
+
1529 entailment
|
| 1532 |
+
1530 entailment
|
| 1533 |
+
1531 entailment
|
| 1534 |
+
1532 entailment
|
| 1535 |
+
1533 entailment
|
| 1536 |
+
1534 entailment
|
| 1537 |
+
1535 entailment
|
| 1538 |
+
1536 entailment
|
| 1539 |
+
1537 not_entailment
|
| 1540 |
+
1538 entailment
|
| 1541 |
+
1539 not_entailment
|
| 1542 |
+
1540 not_entailment
|
| 1543 |
+
1541 not_entailment
|
| 1544 |
+
1542 not_entailment
|
| 1545 |
+
1543 entailment
|
| 1546 |
+
1544 entailment
|
| 1547 |
+
1545 not_entailment
|
| 1548 |
+
1546 not_entailment
|
| 1549 |
+
1547 entailment
|
| 1550 |
+
1548 entailment
|
| 1551 |
+
1549 entailment
|
| 1552 |
+
1550 entailment
|
| 1553 |
+
1551 entailment
|
| 1554 |
+
1552 entailment
|
| 1555 |
+
1553 entailment
|
| 1556 |
+
1554 not_entailment
|
| 1557 |
+
1555 entailment
|
| 1558 |
+
1556 entailment
|
| 1559 |
+
1557 entailment
|
| 1560 |
+
1558 not_entailment
|
| 1561 |
+
1559 not_entailment
|
| 1562 |
+
1560 entailment
|
| 1563 |
+
1561 not_entailment
|
| 1564 |
+
1562 entailment
|
| 1565 |
+
1563 entailment
|
| 1566 |
+
1564 not_entailment
|
| 1567 |
+
1565 entailment
|
| 1568 |
+
1566 not_entailment
|
| 1569 |
+
1567 entailment
|
| 1570 |
+
1568 not_entailment
|
| 1571 |
+
1569 entailment
|
| 1572 |
+
1570 entailment
|
| 1573 |
+
1571 not_entailment
|
| 1574 |
+
1572 entailment
|
| 1575 |
+
1573 entailment
|
| 1576 |
+
1574 not_entailment
|
| 1577 |
+
1575 not_entailment
|
| 1578 |
+
1576 entailment
|
| 1579 |
+
1577 entailment
|
| 1580 |
+
1578 entailment
|
| 1581 |
+
1579 not_entailment
|
| 1582 |
+
1580 entailment
|
| 1583 |
+
1581 not_entailment
|
| 1584 |
+
1582 not_entailment
|
| 1585 |
+
1583 not_entailment
|
| 1586 |
+
1584 not_entailment
|
| 1587 |
+
1585 entailment
|
| 1588 |
+
1586 entailment
|
| 1589 |
+
1587 entailment
|
| 1590 |
+
1588 entailment
|
| 1591 |
+
1589 not_entailment
|
| 1592 |
+
1590 entailment
|
| 1593 |
+
1591 not_entailment
|
| 1594 |
+
1592 not_entailment
|
| 1595 |
+
1593 not_entailment
|
| 1596 |
+
1594 entailment
|
| 1597 |
+
1595 entailment
|
| 1598 |
+
1596 entailment
|
| 1599 |
+
1597 entailment
|
| 1600 |
+
1598 entailment
|
| 1601 |
+
1599 entailment
|
| 1602 |
+
1600 not_entailment
|
| 1603 |
+
1601 entailment
|
| 1604 |
+
1602 entailment
|
| 1605 |
+
1603 not_entailment
|
| 1606 |
+
1604 not_entailment
|
| 1607 |
+
1605 entailment
|
| 1608 |
+
1606 entailment
|
| 1609 |
+
1607 not_entailment
|
| 1610 |
+
1608 entailment
|
| 1611 |
+
1609 entailment
|
| 1612 |
+
1610 entailment
|
| 1613 |
+
1611 not_entailment
|
| 1614 |
+
1612 entailment
|
| 1615 |
+
1613 entailment
|
| 1616 |
+
1614 not_entailment
|
| 1617 |
+
1615 not_entailment
|
| 1618 |
+
1616 not_entailment
|
| 1619 |
+
1617 entailment
|
| 1620 |
+
1618 not_entailment
|
| 1621 |
+
1619 not_entailment
|
| 1622 |
+
1620 not_entailment
|
| 1623 |
+
1621 entailment
|
| 1624 |
+
1622 not_entailment
|
| 1625 |
+
1623 entailment
|
| 1626 |
+
1624 entailment
|
| 1627 |
+
1625 not_entailment
|
| 1628 |
+
1626 not_entailment
|
| 1629 |
+
1627 not_entailment
|
| 1630 |
+
1628 not_entailment
|
| 1631 |
+
1629 entailment
|
| 1632 |
+
1630 not_entailment
|
| 1633 |
+
1631 not_entailment
|
| 1634 |
+
1632 entailment
|
| 1635 |
+
1633 entailment
|
| 1636 |
+
1634 entailment
|
| 1637 |
+
1635 entailment
|
| 1638 |
+
1636 entailment
|
| 1639 |
+
1637 not_entailment
|
| 1640 |
+
1638 not_entailment
|
| 1641 |
+
1639 not_entailment
|
| 1642 |
+
1640 entailment
|
| 1643 |
+
1641 not_entailment
|
| 1644 |
+
1642 not_entailment
|
| 1645 |
+
1643 entailment
|
| 1646 |
+
1644 not_entailment
|
| 1647 |
+
1645 not_entailment
|
| 1648 |
+
1646 not_entailment
|
| 1649 |
+
1647 not_entailment
|
| 1650 |
+
1648 not_entailment
|
| 1651 |
+
1649 not_entailment
|
| 1652 |
+
1650 entailment
|
| 1653 |
+
1651 entailment
|
| 1654 |
+
1652 not_entailment
|
| 1655 |
+
1653 not_entailment
|
| 1656 |
+
1654 entailment
|
| 1657 |
+
1655 entailment
|
| 1658 |
+
1656 entailment
|
| 1659 |
+
1657 entailment
|
| 1660 |
+
1658 entailment
|
| 1661 |
+
1659 entailment
|
| 1662 |
+
1660 not_entailment
|
| 1663 |
+
1661 entailment
|
| 1664 |
+
1662 entailment
|
| 1665 |
+
1663 not_entailment
|
| 1666 |
+
1664 entailment
|
| 1667 |
+
1665 not_entailment
|
| 1668 |
+
1666 entailment
|
| 1669 |
+
1667 not_entailment
|
| 1670 |
+
1668 entailment
|
| 1671 |
+
1669 entailment
|
| 1672 |
+
1670 entailment
|
| 1673 |
+
1671 entailment
|
| 1674 |
+
1672 not_entailment
|
| 1675 |
+
1673 not_entailment
|
| 1676 |
+
1674 entailment
|
| 1677 |
+
1675 not_entailment
|
| 1678 |
+
1676 entailment
|
| 1679 |
+
1677 entailment
|
| 1680 |
+
1678 not_entailment
|
| 1681 |
+
1679 entailment
|
| 1682 |
+
1680 entailment
|
| 1683 |
+
1681 not_entailment
|
| 1684 |
+
1682 not_entailment
|
| 1685 |
+
1683 entailment
|
| 1686 |
+
1684 not_entailment
|
| 1687 |
+
1685 not_entailment
|
| 1688 |
+
1686 entailment
|
| 1689 |
+
1687 entailment
|
| 1690 |
+
1688 not_entailment
|
| 1691 |
+
1689 not_entailment
|
| 1692 |
+
1690 entailment
|
| 1693 |
+
1691 entailment
|
| 1694 |
+
1692 not_entailment
|
| 1695 |
+
1693 not_entailment
|
| 1696 |
+
1694 not_entailment
|
| 1697 |
+
1695 entailment
|
| 1698 |
+
1696 entailment
|
| 1699 |
+
1697 not_entailment
|
| 1700 |
+
1698 entailment
|
| 1701 |
+
1699 not_entailment
|
| 1702 |
+
1700 entailment
|
| 1703 |
+
1701 entailment
|
| 1704 |
+
1702 entailment
|
| 1705 |
+
1703 entailment
|
| 1706 |
+
1704 entailment
|
| 1707 |
+
1705 not_entailment
|
| 1708 |
+
1706 not_entailment
|
| 1709 |
+
1707 entailment
|
| 1710 |
+
1708 not_entailment
|
| 1711 |
+
1709 not_entailment
|
| 1712 |
+
1710 not_entailment
|
| 1713 |
+
1711 not_entailment
|
| 1714 |
+
1712 entailment
|
| 1715 |
+
1713 entailment
|
| 1716 |
+
1714 not_entailment
|
| 1717 |
+
1715 entailment
|
| 1718 |
+
1716 entailment
|
| 1719 |
+
1717 entailment
|
| 1720 |
+
1718 not_entailment
|
| 1721 |
+
1719 not_entailment
|
| 1722 |
+
1720 not_entailment
|
| 1723 |
+
1721 entailment
|
| 1724 |
+
1722 entailment
|
| 1725 |
+
1723 not_entailment
|
| 1726 |
+
1724 entailment
|
| 1727 |
+
1725 entailment
|
| 1728 |
+
1726 entailment
|
| 1729 |
+
1727 entailment
|
| 1730 |
+
1728 entailment
|
| 1731 |
+
1729 entailment
|
| 1732 |
+
1730 not_entailment
|
| 1733 |
+
1731 entailment
|
| 1734 |
+
1732 not_entailment
|
| 1735 |
+
1733 entailment
|
| 1736 |
+
1734 entailment
|
| 1737 |
+
1735 not_entailment
|
| 1738 |
+
1736 entailment
|
| 1739 |
+
1737 entailment
|
| 1740 |
+
1738 not_entailment
|
| 1741 |
+
1739 not_entailment
|
| 1742 |
+
1740 entailment
|
| 1743 |
+
1741 entailment
|
| 1744 |
+
1742 not_entailment
|
| 1745 |
+
1743 not_entailment
|
| 1746 |
+
1744 entailment
|
| 1747 |
+
1745 not_entailment
|
| 1748 |
+
1746 entailment
|
| 1749 |
+
1747 not_entailment
|
| 1750 |
+
1748 not_entailment
|
| 1751 |
+
1749 not_entailment
|
| 1752 |
+
1750 not_entailment
|
| 1753 |
+
1751 not_entailment
|
| 1754 |
+
1752 entailment
|
| 1755 |
+
1753 entailment
|
| 1756 |
+
1754 not_entailment
|
| 1757 |
+
1755 not_entailment
|
| 1758 |
+
1756 not_entailment
|
| 1759 |
+
1757 not_entailment
|
| 1760 |
+
1758 entailment
|
| 1761 |
+
1759 entailment
|
| 1762 |
+
1760 entailment
|
| 1763 |
+
1761 not_entailment
|
| 1764 |
+
1762 entailment
|
| 1765 |
+
1763 not_entailment
|
| 1766 |
+
1764 entailment
|
| 1767 |
+
1765 entailment
|
| 1768 |
+
1766 not_entailment
|
| 1769 |
+
1767 not_entailment
|
| 1770 |
+
1768 entailment
|
| 1771 |
+
1769 entailment
|
| 1772 |
+
1770 entailment
|
| 1773 |
+
1771 not_entailment
|
| 1774 |
+
1772 not_entailment
|
| 1775 |
+
1773 entailment
|
| 1776 |
+
1774 entailment
|
| 1777 |
+
1775 entailment
|
| 1778 |
+
1776 not_entailment
|
| 1779 |
+
1777 entailment
|
| 1780 |
+
1778 entailment
|
| 1781 |
+
1779 entailment
|
| 1782 |
+
1780 entailment
|
| 1783 |
+
1781 not_entailment
|
| 1784 |
+
1782 not_entailment
|
| 1785 |
+
1783 entailment
|
| 1786 |
+
1784 not_entailment
|
| 1787 |
+
1785 entailment
|
| 1788 |
+
1786 entailment
|
| 1789 |
+
1787 not_entailment
|
| 1790 |
+
1788 entailment
|
| 1791 |
+
1789 entailment
|
| 1792 |
+
1790 entailment
|
| 1793 |
+
1791 entailment
|
| 1794 |
+
1792 entailment
|
| 1795 |
+
1793 not_entailment
|
| 1796 |
+
1794 entailment
|
| 1797 |
+
1795 entailment
|
| 1798 |
+
1796 not_entailment
|
| 1799 |
+
1797 not_entailment
|
| 1800 |
+
1798 entailment
|
| 1801 |
+
1799 not_entailment
|
| 1802 |
+
1800 entailment
|
| 1803 |
+
1801 entailment
|
| 1804 |
+
1802 entailment
|
| 1805 |
+
1803 entailment
|
| 1806 |
+
1804 not_entailment
|
| 1807 |
+
1805 entailment
|
| 1808 |
+
1806 not_entailment
|
| 1809 |
+
1807 not_entailment
|
| 1810 |
+
1808 not_entailment
|
| 1811 |
+
1809 entailment
|
| 1812 |
+
1810 entailment
|
| 1813 |
+
1811 entailment
|
| 1814 |
+
1812 not_entailment
|
| 1815 |
+
1813 entailment
|
| 1816 |
+
1814 entailment
|
| 1817 |
+
1815 entailment
|
| 1818 |
+
1816 entailment
|
| 1819 |
+
1817 entailment
|
| 1820 |
+
1818 entailment
|
| 1821 |
+
1819 entailment
|
| 1822 |
+
1820 not_entailment
|
| 1823 |
+
1821 not_entailment
|
| 1824 |
+
1822 entailment
|
| 1825 |
+
1823 not_entailment
|
| 1826 |
+
1824 entailment
|
| 1827 |
+
1825 entailment
|
| 1828 |
+
1826 entailment
|
| 1829 |
+
1827 not_entailment
|
| 1830 |
+
1828 entailment
|
| 1831 |
+
1829 entailment
|
| 1832 |
+
1830 entailment
|
| 1833 |
+
1831 not_entailment
|
| 1834 |
+
1832 not_entailment
|
| 1835 |
+
1833 not_entailment
|
| 1836 |
+
1834 entailment
|
| 1837 |
+
1835 entailment
|
| 1838 |
+
1836 not_entailment
|
| 1839 |
+
1837 entailment
|
| 1840 |
+
1838 not_entailment
|
| 1841 |
+
1839 not_entailment
|
| 1842 |
+
1840 entailment
|
| 1843 |
+
1841 entailment
|
| 1844 |
+
1842 not_entailment
|
| 1845 |
+
1843 entailment
|
| 1846 |
+
1844 not_entailment
|
| 1847 |
+
1845 not_entailment
|
| 1848 |
+
1846 not_entailment
|
| 1849 |
+
1847 not_entailment
|
| 1850 |
+
1848 not_entailment
|
| 1851 |
+
1849 entailment
|
| 1852 |
+
1850 not_entailment
|
| 1853 |
+
1851 not_entailment
|
| 1854 |
+
1852 entailment
|
| 1855 |
+
1853 not_entailment
|
| 1856 |
+
1854 entailment
|
| 1857 |
+
1855 not_entailment
|
| 1858 |
+
1856 not_entailment
|
| 1859 |
+
1857 not_entailment
|
| 1860 |
+
1858 entailment
|
| 1861 |
+
1859 not_entailment
|
| 1862 |
+
1860 entailment
|
| 1863 |
+
1861 entailment
|
| 1864 |
+
1862 not_entailment
|
| 1865 |
+
1863 entailment
|
| 1866 |
+
1864 entailment
|
| 1867 |
+
1865 not_entailment
|
| 1868 |
+
1866 entailment
|
| 1869 |
+
1867 not_entailment
|
| 1870 |
+
1868 entailment
|
| 1871 |
+
1869 entailment
|
| 1872 |
+
1870 entailment
|
| 1873 |
+
1871 entailment
|
| 1874 |
+
1872 entailment
|
| 1875 |
+
1873 not_entailment
|
| 1876 |
+
1874 entailment
|
| 1877 |
+
1875 entailment
|
| 1878 |
+
1876 not_entailment
|
| 1879 |
+
1877 not_entailment
|
| 1880 |
+
1878 not_entailment
|
| 1881 |
+
1879 not_entailment
|
| 1882 |
+
1880 not_entailment
|
| 1883 |
+
1881 entailment
|
| 1884 |
+
1882 not_entailment
|
| 1885 |
+
1883 not_entailment
|
| 1886 |
+
1884 entailment
|
| 1887 |
+
1885 entailment
|
| 1888 |
+
1886 entailment
|
| 1889 |
+
1887 entailment
|
| 1890 |
+
1888 not_entailment
|
| 1891 |
+
1889 entailment
|
| 1892 |
+
1890 not_entailment
|
| 1893 |
+
1891 not_entailment
|
| 1894 |
+
1892 entailment
|
| 1895 |
+
1893 entailment
|
| 1896 |
+
1894 entailment
|
| 1897 |
+
1895 not_entailment
|
| 1898 |
+
1896 entailment
|
| 1899 |
+
1897 entailment
|
| 1900 |
+
1898 not_entailment
|
| 1901 |
+
1899 entailment
|
| 1902 |
+
1900 not_entailment
|
| 1903 |
+
1901 entailment
|
| 1904 |
+
1902 not_entailment
|
| 1905 |
+
1903 not_entailment
|
| 1906 |
+
1904 not_entailment
|
| 1907 |
+
1905 entailment
|
| 1908 |
+
1906 entailment
|
| 1909 |
+
1907 not_entailment
|
| 1910 |
+
1908 not_entailment
|
| 1911 |
+
1909 entailment
|
| 1912 |
+
1910 entailment
|
| 1913 |
+
1911 entailment
|
| 1914 |
+
1912 not_entailment
|
| 1915 |
+
1913 not_entailment
|
| 1916 |
+
1914 entailment
|
| 1917 |
+
1915 not_entailment
|
| 1918 |
+
1916 not_entailment
|
| 1919 |
+
1917 entailment
|
| 1920 |
+
1918 not_entailment
|
| 1921 |
+
1919 entailment
|
| 1922 |
+
1920 not_entailment
|
| 1923 |
+
1921 not_entailment
|
| 1924 |
+
1922 entailment
|
| 1925 |
+
1923 entailment
|
| 1926 |
+
1924 entailment
|
| 1927 |
+
1925 entailment
|
| 1928 |
+
1926 not_entailment
|
| 1929 |
+
1927 not_entailment
|
| 1930 |
+
1928 not_entailment
|
| 1931 |
+
1929 entailment
|
| 1932 |
+
1930 entailment
|
| 1933 |
+
1931 entailment
|
| 1934 |
+
1932 entailment
|
| 1935 |
+
1933 not_entailment
|
| 1936 |
+
1934 not_entailment
|
| 1937 |
+
1935 entailment
|
| 1938 |
+
1936 not_entailment
|
| 1939 |
+
1937 not_entailment
|
| 1940 |
+
1938 not_entailment
|
| 1941 |
+
1939 entailment
|
| 1942 |
+
1940 entailment
|
| 1943 |
+
1941 not_entailment
|
| 1944 |
+
1942 not_entailment
|
| 1945 |
+
1943 entailment
|
| 1946 |
+
1944 entailment
|
| 1947 |
+
1945 not_entailment
|
| 1948 |
+
1946 entailment
|
| 1949 |
+
1947 entailment
|
| 1950 |
+
1948 entailment
|
| 1951 |
+
1949 not_entailment
|
| 1952 |
+
1950 not_entailment
|
| 1953 |
+
1951 not_entailment
|
| 1954 |
+
1952 not_entailment
|
| 1955 |
+
1953 not_entailment
|
| 1956 |
+
1954 not_entailment
|
| 1957 |
+
1955 not_entailment
|
| 1958 |
+
1956 entailment
|
| 1959 |
+
1957 entailment
|
| 1960 |
+
1958 entailment
|
| 1961 |
+
1959 entailment
|
| 1962 |
+
1960 entailment
|
| 1963 |
+
1961 not_entailment
|
| 1964 |
+
1962 entailment
|
| 1965 |
+
1963 not_entailment
|
| 1966 |
+
1964 not_entailment
|
| 1967 |
+
1965 entailment
|
| 1968 |
+
1966 entailment
|
| 1969 |
+
1967 entailment
|
| 1970 |
+
1968 not_entailment
|
| 1971 |
+
1969 not_entailment
|
| 1972 |
+
1970 entailment
|
| 1973 |
+
1971 entailment
|
| 1974 |
+
1972 not_entailment
|
| 1975 |
+
1973 entailment
|
| 1976 |
+
1974 entailment
|
| 1977 |
+
1975 not_entailment
|
| 1978 |
+
1976 not_entailment
|
| 1979 |
+
1977 not_entailment
|
| 1980 |
+
1978 entailment
|
| 1981 |
+
1979 entailment
|
| 1982 |
+
1980 entailment
|
| 1983 |
+
1981 entailment
|
| 1984 |
+
1982 not_entailment
|
| 1985 |
+
1983 entailment
|
| 1986 |
+
1984 entailment
|
| 1987 |
+
1985 entailment
|
| 1988 |
+
1986 not_entailment
|
| 1989 |
+
1987 not_entailment
|
| 1990 |
+
1988 not_entailment
|
| 1991 |
+
1989 entailment
|
| 1992 |
+
1990 entailment
|
| 1993 |
+
1991 entailment
|
| 1994 |
+
1992 not_entailment
|
| 1995 |
+
1993 not_entailment
|
| 1996 |
+
1994 not_entailment
|
| 1997 |
+
1995 entailment
|
| 1998 |
+
1996 entailment
|
| 1999 |
+
1997 not_entailment
|
| 2000 |
+
1998 entailment
|
| 2001 |
+
1999 entailment
|
| 2002 |
+
2000 not_entailment
|
| 2003 |
+
2001 not_entailment
|
| 2004 |
+
2002 entailment
|
| 2005 |
+
2003 entailment
|
| 2006 |
+
2004 not_entailment
|
| 2007 |
+
2005 entailment
|
| 2008 |
+
2006 not_entailment
|
| 2009 |
+
2007 not_entailment
|
| 2010 |
+
2008 not_entailment
|
| 2011 |
+
2009 not_entailment
|
| 2012 |
+
2010 entailment
|
| 2013 |
+
2011 entailment
|
| 2014 |
+
2012 entailment
|
| 2015 |
+
2013 entailment
|
| 2016 |
+
2014 entailment
|
| 2017 |
+
2015 entailment
|
| 2018 |
+
2016 entailment
|
| 2019 |
+
2017 entailment
|
| 2020 |
+
2018 not_entailment
|
| 2021 |
+
2019 entailment
|
| 2022 |
+
2020 not_entailment
|
| 2023 |
+
2021 not_entailment
|
| 2024 |
+
2022 entailment
|
| 2025 |
+
2023 entailment
|
| 2026 |
+
2024 entailment
|
| 2027 |
+
2025 entailment
|
| 2028 |
+
2026 entailment
|
| 2029 |
+
2027 not_entailment
|
| 2030 |
+
2028 not_entailment
|
| 2031 |
+
2029 entailment
|
| 2032 |
+
2030 not_entailment
|
| 2033 |
+
2031 not_entailment
|
| 2034 |
+
2032 entailment
|
| 2035 |
+
2033 entailment
|
| 2036 |
+
2034 not_entailment
|
| 2037 |
+
2035 not_entailment
|
| 2038 |
+
2036 entailment
|
| 2039 |
+
2037 entailment
|
| 2040 |
+
2038 entailment
|
| 2041 |
+
2039 not_entailment
|
| 2042 |
+
2040 entailment
|
| 2043 |
+
2041 entailment
|
| 2044 |
+
2042 entailment
|
| 2045 |
+
2043 not_entailment
|
| 2046 |
+
2044 entailment
|
| 2047 |
+
2045 entailment
|
| 2048 |
+
2046 entailment
|
| 2049 |
+
2047 not_entailment
|
| 2050 |
+
2048 not_entailment
|
| 2051 |
+
2049 entailment
|
| 2052 |
+
2050 entailment
|
| 2053 |
+
2051 entailment
|
| 2054 |
+
2052 not_entailment
|
| 2055 |
+
2053 not_entailment
|
| 2056 |
+
2054 not_entailment
|
| 2057 |
+
2055 entailment
|
| 2058 |
+
2056 not_entailment
|
| 2059 |
+
2057 not_entailment
|
| 2060 |
+
2058 not_entailment
|
| 2061 |
+
2059 not_entailment
|
| 2062 |
+
2060 not_entailment
|
| 2063 |
+
2061 entailment
|
| 2064 |
+
2062 entailment
|
| 2065 |
+
2063 entailment
|
| 2066 |
+
2064 entailment
|
| 2067 |
+
2065 entailment
|
| 2068 |
+
2066 not_entailment
|
| 2069 |
+
2067 entailment
|
| 2070 |
+
2068 entailment
|
| 2071 |
+
2069 entailment
|
| 2072 |
+
2070 entailment
|
| 2073 |
+
2071 not_entailment
|
| 2074 |
+
2072 entailment
|
| 2075 |
+
2073 entailment
|
| 2076 |
+
2074 not_entailment
|
| 2077 |
+
2075 entailment
|
| 2078 |
+
2076 not_entailment
|
| 2079 |
+
2077 entailment
|
| 2080 |
+
2078 entailment
|
| 2081 |
+
2079 entailment
|
| 2082 |
+
2080 entailment
|
| 2083 |
+
2081 entailment
|
| 2084 |
+
2082 not_entailment
|
| 2085 |
+
2083 entailment
|
| 2086 |
+
2084 not_entailment
|
| 2087 |
+
2085 not_entailment
|
| 2088 |
+
2086 not_entailment
|
| 2089 |
+
2087 entailment
|
| 2090 |
+
2088 entailment
|
| 2091 |
+
2089 entailment
|
| 2092 |
+
2090 not_entailment
|
| 2093 |
+
2091 entailment
|
| 2094 |
+
2092 not_entailment
|
| 2095 |
+
2093 not_entailment
|
| 2096 |
+
2094 entailment
|
| 2097 |
+
2095 not_entailment
|
| 2098 |
+
2096 not_entailment
|
| 2099 |
+
2097 entailment
|
| 2100 |
+
2098 not_entailment
|
| 2101 |
+
2099 entailment
|
| 2102 |
+
2100 not_entailment
|
| 2103 |
+
2101 not_entailment
|
| 2104 |
+
2102 entailment
|
| 2105 |
+
2103 not_entailment
|
| 2106 |
+
2104 not_entailment
|
| 2107 |
+
2105 not_entailment
|
| 2108 |
+
2106 entailment
|
| 2109 |
+
2107 entailment
|
| 2110 |
+
2108 entailment
|
| 2111 |
+
2109 not_entailment
|
| 2112 |
+
2110 entailment
|
| 2113 |
+
2111 entailment
|
| 2114 |
+
2112 entailment
|
| 2115 |
+
2113 not_entailment
|
| 2116 |
+
2114 not_entailment
|
| 2117 |
+
2115 entailment
|
| 2118 |
+
2116 not_entailment
|
| 2119 |
+
2117 entailment
|
| 2120 |
+
2118 not_entailment
|
| 2121 |
+
2119 entailment
|
| 2122 |
+
2120 entailment
|
| 2123 |
+
2121 not_entailment
|
| 2124 |
+
2122 entailment
|
| 2125 |
+
2123 entailment
|
| 2126 |
+
2124 not_entailment
|
| 2127 |
+
2125 not_entailment
|
| 2128 |
+
2126 entailment
|
| 2129 |
+
2127 entailment
|
| 2130 |
+
2128 entailment
|
| 2131 |
+
2129 not_entailment
|
| 2132 |
+
2130 entailment
|
| 2133 |
+
2131 not_entailment
|
| 2134 |
+
2132 not_entailment
|
| 2135 |
+
2133 not_entailment
|
| 2136 |
+
2134 entailment
|
| 2137 |
+
2135 entailment
|
| 2138 |
+
2136 not_entailment
|
| 2139 |
+
2137 not_entailment
|
| 2140 |
+
2138 not_entailment
|
| 2141 |
+
2139 not_entailment
|
| 2142 |
+
2140 entailment
|
| 2143 |
+
2141 not_entailment
|
| 2144 |
+
2142 entailment
|
| 2145 |
+
2143 not_entailment
|
| 2146 |
+
2144 not_entailment
|
| 2147 |
+
2145 entailment
|
| 2148 |
+
2146 entailment
|
| 2149 |
+
2147 not_entailment
|
| 2150 |
+
2148 not_entailment
|
| 2151 |
+
2149 entailment
|
| 2152 |
+
2150 entailment
|
| 2153 |
+
2151 not_entailment
|
| 2154 |
+
2152 entailment
|
| 2155 |
+
2153 entailment
|
| 2156 |
+
2154 entailment
|
| 2157 |
+
2155 entailment
|
| 2158 |
+
2156 entailment
|
| 2159 |
+
2157 entailment
|
| 2160 |
+
2158 not_entailment
|
| 2161 |
+
2159 entailment
|
| 2162 |
+
2160 not_entailment
|
| 2163 |
+
2161 entailment
|
| 2164 |
+
2162 entailment
|
| 2165 |
+
2163 not_entailment
|
| 2166 |
+
2164 not_entailment
|
| 2167 |
+
2165 not_entailment
|
| 2168 |
+
2166 not_entailment
|
| 2169 |
+
2167 entailment
|
| 2170 |
+
2168 not_entailment
|
| 2171 |
+
2169 not_entailment
|
| 2172 |
+
2170 not_entailment
|
| 2173 |
+
2171 entailment
|
| 2174 |
+
2172 entailment
|
| 2175 |
+
2173 not_entailment
|
| 2176 |
+
2174 entailment
|
| 2177 |
+
2175 not_entailment
|
| 2178 |
+
2176 not_entailment
|
| 2179 |
+
2177 entailment
|
| 2180 |
+
2178 entailment
|
| 2181 |
+
2179 entailment
|
| 2182 |
+
2180 not_entailment
|
| 2183 |
+
2181 not_entailment
|
| 2184 |
+
2182 not_entailment
|
| 2185 |
+
2183 not_entailment
|
| 2186 |
+
2184 not_entailment
|
| 2187 |
+
2185 not_entailment
|
| 2188 |
+
2186 entailment
|
| 2189 |
+
2187 entailment
|
| 2190 |
+
2188 not_entailment
|
| 2191 |
+
2189 not_entailment
|
| 2192 |
+
2190 not_entailment
|
| 2193 |
+
2191 entailment
|
| 2194 |
+
2192 entailment
|
| 2195 |
+
2193 not_entailment
|
| 2196 |
+
2194 not_entailment
|
| 2197 |
+
2195 not_entailment
|
| 2198 |
+
2196 not_entailment
|
| 2199 |
+
2197 entailment
|
| 2200 |
+
2198 not_entailment
|
| 2201 |
+
2199 entailment
|
| 2202 |
+
2200 entailment
|
| 2203 |
+
2201 entailment
|
| 2204 |
+
2202 not_entailment
|
| 2205 |
+
2203 not_entailment
|
| 2206 |
+
2204 not_entailment
|
| 2207 |
+
2205 entailment
|
| 2208 |
+
2206 not_entailment
|
| 2209 |
+
2207 not_entailment
|
| 2210 |
+
2208 not_entailment
|
| 2211 |
+
2209 not_entailment
|
| 2212 |
+
2210 not_entailment
|
| 2213 |
+
2211 entailment
|
| 2214 |
+
2212 entailment
|
| 2215 |
+
2213 entailment
|
| 2216 |
+
2214 entailment
|
| 2217 |
+
2215 entailment
|
| 2218 |
+
2216 not_entailment
|
| 2219 |
+
2217 not_entailment
|
| 2220 |
+
2218 not_entailment
|
| 2221 |
+
2219 not_entailment
|
| 2222 |
+
2220 not_entailment
|
| 2223 |
+
2221 entailment
|
| 2224 |
+
2222 not_entailment
|
| 2225 |
+
2223 not_entailment
|
| 2226 |
+
2224 not_entailment
|
| 2227 |
+
2225 not_entailment
|
| 2228 |
+
2226 not_entailment
|
| 2229 |
+
2227 not_entailment
|
| 2230 |
+
2228 not_entailment
|
| 2231 |
+
2229 not_entailment
|
| 2232 |
+
2230 not_entailment
|
| 2233 |
+
2231 entailment
|
| 2234 |
+
2232 not_entailment
|
| 2235 |
+
2233 not_entailment
|
| 2236 |
+
2234 entailment
|
| 2237 |
+
2235 not_entailment
|
| 2238 |
+
2236 entailment
|
| 2239 |
+
2237 not_entailment
|
| 2240 |
+
2238 entailment
|
| 2241 |
+
2239 not_entailment
|
| 2242 |
+
2240 not_entailment
|
| 2243 |
+
2241 not_entailment
|
| 2244 |
+
2242 entailment
|
| 2245 |
+
2243 not_entailment
|
| 2246 |
+
2244 entailment
|
| 2247 |
+
2245 entailment
|
| 2248 |
+
2246 not_entailment
|
| 2249 |
+
2247 not_entailment
|
| 2250 |
+
2248 not_entailment
|
| 2251 |
+
2249 not_entailment
|
| 2252 |
+
2250 entailment
|
| 2253 |
+
2251 entailment
|
| 2254 |
+
2252 entailment
|
| 2255 |
+
2253 entailment
|
| 2256 |
+
2254 entailment
|
| 2257 |
+
2255 entailment
|
| 2258 |
+
2256 entailment
|
| 2259 |
+
2257 not_entailment
|
| 2260 |
+
2258 not_entailment
|
| 2261 |
+
2259 entailment
|
| 2262 |
+
2260 entailment
|
| 2263 |
+
2261 entailment
|
| 2264 |
+
2262 not_entailment
|
| 2265 |
+
2263 not_entailment
|
| 2266 |
+
2264 not_entailment
|
| 2267 |
+
2265 entailment
|
| 2268 |
+
2266 not_entailment
|
| 2269 |
+
2267 entailment
|
| 2270 |
+
2268 not_entailment
|
| 2271 |
+
2269 entailment
|
| 2272 |
+
2270 not_entailment
|
| 2273 |
+
2271 entailment
|
| 2274 |
+
2272 entailment
|
| 2275 |
+
2273 entailment
|
| 2276 |
+
2274 entailment
|
| 2277 |
+
2275 entailment
|
| 2278 |
+
2276 entailment
|
| 2279 |
+
2277 not_entailment
|
| 2280 |
+
2278 not_entailment
|
| 2281 |
+
2279 entailment
|
| 2282 |
+
2280 entailment
|
| 2283 |
+
2281 not_entailment
|
| 2284 |
+
2282 entailment
|
| 2285 |
+
2283 not_entailment
|
| 2286 |
+
2284 not_entailment
|
| 2287 |
+
2285 entailment
|
| 2288 |
+
2286 not_entailment
|
| 2289 |
+
2287 not_entailment
|
| 2290 |
+
2288 entailment
|
| 2291 |
+
2289 entailment
|
| 2292 |
+
2290 entailment
|
| 2293 |
+
2291 entailment
|
| 2294 |
+
2292 entailment
|
| 2295 |
+
2293 entailment
|
| 2296 |
+
2294 entailment
|
| 2297 |
+
2295 entailment
|
| 2298 |
+
2296 entailment
|
| 2299 |
+
2297 entailment
|
| 2300 |
+
2298 entailment
|
| 2301 |
+
2299 entailment
|
| 2302 |
+
2300 entailment
|
| 2303 |
+
2301 entailment
|
| 2304 |
+
2302 not_entailment
|
| 2305 |
+
2303 entailment
|
| 2306 |
+
2304 entailment
|
| 2307 |
+
2305 entailment
|
| 2308 |
+
2306 entailment
|
| 2309 |
+
2307 not_entailment
|
| 2310 |
+
2308 not_entailment
|
| 2311 |
+
2309 not_entailment
|
| 2312 |
+
2310 entailment
|
| 2313 |
+
2311 not_entailment
|
| 2314 |
+
2312 not_entailment
|
| 2315 |
+
2313 entailment
|
| 2316 |
+
2314 not_entailment
|
| 2317 |
+
2315 entailment
|
| 2318 |
+
2316 entailment
|
| 2319 |
+
2317 entailment
|
| 2320 |
+
2318 entailment
|
| 2321 |
+
2319 entailment
|
| 2322 |
+
2320 not_entailment
|
| 2323 |
+
2321 entailment
|
| 2324 |
+
2322 entailment
|
| 2325 |
+
2323 not_entailment
|
| 2326 |
+
2324 entailment
|
| 2327 |
+
2325 entailment
|
| 2328 |
+
2326 not_entailment
|
| 2329 |
+
2327 not_entailment
|
| 2330 |
+
2328 not_entailment
|
| 2331 |
+
2329 not_entailment
|
| 2332 |
+
2330 entailment
|
| 2333 |
+
2331 entailment
|
| 2334 |
+
2332 entailment
|
| 2335 |
+
2333 entailment
|
| 2336 |
+
2334 not_entailment
|
| 2337 |
+
2335 not_entailment
|
| 2338 |
+
2336 entailment
|
| 2339 |
+
2337 entailment
|
| 2340 |
+
2338 entailment
|
| 2341 |
+
2339 not_entailment
|
| 2342 |
+
2340 not_entailment
|
| 2343 |
+
2341 entailment
|
| 2344 |
+
2342 not_entailment
|
| 2345 |
+
2343 entailment
|
| 2346 |
+
2344 not_entailment
|
| 2347 |
+
2345 entailment
|
| 2348 |
+
2346 entailment
|
| 2349 |
+
2347 entailment
|
| 2350 |
+
2348 not_entailment
|
| 2351 |
+
2349 entailment
|
| 2352 |
+
2350 entailment
|
| 2353 |
+
2351 not_entailment
|
| 2354 |
+
2352 entailment
|
| 2355 |
+
2353 not_entailment
|
| 2356 |
+
2354 entailment
|
| 2357 |
+
2355 entailment
|
| 2358 |
+
2356 entailment
|
| 2359 |
+
2357 entailment
|
| 2360 |
+
2358 entailment
|
| 2361 |
+
2359 entailment
|
| 2362 |
+
2360 entailment
|
| 2363 |
+
2361 not_entailment
|
| 2364 |
+
2362 not_entailment
|
| 2365 |
+
2363 not_entailment
|
| 2366 |
+
2364 not_entailment
|
| 2367 |
+
2365 not_entailment
|
| 2368 |
+
2366 not_entailment
|
| 2369 |
+
2367 entailment
|
| 2370 |
+
2368 entailment
|
| 2371 |
+
2369 entailment
|
| 2372 |
+
2370 entailment
|
| 2373 |
+
2371 entailment
|
| 2374 |
+
2372 entailment
|
| 2375 |
+
2373 entailment
|
| 2376 |
+
2374 entailment
|
| 2377 |
+
2375 entailment
|
| 2378 |
+
2376 entailment
|
| 2379 |
+
2377 entailment
|
| 2380 |
+
2378 not_entailment
|
| 2381 |
+
2379 not_entailment
|
| 2382 |
+
2380 not_entailment
|
| 2383 |
+
2381 not_entailment
|
| 2384 |
+
2382 entailment
|
| 2385 |
+
2383 entailment
|
| 2386 |
+
2384 entailment
|
| 2387 |
+
2385 not_entailment
|
| 2388 |
+
2386 entailment
|
| 2389 |
+
2387 entailment
|
| 2390 |
+
2388 not_entailment
|
| 2391 |
+
2389 entailment
|
| 2392 |
+
2390 entailment
|
| 2393 |
+
2391 entailment
|
| 2394 |
+
2392 entailment
|
| 2395 |
+
2393 entailment
|
| 2396 |
+
2394 entailment
|
| 2397 |
+
+2395	entailment
+2396	not_entailment
+2397	entailment
+2398	not_entailment
+2399	entailment
+2400	not_entailment
+2401	not_entailment
+2402	entailment
+2403	not_entailment
+2404	not_entailment
+2405	entailment
+2406	entailment
+2407	entailment
+2408	entailment
+2409	entailment
+2410	not_entailment
+2411	not_entailment
+2412	not_entailment
+2413	entailment
+2414	entailment
+2415	entailment
+2416	entailment
+2417	not_entailment
+2418	not_entailment
+2419	entailment
+2420	entailment
+2421	entailment
+2422	entailment
+2423	not_entailment
+2424	not_entailment
+2425	entailment
+2426	entailment
+2427	entailment
+2428	not_entailment
+2429	not_entailment
+2430	entailment
+2431	not_entailment
+2432	entailment
+2433	not_entailment
+2434	entailment
+2435	entailment
+2436	entailment
+2437	entailment
+2438	not_entailment
+2439	not_entailment
+2440	not_entailment
+2441	entailment
+2442	not_entailment
+2443	not_entailment
+2444	not_entailment
+2445	entailment
+2446	not_entailment
+2447	entailment
+2448	not_entailment
+2449	entailment
+2450	entailment
+2451	entailment
+2452	entailment
+2453	entailment
+2454	not_entailment
+2455	not_entailment
+2456	entailment
+2457	entailment
+2458	not_entailment
+2459	entailment
+2460	entailment
+2461	not_entailment
+2462	entailment
+2463	not_entailment
+2464	entailment
+2465	entailment
+2466	not_entailment
+2467	entailment
+2468	entailment
+2469	entailment
+2470	entailment
+2471	not_entailment
+2472	not_entailment
+2473	not_entailment
+2474	not_entailment
+2475	not_entailment
+2476	entailment
+2477	entailment
+2478	entailment
+2479	entailment
+2480	entailment
+2481	not_entailment
+2482	entailment
+2483	not_entailment
+2484	not_entailment
+2485	entailment
+2486	not_entailment
+2487	not_entailment
+2488	entailment
+2489	not_entailment
+2490	entailment
+2491	not_entailment
+2492	not_entailment
+2493	entailment
+2494	not_entailment
+2495	entailment
+2496	not_entailment
+2497	entailment
+2498	not_entailment
+2499	not_entailment
+2500	entailment
+2501	not_entailment
+2502	entailment
+2503	entailment
+2504	entailment
+2505	entailment
+2506	entailment
+2507	entailment
+2508	not_entailment
+2509	not_entailment
+2510	not_entailment
+2511	entailment
+2512	entailment
+2513	not_entailment
+2514	not_entailment
+2515	entailment
+2516	not_entailment
+2517	not_entailment
+2518	not_entailment
+2519	entailment
+2520	not_entailment
+2521	not_entailment
+2522	entailment
+2523	entailment
+2524	not_entailment
+2525	entailment
+2526	entailment
+2527	entailment
+2528	not_entailment
+2529	entailment
+2530	not_entailment
+2531	not_entailment
+2532	not_entailment
+2533	entailment
+2534	not_entailment
+2535	entailment
+2536	entailment
+2537	not_entailment
+2538	not_entailment
+2539	entailment
+2540	not_entailment
+2541	entailment
+2542	entailment
+2543	entailment
+2544	entailment
+2545	entailment
+2546	not_entailment
+2547	entailment
+2548	not_entailment
+2549	not_entailment
+2550	entailment
+2551	entailment
+2552	entailment
+2553	entailment
+2554	entailment
+2555	entailment
+2556	entailment
+2557	entailment
+2558	entailment
+2559	entailment
+2560	not_entailment
+2561	not_entailment
+2562	entailment
+2563	entailment
+2564	entailment
+2565	entailment
+2566	not_entailment
+2567	entailment
+2568	entailment
+2569	entailment
+2570	entailment
+2571	not_entailment
+2572	not_entailment
+2573	entailment
+2574	not_entailment
+2575	entailment
+2576	not_entailment
+2577	entailment
+2578	not_entailment
+2579	not_entailment
+2580	entailment
+2581	not_entailment
+2582	not_entailment
+2583	entailment
+2584	entailment
+2585	not_entailment
+2586	entailment
+2587	entailment
+2588	not_entailment
+2589	entailment
+2590	entailment
+2591	not_entailment
+2592	not_entailment
+2593	entailment
+2594	not_entailment
+2595	not_entailment
+2596	not_entailment
+2597	not_entailment
+2598	entailment
+2599	not_entailment
+2600	entailment
+2601	entailment
+2602	not_entailment
+2603	entailment
+2604	entailment
+2605	not_entailment
+2606	not_entailment
+2607	not_entailment
+2608	not_entailment
+2609	not_entailment
+2610	not_entailment
+2611	entailment
+2612	not_entailment
+2613	entailment
+2614	not_entailment
+2615	entailment
+2616	entailment
+2617	entailment
+2618	entailment
+2619	entailment
+2620	not_entailment
+2621	entailment
+2622	entailment
+2623	not_entailment
+2624	entailment
+2625	entailment
+2626	entailment
+2627	entailment
+2628	not_entailment
+2629	not_entailment
+2630	entailment
+2631	entailment
+2632	entailment
+2633	not_entailment
+2634	not_entailment
+2635	entailment
+2636	not_entailment
+2637	not_entailment
+2638	not_entailment
+2639	not_entailment
+2640	not_entailment
+2641	entailment
+2642	not_entailment
+2643	not_entailment
+2644	entailment
+2645	entailment
+2646	entailment
+2647	not_entailment
+2648	entailment
+2649	entailment
+2650	not_entailment
+2651	not_entailment
+2652	not_entailment
+2653	not_entailment
+2654	entailment
+2655	not_entailment
+2656	entailment
+2657	not_entailment
+2658	not_entailment
+2659	entailment
+2660	entailment
+2661	not_entailment
+2662	entailment
+2663	entailment
+2664	not_entailment
+2665	not_entailment
+2666	not_entailment
+2667	not_entailment
+2668	entailment
+2669	entailment
+2670	not_entailment
+2671	entailment
+2672	not_entailment
+2673	not_entailment
+2674	not_entailment
+2675	not_entailment
+2676	not_entailment
+2677	entailment
+2678	not_entailment
+2679	not_entailment
+2680	not_entailment
+2681	not_entailment
+2682	not_entailment
+2683	not_entailment
+2684	not_entailment
+2685	not_entailment
+2686	not_entailment
+2687	entailment
+2688	not_entailment
+2689	not_entailment
+2690	not_entailment
+2691	entailment
+2692	entailment
+2693	not_entailment
+2694	not_entailment
+2695	entailment
+2696	entailment
+2697	entailment
+2698	entailment
+2699	entailment
+2700	entailment
+2701	not_entailment
+2702	entailment
+2703	not_entailment
+2704	entailment
+2705	entailment
+2706	not_entailment
+2707	not_entailment
+2708	entailment
+2709	not_entailment
+2710	not_entailment
+2711	entailment
+2712	entailment
+2713	not_entailment
+2714	entailment
+2715	entailment
+2716	entailment
+2717	entailment
+2718	entailment
+2719	not_entailment
+2720	not_entailment
+2721	entailment
+2722	not_entailment
+2723	not_entailment
+2724	not_entailment
+2725	not_entailment
+2726	not_entailment
+2727	not_entailment
+2728	not_entailment
+2729	entailment
+2730	entailment
+2731	not_entailment
+2732	entailment
+2733	not_entailment
+2734	not_entailment
+2735	not_entailment
+2736	entailment
+2737	entailment
+2738	entailment
+2739	not_entailment
+2740	not_entailment
+2741	not_entailment
+2742	entailment
+2743	not_entailment
+2744	entailment
+2745	entailment
+2746	entailment
+2747	entailment
+2748	entailment
+2749	not_entailment
+2750	entailment
+2751	entailment
+2752	entailment
+2753	entailment
+2754	not_entailment
+2755	not_entailment
+2756	not_entailment
+2757	not_entailment
+2758	not_entailment
+2759	not_entailment
+2760	entailment
+2761	not_entailment
+2762	entailment
+2763	entailment
+2764	not_entailment
+2765	entailment
+2766	not_entailment
+2767	not_entailment
+2768	not_entailment
+2769	not_entailment
+2770	entailment
+2771	not_entailment
+2772	entailment
+2773	entailment
+2774	entailment
+2775	not_entailment
+2776	entailment
+2777	entailment
+2778	not_entailment
+2779	entailment
+2780	entailment
+2781	not_entailment
+2782	entailment
+2783	entailment
+2784	not_entailment
+2785	not_entailment
+2786	entailment
+2787	entailment
+2788	not_entailment
+2789	entailment
+2790	not_entailment
+2791	not_entailment
+2792	not_entailment
+2793	entailment
+2794	entailment
+2795	entailment
+2796	entailment
+2797	entailment
+2798	not_entailment
+2799	not_entailment
+2800	entailment
+2801	not_entailment
+2802	not_entailment
+2803	entailment
+2804	not_entailment
+2805	entailment
+2806	not_entailment
+2807	entailment
+2808	not_entailment
+2809	entailment
+2810	entailment
+2811	entailment
+2812	entailment
+2813	entailment
+2814	entailment
+2815	entailment
+2816	not_entailment
+2817	not_entailment
+2818	entailment
+2819	not_entailment
+2820	not_entailment
+2821	not_entailment
+2822	not_entailment
+2823	entailment
+2824	entailment
+2825	entailment
+2826	entailment
+2827	entailment
+2828	entailment
+2829	not_entailment
+2830	not_entailment
+2831	not_entailment
+2832	not_entailment
+2833	not_entailment
+2834	not_entailment
+2835	entailment
+2836	not_entailment
+2837	not_entailment
+2838	not_entailment
+2839	not_entailment
+2840	not_entailment
+2841	entailment
+2842	not_entailment
+2843	entailment
+2844	not_entailment
+2845	entailment
+2846	entailment
+2847	not_entailment
+2848	not_entailment
+2849	not_entailment
+2850	entailment
+2851	not_entailment
+2852	entailment
+2853	entailment
+2854	not_entailment
+2855	entailment
+2856	not_entailment
+2857	entailment
+2858	entailment
+2859	entailment
+2860	entailment
+2861	not_entailment
+2862	entailment
+2863	not_entailment
+2864	not_entailment
+2865	entailment
+2866	entailment
+2867	not_entailment
+2868	entailment
+2869	entailment
+2870	not_entailment
+2871	not_entailment
+2872	entailment
+2873	entailment
+2874	entailment
+2875	not_entailment
+2876	entailment
+2877	entailment
+2878	not_entailment
+2879	entailment
+2880	entailment
+2881	not_entailment
+2882	not_entailment
+2883	not_entailment
+2884	entailment
+2885	entailment
+2886	not_entailment
+2887	entailment
+2888	not_entailment
+2889	entailment
+2890	entailment
+2891	entailment
+2892	not_entailment
+2893	not_entailment
+2894	not_entailment
+2895	entailment
+2896	entailment
+2897	not_entailment
+2898	not_entailment
+2899	entailment
+2900	not_entailment
+2901	entailment
+2902	entailment
+2903	entailment
+2904	not_entailment
+2905	not_entailment
+2906	not_entailment
+2907	not_entailment
+2908	entailment
+2909	entailment
+2910	entailment
+2911	entailment
+2912	not_entailment
+2913	not_entailment
+2914	entailment
+2915	not_entailment
+2916	not_entailment
+2917	not_entailment
+2918	entailment
+2919	entailment
+2920	not_entailment
+2921	not_entailment
+2922	entailment
+2923	entailment
+2924	not_entailment
+2925	not_entailment
+2926	not_entailment
+2927	not_entailment
+2928	not_entailment
+2929	not_entailment
+2930	not_entailment
+2931	entailment
+2932	not_entailment
+2933	entailment
+2934	entailment
+2935	entailment
+2936	entailment
+2937	not_entailment
+2938	not_entailment
+2939	entailment
+2940	not_entailment
+2941	not_entailment
+2942	not_entailment
+2943	entailment
+2944	entailment
+2945	entailment
+2946	not_entailment
+2947	entailment
+2948	entailment
+2949	entailment
+2950	not_entailment
+2951	entailment
+2952	not_entailment
+2953	not_entailment
+2954	not_entailment
+2955	entailment
+2956	not_entailment
+2957	not_entailment
+2958	not_entailment
+2959	not_entailment
+2960	entailment
+2961	not_entailment
+2962	entailment
+2963	not_entailment
+2964	not_entailment
+2965	not_entailment
+2966	not_entailment
+2967	entailment
+2968	not_entailment
+2969	entailment
+2970	entailment
+2971	entailment
+2972	not_entailment
+2973	entailment
+2974	entailment
+2975	not_entailment
+2976	entailment
+2977	entailment
+2978	entailment
+2979	not_entailment
+2980	entailment
+2981	entailment
+2982	not_entailment
+2983	entailment
+2984	entailment
+2985	entailment
+2986	not_entailment
+2987	not_entailment
+2988	entailment
+2989	entailment
+2990	not_entailment
+2991	entailment
+2992	entailment
+2993	not_entailment
+2994	entailment
+2995	entailment
+2996	not_entailment
+2997	entailment
+2998	not_entailment
+2999	not_entailment
nlu/glue_exp/rte/2dr0.15,mlr5e-04,clr1e-03,ep=30.0t=18d21h08m36/all_results.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "epoch": 30.0,
+  "eval_accuracy": 0.8916967509025271,
+  "eval_loss": 0.9903465509414673,
+  "eval_runtime": 0.4597,
+  "eval_samples": 277,
+  "eval_samples_per_second": 602.522,
+  "eval_steps_per_second": 2.175
+}
nlu/glue_exp/rte/2dr0.15,mlr5e-04,clr1e-03,ep=30.0t=18d21h08m36/eval_results.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "epoch": 30.0,
+  "eval_accuracy": 0.8916967509025271,
+  "eval_loss": 0.9903465509414673,
+  "eval_runtime": 0.4597,
+  "eval_samples": 277,
+  "eval_samples_per_second": 602.522,
+  "eval_steps_per_second": 2.175
+}
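The two result files above are plain JSON, so they can be inspected without any project code. A minimal stdlib sketch (the run directory is the folder named in this commit; nothing else is assumed):

import json

run_dir = "nlu/glue_exp/rte/2dr0.15,mlr5e-04,clr1e-03,ep=30.0t=18d21h08m36"
with open(f"{run_dir}/eval_results.json") as f:
    results = json.load(f)  # keys match the diff above: epoch, eval_accuracy, ...
print(f"RTE eval accuracy after {results['epoch']} epochs: {results['eval_accuracy']:.4f}")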
nlu/glue_exp/rte/2dr0.15,mlr5e-04,clr1e-03,ep=30.0t=18d21h08m36/ft/added_tokens.json
ADDED
@@ -0,0 +1,3 @@
+{
+  "[MASK]": 128000
+}
nlu/glue_exp/rte/2dr0.15,mlr5e-04,clr1e-03,ep=30.0t=18d21h08m36/ft/special_tokens_map.json
ADDED
@@ -0,0 +1,15 @@
+{
+  "bos_token": "[CLS]",
+  "cls_token": "[CLS]",
+  "eos_token": "[SEP]",
+  "mask_token": "[MASK]",
+  "pad_token": "[PAD]",
+  "sep_token": "[SEP]",
+  "unk_token": {
+    "content": "[UNK]",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  }
+}
nlu/glue_exp/rte/2dr0.15,mlr5e-04,clr1e-03,ep=30.0t=18d21h08m36/ft/spm.model
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c679fbf93643d19aab7ee10c0b99e460bdbc02fedf34b92b05af343b4af586fd
+size 2464616
nlu/glue_exp/rte/2dr0.15,mlr5e-04,clr1e-03,ep=30.0t=18d21h08m36/ft/tokenizer.json
ADDED
The diff for this file is too large to render.
nlu/glue_exp/rte/2dr0.15,mlr5e-04,clr1e-03,ep=30.0t=18d21h08m36/ft/tokenizer_config.json
ADDED
@@ -0,0 +1,60 @@
+{
+  "added_tokens_decoder": {
+    "0": {
+      "content": "[PAD]",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "[CLS]",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "[SEP]",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "3": {
+      "content": "[UNK]",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "128000": {
+      "content": "[MASK]",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "bos_token": "[CLS]",
+  "clean_up_tokenization_spaces": false,
+  "cls_token": "[CLS]",
+  "do_lower_case": false,
+  "eos_token": "[SEP]",
+  "extra_special_tokens": {},
+  "mask_token": "[MASK]",
+  "model_max_length": 512,
+  "pad_token": "[PAD]",
+  "padding_side": "right",
+  "sep_token": "[SEP]",
+  "sp_model_kwargs": {},
+  "split_by_punct": false,
+  "tokenizer_class": "DebertaV2Tokenizer",
+  "unk_token": "[UNK]",
+  "vocab_type": "spm"
+}
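Since tokenizer_config.json names DebertaV2Tokenizer and the ft/ folder contains the matching spm.model and special-token files, the tokenizer should be reloadable with the standard transformers API. A minimal sketch, assuming transformers is installed and the repo is checked out locally (the premise/hypothesis pair is an arbitrary RTE-style example):

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(
    "nlu/glue_exp/rte/2dr0.15,mlr5e-04,clr1e-03,ep=30.0t=18d21h08m36/ft"
)
enc = tok(
    "No weapons of mass destruction were found.",   # premise
    "Weapons of mass destruction were found.",      # hypothesis
)
print(enc["input_ids"][:8])  # [CLS] id 1 first, per added_tokens_decoder above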
nlu/glue_exp/rte/2dr0.15,mlr5e-04,clr1e-03,ep=30.0t=18d21h08m36/trainer_state.json
ADDED
@@ -0,0 +1,411 @@
+{
+  "best_global_step": 1600,
+  "best_metric": 0.8916967509025271,
+  "best_model_checkpoint": "./glue_exp/rte/dr0.15,mlr5e-04,clr1e-03,ep=30.0t=18d21h08m36/checkpoint-1600",
+  "epoch": 30.0,
+  "eval_steps": 100,
+  "global_step": 2340,
+  "is_hyper_param_search": false,
+  "is_local_process_zero": true,
+  "is_world_process_zero": true,
+  "log_history": [
+    {
+      "epoch": 1.282051282051282,
+      "grad_norm": 3.0298995971679688,
+      "learning_rate": 0.00099,
+      "loss": 0.6535,
+      "step": 100
+    },
+    {
+      "epoch": 1.282051282051282,
+      "eval_accuracy": 0.7978339350180506,
+      "eval_loss": 0.4904983341693878,
+      "eval_runtime": 0.5544,
+      "eval_samples_per_second": 499.638,
+      "eval_steps_per_second": 1.804,
+      "step": 100
+    },
+    {
+      "epoch": 2.564102564102564,
+      "grad_norm": 5.644094944000244,
+      "learning_rate": 0.0009950567941864378,
+      "loss": 0.3826,
+      "step": 200
+    },
+    {
+      "epoch": 2.564102564102564,
+      "eval_accuracy": 0.8303249097472925,
+      "eval_loss": 0.40063101053237915,
+      "eval_runtime": 0.4602,
+      "eval_samples_per_second": 601.893,
+      "eval_steps_per_second": 2.173,
+      "step": 200
+    },
+    {
+      "epoch": 3.8461538461538463,
+      "grad_norm": 4.687230587005615,
+      "learning_rate": 0.000980127008204213,
+      "loss": 0.2649,
+      "step": 300
+    },
+    {
+      "epoch": 3.8461538461538463,
+      "eval_accuracy": 0.8664259927797834,
+      "eval_loss": 0.3754575550556183,
+      "eval_runtime": 0.4604,
+      "eval_samples_per_second": 601.657,
+      "eval_steps_per_second": 2.172,
+      "step": 300
+    },
+    {
+      "epoch": 5.128205128205128,
+      "grad_norm": 2.7542014122009277,
+      "learning_rate": 0.0009555113246230442,
+      "loss": 0.1596,
+      "step": 400
+    },
+    {
+      "epoch": 5.128205128205128,
+      "eval_accuracy": 0.8700361010830325,
+      "eval_loss": 0.4731525778770447,
+      "eval_runtime": 0.4614,
+      "eval_samples_per_second": 600.315,
+      "eval_steps_per_second": 2.167,
+      "step": 400
+    },
+    {
+      "epoch": 6.410256410256411,
+      "grad_norm": 1.4972096681594849,
+      "learning_rate": 0.0009217063307759098,
+      "loss": 0.1288,
+      "step": 500
+    },
+    {
+      "epoch": 6.410256410256411,
+      "eval_accuracy": 0.851985559566787,
+      "eval_loss": 0.6349920034408569,
+      "eval_runtime": 0.4651,
+      "eval_samples_per_second": 595.544,
+      "eval_steps_per_second": 2.15,
+      "step": 500
+    },
+    {
+      "epoch": 7.6923076923076925,
+      "grad_norm": 4.662834644317627,
+      "learning_rate": 0.0008793939956067378,
+      "loss": 0.0925,
+      "step": 600
+    },
+    {
+      "epoch": 7.6923076923076925,
+      "eval_accuracy": 0.8700361010830325,
+      "eval_loss": 0.5567091703414917,
+      "eval_runtime": 0.4608,
+      "eval_samples_per_second": 601.113,
+      "eval_steps_per_second": 2.17,
+      "step": 600
+    },
+    {
+      "epoch": 8.974358974358974,
+      "grad_norm": 7.242234230041504,
+      "learning_rate": 0.0008294279118912267,
+      "loss": 0.0912,
+      "step": 700
+    },
+    {
+      "epoch": 8.974358974358974,
+      "eval_accuracy": 0.8628158844765343,
+      "eval_loss": 0.779809832572937,
+      "eval_runtime": 0.4593,
+      "eval_samples_per_second": 603.125,
+      "eval_steps_per_second": 2.177,
+      "step": 700
+    },
+    {
+      "epoch": 10.256410256410255,
+      "grad_norm": 0.10321938246488571,
+      "learning_rate": 0.0007728160761844938,
+      "loss": 0.0628,
+      "step": 800
+    },
+    {
+      "epoch": 10.256410256410255,
+      "eval_accuracy": 0.8808664259927798,
+      "eval_loss": 0.6088728904724121,
+      "eval_runtime": 0.4514,
+      "eval_samples_per_second": 613.593,
+      "eval_steps_per_second": 2.215,
+      "step": 800
+    },
+    {
+      "epoch": 11.538461538461538,
+      "grad_norm": 0.0666593536734581,
+      "learning_rate": 0.0007107005538862646,
+      "loss": 0.0465,
+      "step": 900
+    },
+    {
+      "epoch": 11.538461538461538,
+      "eval_accuracy": 0.855595667870036,
+      "eval_loss": 0.9525722861289978,
+      "eval_runtime": 0.46,
+      "eval_samples_per_second": 602.236,
+      "eval_steps_per_second": 2.174,
+      "step": 900
+    },
+    {
+      "epoch": 12.820512820512821,
+      "grad_norm": 0.07844485342502594,
+      "learning_rate": 0.0006443344396527548,
+      "loss": 0.0375,
+      "step": 1000
+    },
+    {
+      "epoch": 12.820512820512821,
+      "eval_accuracy": 0.8808664259927798,
+      "eval_loss": 0.7848650217056274,
+      "eval_runtime": 0.4581,
+      "eval_samples_per_second": 604.703,
+      "eval_steps_per_second": 2.183,
+      "step": 1000
+    },
+    {
+      "epoch": 14.102564102564102,
+      "grad_norm": 1.5535038709640503,
+      "learning_rate": 0.0005750565779470368,
+      "loss": 0.0307,
+      "step": 1100
+    },
+    {
+      "epoch": 14.102564102564102,
+      "eval_accuracy": 0.8736462093862816,
+      "eval_loss": 0.8866338133811951,
+      "eval_runtime": 0.456,
+      "eval_samples_per_second": 607.481,
+      "eval_steps_per_second": 2.193,
+      "step": 1100
+    },
+    {
+      "epoch": 15.384615384615385,
+      "grad_norm": 0.0072989086620509624,
+      "learning_rate": 0.0005042645537057819,
+      "loss": 0.0208,
+      "step": 1200
+    },
+    {
+      "epoch": 15.384615384615385,
+      "eval_accuracy": 0.8880866425992779,
+      "eval_loss": 0.8754919171333313,
+      "eval_runtime": 0.4117,
+      "eval_samples_per_second": 672.814,
+      "eval_steps_per_second": 2.429,
+      "step": 1200
+    },
+    {
+      "epoch": 16.666666666666668,
+      "grad_norm": 4.213427543640137,
+      "learning_rate": 0.00043338649799828244,
+      "loss": 0.022,
+      "step": 1300
+    },
+    {
+      "epoch": 16.666666666666668,
+      "eval_accuracy": 0.8880866425992779,
+      "eval_loss": 0.8318637609481812,
+      "eval_runtime": 0.4614,
+      "eval_samples_per_second": 600.39,
+      "eval_steps_per_second": 2.167,
+      "step": 1300
+    },
+    {
+      "epoch": 17.94871794871795,
+      "grad_norm": 0.0056141032837331295,
+      "learning_rate": 0.00036385227745954163,
+      "loss": 0.0111,
+      "step": 1400
+    },
+    {
+      "epoch": 17.94871794871795,
+      "eval_accuracy": 0.8736462093862816,
+      "eval_loss": 0.9078261256217957,
+      "eval_runtime": 0.459,
+      "eval_samples_per_second": 603.53,
+      "eval_steps_per_second": 2.179,
+      "step": 1400
+    },
+    {
+      "epoch": 19.23076923076923,
+      "grad_norm": 0.0017077454831451178,
+      "learning_rate": 0.0002970646487107289,
+      "loss": 0.012,
+      "step": 1500
+    },
+    {
+      "epoch": 19.23076923076923,
+      "eval_accuracy": 0.8880866425992779,
+      "eval_loss": 0.9111207723617554,
+      "eval_runtime": 0.4541,
+      "eval_samples_per_second": 609.988,
+      "eval_steps_per_second": 2.202,
+      "step": 1500
+    },
+    {
+      "epoch": 20.51282051282051,
+      "grad_norm": 0.0007726816111244261,
+      "learning_rate": 0.00023437095968662668,
+      "loss": 0.0053,
+      "step": 1600
+    },
+    {
+      "epoch": 20.51282051282051,
+      "eval_accuracy": 0.8916967509025271,
+      "eval_loss": 0.9903465509414673,
+      "eval_runtime": 0.4604,
+      "eval_samples_per_second": 601.631,
+      "eval_steps_per_second": 2.172,
+      "step": 1600
+    },
+    {
+      "epoch": 21.794871794871796,
+      "grad_norm": 0.0005964247975498438,
+      "learning_rate": 0.00017703596875660643,
+      "loss": 0.006,
+      "step": 1700
+    },
+    {
+      "epoch": 21.794871794871796,
+      "eval_accuracy": 0.8808664259927798,
+      "eval_loss": 1.0135316848754883,
+      "eval_runtime": 0.4563,
+      "eval_samples_per_second": 607.055,
+      "eval_steps_per_second": 2.192,
+      "step": 1700
+    },
+    {
+      "epoch": 23.076923076923077,
+      "grad_norm": 0.00156286614947021,
+      "learning_rate": 0.00012621632997573078,
+      "loss": 0.0045,
+      "step": 1800
+    },
+    {
+      "epoch": 23.076923076923077,
+      "eval_accuracy": 0.8844765342960289,
+      "eval_loss": 1.0323450565338135,
+      "eval_runtime": 0.4591,
+      "eval_samples_per_second": 603.386,
+      "eval_steps_per_second": 2.178,
+      "step": 1800
+    },
+    {
+      "epoch": 24.358974358974358,
+      "grad_norm": 0.06641647219657898,
+      "learning_rate": 8.293725919071515e-05,
+      "loss": 0.0019,
+      "step": 1900
+    },
+    {
+      "epoch": 24.358974358974358,
+      "eval_accuracy": 0.8808664259927798,
+      "eval_loss": 1.0457179546356201,
+      "eval_runtime": 0.4578,
+      "eval_samples_per_second": 605.093,
+      "eval_steps_per_second": 2.184,
+      "step": 1900
+    },
+    {
+      "epoch": 25.641025641025642,
+      "grad_norm": 0.0004662807914428413,
+      "learning_rate": 4.807185172974976e-05,
+      "loss": 0.0048,
+      "step": 2000
+    },
+    {
+      "epoch": 25.641025641025642,
+      "eval_accuracy": 0.8916967509025271,
+      "eval_loss": 1.0062867403030396,
+      "eval_runtime": 0.4592,
+      "eval_samples_per_second": 603.257,
+      "eval_steps_per_second": 2.178,
+      "step": 2000
+    },
+    {
+      "epoch": 26.923076923076923,
+      "grad_norm": 0.0021972227841615677,
+      "learning_rate": 2.2323468913155842e-05,
+      "loss": 0.0012,
+      "step": 2100
+    },
+    {
+      "epoch": 26.923076923076923,
+      "eval_accuracy": 0.8880866425992779,
+      "eval_loss": 1.0196377038955688,
+      "eval_runtime": 0.4445,
+      "eval_samples_per_second": 623.159,
+      "eval_steps_per_second": 2.25,
+      "step": 2100
+    },
+    {
+      "epoch": 28.205128205128204,
+      "grad_norm": 0.0024973733816295862,
+      "learning_rate": 6.211548712633297e-06,
+      "loss": 0.0004,
+      "step": 2200
+    },
+    {
+      "epoch": 28.205128205128204,
+      "eval_accuracy": 0.8844765342960289,
+      "eval_loss": 1.0207929611206055,
+      "eval_runtime": 0.4673,
+      "eval_samples_per_second": 592.762,
+      "eval_steps_per_second": 2.14,
+      "step": 2200
+    },
+    {
+      "epoch": 29.487179487179485,
+      "grad_norm": 0.03873719647526741,
+      "learning_rate": 6.112680940389969e-08,
+      "loss": 0.0007,
+      "step": 2300
+    },
+    {
+      "epoch": 29.487179487179485,
+      "eval_accuracy": 0.8844765342960289,
+      "eval_loss": 1.0203951597213745,
+      "eval_runtime": 0.4603,
+      "eval_samples_per_second": 601.834,
+      "eval_steps_per_second": 2.173,
+      "step": 2300
+    },
+    {
+      "epoch": 30.0,
+      "step": 2340,
+      "total_flos": 1.2464312631552e+16,
+      "train_loss": 0.08723565932934403,
+      "train_runtime": 570.4535,
+      "train_samples_per_second": 130.948,
+      "train_steps_per_second": 4.102
+    }
+  ],
+  "logging_steps": 100,
+  "max_steps": 2340,
+  "num_input_tokens_seen": 0,
+  "num_train_epochs": 30,
+  "save_steps": 100,
+  "stateful_callbacks": {
+    "TrainerControl": {
+      "args": {
+        "should_epoch_stop": false,
+        "should_evaluate": false,
+        "should_log": false,
+        "should_save": true,
+        "should_training_stop": true
+      },
+      "attributes": {}
+    }
+  },
+  "total_flos": 1.2464312631552e+16,
+  "train_batch_size": 32,
+  "trial_name": null,
+  "trial_params": null
+}
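The log_history array above interleaves two kinds of records every 100 steps: training records (with "loss" and "grad_norm") and evaluation records (with "eval_accuracy"). A minimal stdlib sketch for pulling the accuracy curve out of this file as committed:

import json

path = "nlu/glue_exp/rte/2dr0.15,mlr5e-04,clr1e-03,ep=30.0t=18d21h08m36/trainer_state.json"
with open(path) as f:
    state = json.load(f)

# Keep only evaluation records; training records carry no "eval_accuracy" key.
curve = [(e["step"], e["eval_accuracy"])
         for e in state["log_history"] if "eval_accuracy" in e]
best = max(curve, key=lambda p: p[1])
print(best)  # (1600, 0.8916967509025271), matching "best_global_step" above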
nlu/test.sh
ADDED
@@ -0,0 +1,81 @@
+#!/bin/bash
+
+export OMINI_CONFIG=./config/glue.yaml
+export TOKENIZERS_PARALLELISM=true
+
+export CPATH=$CPATH:$CUDA_INCLUDE_PATH
+export CPLUS_INCLUDE_PATH=$CPLUS_INCLUDE_PATH:$CUDA_INCLUDE_PATH
+# echo "CPATH is set to: $CPATH"
+# echo "CPLUS_INCLUDE_PATH is set to: $CPLUS_INCLUDE_PATH"
+# export PYTHONPATH=$PYTHONPATH:$(pwd)/../nl_tasks # for rpeft
+
+export WANDB_PROJECT="DeBERTaV3-GLUE-Test"
+
+export OMP_NUM_THREADS=1
+export MKL_NUM_THREADS=1
+export OPENBLAS_NUM_THREADS=1
+export NUMEXPR_NUM_THREADS=1
+
+date +"%F %T"
+
+MODEL_LRS=("1e-4")
+CLS_LRS=("2e-3")
+DROPOUT_RATES=("0.1")
+TEXT=("oft" "boft" "hra" "loco")
+
+STEPS=2000
+EPOCHS=11
+for m_lr in "${MODEL_LRS[@]}"; do
+  for c_lr in "${CLS_LRS[@]}"; do
+    for drop_out in "${DROPOUT_RATES[@]}"; do
+
+      echo ">>> Params: model_lr=$m_lr, cls_lr=$c_lr, dropout=$drop_out, step=$STEPS, epoch=$EPOCHS"
+      python -m src.test \
+        --config_path $OMINI_CONFIG --trainer_args.output_dir "./glue_testYY" --run_text 'oft' \
+        --rotation_adapter_config.num_rotations 1 --rotation_adapter_config.r 6 \
+        --trainer_args.gradient_accumulation_steps 1 \
+        --glue.is_debug False --rotation_adapter_config.drop_out "$drop_out" \
+        --glue.task_name qnli --trainer_args.metric_for_best_model accuracy \
+        --trainer_args.num_train_epochs $EPOCHS --trainer_args.max_steps=405 --trainer_args.warmup_steps 200 \
+        --glue.model_lr "$m_lr" --glue.cls_lr "$c_lr" \
+        --trainer_args.logging_step $STEPS --trainer_args.eval_step $STEPS --trainer_args.save_steps $STEPS \
+        --trainer_args.report_to none \
+        --glue.max_seq_length 512 \
+        --trainer_args.per_device_train_batch_size 64 --trainer_args.per_device_eval_batch_size 128 \
+        --trainer_args.eval_strategy '"no"' \
+        --trainer_args.load_best_model_at_end False \
+        --trainer_args.save_strategy '"no"'
+
+
+    done
+  done
+done
+
+date +"%F %T"
+
+# for m_lr in "${MODEL_LRS[@]}"; do
+#   for c_lr in "${CLS_LRS[@]}"; do
+#     for drop_out in "${DROPOUT_RATES[@]}"; do
+#       for text in "${TEXT[@]}"; do
+#         echo ">>> Params: model_lr=$m_lr, cls_lr=$c_lr, dropout=$drop_out, step=$STEPS, epoch=$EPOCHS"
+#         python -m src.test \
+#           --config_path $OMINI_CONFIG --trainer_args.output_dir "./glue_testYY" --run_text "$text" \
+#           --rotation_adapter_config.num_rotations 1 --rotation_adapter_config.r 16 \
+#           --trainer_args.gradient_accumulation_steps 1 \
+#           --glue.is_debug False --rotation_adapter_config.drop_out "$drop_out" \
+#           --glue.task_name qnli --trainer_args.metric_for_best_model accuracy \
+#           --trainer_args.num_train_epochs $EPOCHS --trainer_args.max_steps=405 --trainer_args.warmup_steps 200 \
+#           --glue.model_lr "$m_lr" --glue.cls_lr "$c_lr" \
+#           --trainer_args.logging_step $STEPS --trainer_args.eval_step $STEPS --trainer_args.save_steps $STEPS \
+#           --trainer_args.report_to none \
+#           --glue.max_seq_length 512 \
+#           --trainer_args.per_device_train_batch_size 64 --trainer_args.per_device_eval_batch_size 128 \
+#           --trainer_args.eval_strategy '"no"' \
+#           --trainer_args.load_best_model_at_end False \
+#           --trainer_args.save_strategy '"no"'
+#       done
+
+#     done
+#   done
+# done
+
nlu/training_metrics_bs8.json
ADDED
@@ -0,0 +1,26 @@
+{
+  "metadata": {
+    "run_name": "Experiment_BatchSize_8",
+    "timestamp": "2025-12-25 17:31:34",
+    "python_version": "3.11.3",
+    "pytorch_version": "2.9.0+cu128",
+    "gpu_info": {
+      "name": "NVIDIA H200",
+      "count": 1,
+      "capability": [
+        9,
+        0
+      ]
+    },
+    "configuration": {
+      "batch_size_per_device": 64,
+      "learning_rate": 0.0002,
+      "max_steps": 405,
+      "num_train_epochs": 11.0,
+      "fp16": false,
+      "bf16": false,
+      "optim": "adamw_torch"
+    }
+  },
+  "metrics": []
+}
omini/train_flux/train_spatial_alignment.py
ADDED
@@ -0,0 +1,211 @@
+import torch
+from torch.utils.data import Dataset
+import torchvision.transforms as T
+import os
+import random
+import numpy as np
+
+from PIL import Image, ImageDraw
+
+from datasets import load_dataset
+
+from .trainer import OminiModel, get_config, train
+from ..pipeline.flux_omini import Condition, convert_to_condition, generate
+
+
+class ImageConditionDataset(Dataset):
+    def __init__(
+        self,
+        base_dataset,
+        condition_size=(512, 512),
+        target_size=(512, 512),
+        condition_type: str = "canny",
+        drop_text_prob: float = 0.1,
+        drop_image_prob: float = 0.1,
+        return_pil_image: bool = False,
+        position_scale=1.0,
+    ):
+        self.base_dataset = base_dataset
+        self.condition_size = condition_size
+        self.target_size = target_size
+        self.condition_type = condition_type
+        self.drop_text_prob = drop_text_prob
+        self.drop_image_prob = drop_image_prob
+        self.return_pil_image = return_pil_image
+        self.position_scale = position_scale
+
+        self.to_tensor = T.ToTensor()
+
+    def __len__(self):
+        return len(self.base_dataset)
+
+    def __get_condition__(self, image, condition_type):
+        condition_size = self.condition_size
+        position_delta = np.array([0, 0])
+        if condition_type in ["canny", "coloring", "deblurring", "depth"]:
+            image, kwargs = image.resize(condition_size), {}
+            if condition_type == "deblurring":
+                blur_radius = random.randint(1, 10)
+                kwargs["blur_radius"] = blur_radius
+            condition_img = convert_to_condition(condition_type, image, **kwargs)
+        elif condition_type == "depth_pred":
+            depth_img = convert_to_condition("depth", image)
+            condition_img = image.resize(condition_size)
+            image = depth_img.resize(condition_size)
+        elif condition_type == "fill":
+            condition_img = image.resize(condition_size).convert("RGB")
+            w, h = image.size
+            x1, x2 = sorted([random.randint(0, w), random.randint(0, w)])
+            y1, y2 = sorted([random.randint(0, h), random.randint(0, h)])
+            mask = Image.new("L", image.size, 0)
+            draw = ImageDraw.Draw(mask)
+            draw.rectangle([x1, y1, x2, y2], fill=255)
+            if random.random() > 0.5:
+                mask = Image.eval(mask, lambda a: 255 - a)
+            condition_img = Image.composite(
+                image, Image.new("RGB", image.size, (0, 0, 0)), mask
+            )
+        elif condition_type == "sr":
+            condition_img = image.resize(condition_size)
+            position_delta = np.array([0, -condition_size[0] // 16])
+        else:
+            raise ValueError(f"Condition type {condition_type} is not implemented.")
+        return condition_img, position_delta
+
+    def __getitem__(self, idx):
+        image = self.base_dataset[idx]["jpg"]
+        image = image.resize(self.target_size).convert("RGB")
+        description = self.base_dataset[idx]["json"]["prompt"]
+
+        condition_size = self.condition_size
+        position_scale = self.position_scale
+
+        condition_img, position_delta = self.__get_condition__(
+            image, self.condition_type
+        )
+
+        # Randomly drop text or image (for training)
+        drop_text = random.random() < self.drop_text_prob
+        drop_image = random.random() < self.drop_image_prob
+
+        if drop_text:
+            description = ""
+        if drop_image:
+            condition_img = Image.new("RGB", condition_size, (0, 0, 0))
+
+        return {
+            "image": self.to_tensor(image),
+            "condition_0": self.to_tensor(condition_img),
+            "condition_type_0": self.condition_type,
+            "position_delta_0": position_delta,
+            "description": description,
+            **({"pil_image": [image, condition_img]} if self.return_pil_image else {}),
+            **({"position_scale_0": position_scale} if position_scale != 1.0 else {}),
+        }
+
+
+@torch.no_grad()
+def test_function(model, save_path, file_name):
+    condition_size = model.training_config["dataset"]["condition_size"]
+    target_size = model.training_config["dataset"]["target_size"]
+
+    position_delta = model.training_config["dataset"].get("position_delta", [0, 0])
+    position_scale = model.training_config["dataset"].get("position_scale", 1.0)
+
+    adapter = model.adapter_names[2]
+    condition_type = model.training_config["condition_type"]
+    test_list = []
+
+    if condition_type in ["canny", "coloring", "deblurring", "depth"]:
+        image = Image.open("assets/vase_hq.jpg")
+        image = image.resize(condition_size)
+        condition_img = convert_to_condition(condition_type, image, 5)
+        condition = Condition(condition_img, adapter, position_delta, position_scale)
+        test_list.append((condition, "A beautiful vase on a table."))
+    elif condition_type == "depth_pred":
+        image = Image.open("assets/vase_hq.jpg")
+        image = image.resize(condition_size)
+        condition = Condition(image, adapter, position_delta, position_scale)
+        test_list.append((condition, "A beautiful vase on a table."))
+    elif condition_type == "fill":
+        condition_img = (
+            Image.open("./assets/vase_hq.jpg").resize(condition_size).convert("RGB")
+        )
+        mask = Image.new("L", condition_img.size, 0)
+        draw = ImageDraw.Draw(mask)
+        a = condition_img.size[0] // 4
+        b = a * 3
+        draw.rectangle([a, a, b, b], fill=255)
+        condition_img = Image.composite(
+            condition_img, Image.new("RGB", condition_img.size, (0, 0, 0)), mask
+        )
+        condition = Condition(condition_img, adapter, position_delta, position_scale)
+        test_list.append((condition, "A beautiful vase on a table."))
+    elif condition_type == "super_resolution":
+        image = Image.open("assets/vase_hq.jpg")
+        image = image.resize(condition_size)
+        condition = Condition(image, adapter, position_delta, position_scale)
+        test_list.append((condition, "A beautiful vase on a table."))
+    else:
+        raise NotImplementedError
+    os.makedirs(save_path, exist_ok=True)
+    for i, (condition, prompt) in enumerate(test_list):
+        generator = torch.Generator(device=model.device)
+        generator.manual_seed(42)
+
+        res = generate(
+            model.flux_pipe,
+            prompt=prompt,
+            conditions=[condition],
+            height=target_size[1],
+            width=target_size[0],
+            generator=generator,
+            model_config=model.model_config,
+            kv_cache=model.model_config.get("independent_condition", False),
+        )
+        file_path = os.path.join(save_path, f"{file_name}_{condition_type}_{i}.jpg")
+        res.images[0].save(file_path)
+
+
+def main():
+    # Initialize
+    config = get_config()
+    training_config = config["train"]
+    torch.cuda.set_device(int(os.environ.get("LOCAL_RANK", 0)))
+
+    # Load dataset text-to-image-2M
+    dataset = load_dataset(
+        "webdataset",
+        data_files={"train": training_config["dataset"]["urls"]},
+        split="train",
+        cache_dir="cache/t2i2m",
+        num_proc=32,
+    )
+
+    # Initialize custom dataset
+    dataset = ImageConditionDataset(
+        dataset,
+        condition_size=training_config["dataset"]["condition_size"],
+        target_size=training_config["dataset"]["target_size"],
+        condition_type=training_config["condition_type"],
+        drop_text_prob=training_config["dataset"]["drop_text_prob"],
+        drop_image_prob=training_config["dataset"]["drop_image_prob"],
+        position_scale=training_config["dataset"].get("position_scale", 1.0),
+    )
+
+    # Initialize model
+    trainable_model = OminiModel(
+        flux_pipe_id=config["flux_path"],
+        lora_config=training_config["lora_config"],
+        device="cuda",
+        dtype=getattr(torch, config["dtype"]),
+        optimizer_config=training_config["optimizer"],
+        model_config=config.get("model", {}),
+        gradient_checkpointing=training_config.get("gradient_checkpointing", False),
+    )
+
+    train(dataset, trainable_model, config, test_function)
+
+
+if __name__ == "__main__":
+    main()
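The "fill" branches above build a random rectangular inpainting mask and black out everything outside it (or, half the time, inside it). A standalone PIL sketch of that masking step, using a synthetic stand-in image so it runs without the dataset:

import random
from PIL import Image, ImageDraw

image = Image.new("RGB", (512, 512), (128, 160, 200))  # stand-in for a dataset image

# Random axis-aligned rectangle, as in ImageConditionDataset.__get_condition__.
w, h = image.size
x1, x2 = sorted(random.randint(0, w) for _ in range(2))
y1, y2 = sorted(random.randint(0, h) for _ in range(2))
mask = Image.new("L", image.size, 0)
ImageDraw.Draw(mask).rectangle([x1, y1, x2, y2], fill=255)
if random.random() > 0.5:  # invert the mask: keep the outside instead of the inside
    mask = Image.eval(mask, lambda a: 255 - a)

# White mask pixels keep the image; black pixels fall through to the black fill.
condition_img = Image.composite(image, Image.new("RGB", image.size, (0, 0, 0)), mask)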
omini/train_flux/train_spatial_alignment_rotation.py
ADDED
@@ -0,0 +1,211 @@
+import torch
+from torch.utils.data import Dataset
+import torchvision.transforms as T
+import os
+import random
+import numpy as np
+
+from PIL import Image, ImageDraw
+
+from datasets import load_dataset
+
+from .trainer_rotation import OminiModelRotation, get_config, train
+from ..pipeline.flux_omini import Condition, convert_to_condition, generate
+
+
+class ImageConditionDataset(Dataset):
+    def __init__(
+        self,
+        base_dataset,
+        condition_size=(512, 512),
+        target_size=(512, 512),
+        condition_type: str = "canny",
+        drop_text_prob: float = 0.1,
+        drop_image_prob: float = 0.1,
+        return_pil_image: bool = False,
+        position_scale=1.0,
+    ):
+        self.base_dataset = base_dataset
+        self.condition_size = condition_size
+        self.target_size = target_size
+        self.condition_type = condition_type
+        self.drop_text_prob = drop_text_prob
+        self.drop_image_prob = drop_image_prob
+        self.return_pil_image = return_pil_image
+        self.position_scale = position_scale
+
+        self.to_tensor = T.ToTensor()
+
+    def __len__(self):
+        return len(self.base_dataset)
+
+    def __get_condition__(self, image, condition_type):
+        condition_size = self.condition_size
+        position_delta = np.array([0, 0])
+        if condition_type in ["canny", "coloring", "deblurring", "depth"]:
+            image, kwargs = image.resize(condition_size), {}
+            if condition_type == "deblurring":
+                blur_radius = random.randint(1, 10)
+                kwargs["blur_radius"] = blur_radius
+            condition_img = convert_to_condition(condition_type, image, **kwargs)
+        elif condition_type == "depth_pred":
+            depth_img = convert_to_condition("depth", image)
+            condition_img = image.resize(condition_size)
+            image = depth_img.resize(condition_size)
+        elif condition_type == "fill":
+            condition_img = image.resize(condition_size).convert("RGB")
+            w, h = image.size
+            x1, x2 = sorted([random.randint(0, w), random.randint(0, w)])
+            y1, y2 = sorted([random.randint(0, h), random.randint(0, h)])
+            mask = Image.new("L", image.size, 0)
+            draw = ImageDraw.Draw(mask)
+            draw.rectangle([x1, y1, x2, y2], fill=255)
+            if random.random() > 0.5:
+                mask = Image.eval(mask, lambda a: 255 - a)
+            condition_img = Image.composite(
+                image, Image.new("RGB", image.size, (0, 0, 0)), mask
+            )
+        elif condition_type == "sr":
+            condition_img = image.resize(condition_size)
+            position_delta = np.array([0, -condition_size[0] // 16])
+        else:
+            raise ValueError(f"Condition type {condition_type} is not implemented.")
+        return condition_img, position_delta
+
+    def __getitem__(self, idx):
+        image = self.base_dataset[idx]["jpg"]
+        image = image.resize(self.target_size).convert("RGB")
+        description = self.base_dataset[idx]["json"]["prompt"]
+
+        condition_size = self.condition_size
+        position_scale = self.position_scale
+
+        condition_img, position_delta = self.__get_condition__(
+            image, self.condition_type
+        )
+
+        # Randomly drop text or image (for training)
+        drop_text = random.random() < self.drop_text_prob
+        drop_image = random.random() < self.drop_image_prob
+
+        if drop_text:
+            description = ""
+        if drop_image:
+            condition_img = Image.new("RGB", condition_size, (0, 0, 0))
+
+        return {
+            "image": self.to_tensor(image),
+            "condition_0": self.to_tensor(condition_img),
+            "condition_type_0": self.condition_type,
+            "position_delta_0": position_delta,
+            "description": description,
+            **({"pil_image": [image, condition_img]} if self.return_pil_image else {}),
+            **({"position_scale_0": position_scale} if position_scale != 1.0 else {}),
+        }
+
+
+@torch.no_grad()
+def test_function(model, save_path, file_name):
+    condition_size = model.training_config["dataset"]["condition_size"]
+    target_size = model.training_config["dataset"]["target_size"]
+
+    position_delta = model.training_config["dataset"].get("position_delta", [0, 0])
+    position_scale = model.training_config["dataset"].get("position_scale", 1.0)
+
+    adapter = model.adapter_names[2]
+    condition_type = model.training_config["condition_type"]
+    test_list = []
+
+    if condition_type in ["canny", "coloring", "deblurring", "depth"]:
+        image = Image.open("assets/vase_hq.jpg")
+        image = image.resize(condition_size)
+        condition_img = convert_to_condition(condition_type, image, 5)
+        condition = Condition(condition_img, adapter, position_delta, position_scale)
+        test_list.append((condition, "A beautiful vase on a table."))
+    elif condition_type == "depth_pred":
+        image = Image.open("assets/vase_hq.jpg")
+        image = image.resize(condition_size)
+        condition = Condition(image, adapter, position_delta, position_scale)
+        test_list.append((condition, "A beautiful vase on a table."))
+    elif condition_type == "fill":
+        condition_img = (
+            Image.open("./assets/vase_hq.jpg").resize(condition_size).convert("RGB")
+        )
+        mask = Image.new("L", condition_img.size, 0)
+        draw = ImageDraw.Draw(mask)
+        a = condition_img.size[0] // 4
+        b = a * 3
+        draw.rectangle([a, a, b, b], fill=255)
+        condition_img = Image.composite(
+            condition_img, Image.new("RGB", condition_img.size, (0, 0, 0)), mask
+        )
+        condition = Condition(condition_img, adapter, position_delta, position_scale)
+        test_list.append((condition, "A beautiful vase on a table."))
+    elif condition_type == "super_resolution":
+        image = Image.open("assets/vase_hq.jpg")
+        image = image.resize(condition_size)
+        condition = Condition(image, adapter, position_delta, position_scale)
+        test_list.append((condition, "A beautiful vase on a table."))
+    else:
+        raise NotImplementedError
+    os.makedirs(save_path, exist_ok=True)
+    for i, (condition, prompt) in enumerate(test_list):
+        generator = torch.Generator(device=model.device)
+        generator.manual_seed(42)
+
+        res = generate(
+            model.flux_pipe,
+            prompt=prompt,
+            conditions=[condition],
+            height=target_size[1],
+            width=target_size[0],
+            generator=generator,
+            model_config=model.model_config,
+            kv_cache=model.model_config.get("independent_condition", False),
+        )
+        file_path = os.path.join(save_path, f"{file_name}_{condition_type}_{i}.jpg")
+        res.images[0].save(file_path)
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
def main():
|
| 171 |
+
# Initialize
|
| 172 |
+
config = get_config()
|
| 173 |
+
training_config = config["train"]
|
| 174 |
+
torch.cuda.set_device(int(os.environ.get("LOCAL_RANK", 0)))
|
| 175 |
+
|
| 176 |
+
# Load dataset text-to-image-2M
|
| 177 |
+
dataset = load_dataset(
|
| 178 |
+
"webdataset",
|
| 179 |
+
data_files={"train": training_config["dataset"]["urls"]},
|
| 180 |
+
split="train",
|
| 181 |
+
cache_dir="cache/t2i2m",
|
| 182 |
+
num_proc=32,
|
| 183 |
+
)
|
| 184 |
+
|
| 185 |
+
# Initialize custom dataset
|
| 186 |
+
dataset = ImageConditionDataset(
|
| 187 |
+
dataset,
|
| 188 |
+
condition_size=training_config["dataset"]["condition_size"],
|
| 189 |
+
target_size=training_config["dataset"]["target_size"],
|
| 190 |
+
condition_type=training_config["condition_type"],
|
| 191 |
+
drop_text_prob=training_config["dataset"]["drop_text_prob"],
|
| 192 |
+
drop_image_prob=training_config["dataset"]["drop_image_prob"],
|
| 193 |
+
position_scale=training_config["dataset"].get("position_scale", 1.0),
|
| 194 |
+
)
|
| 195 |
+
|
| 196 |
+
# Initialize model
|
| 197 |
+
trainable_model = OminiModelRotation(
|
| 198 |
+
flux_pipe_id=config["flux_path"],
|
| 199 |
+
rotation_adapter_config=training_config["rotation_adapter_config"],
|
| 200 |
+
device=f"cuda",
|
| 201 |
+
dtype=getattr(torch, config["dtype"]),
|
| 202 |
+
optimizer_config=training_config["optimizer"],
|
| 203 |
+
model_config=config.get("model", {}),
|
| 204 |
+
gradient_checkpointing=training_config.get("gradient_checkpointing", False),
|
| 205 |
+
)
|
| 206 |
+
|
| 207 |
+
train(dataset, trainable_model, config, test_function)
|
| 208 |
+
|
| 209 |
+
|
| 210 |
+
if __name__ == "__main__":
|
| 211 |
+
main()
|
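Note: a minimal sketch (not part of the upload) of the position-delta arithmetic used by the "sr" branch and by test_function above. It assumes only what the code itself encodes, namely that one latent token covers 16 pixels, so a (512, 512) condition is shifted left by a full grid width to sit beside the target tokens.

import numpy as np

condition_size = (512, 512)               # (width, height), the dataset default
tokens_per_row = condition_size[0] // 16  # 16 px per latent token -> 32 tokens
position_delta = np.array([0, -tokens_per_row])
print(position_delta)                     # [  0 -32]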
omini/train_flux/train_subject.py
ADDED
@@ -0,0 +1,205 @@
import torch
from torch.utils.data import Dataset
import torchvision.transforms as T
import os
import random
import numpy as np

from PIL import Image

from datasets import load_dataset

from .trainer import OminiModel, get_config, train
from ..pipeline.flux_omini import Condition, generate


class Subject200KDataset(Dataset):
    def __init__(
        self,
        base_dataset,
        condition_size=(512, 512),
        target_size=(512, 512),
        image_size: int = 512,
        padding: int = 0,
        condition_type: str = "subject",
        drop_text_prob: float = 0.1,
        drop_image_prob: float = 0.1,
        return_pil_image: bool = False,
    ):
        self.base_dataset = base_dataset
        self.condition_size = condition_size
        self.target_size = target_size
        self.image_size = image_size
        self.padding = padding
        self.condition_type = condition_type
        self.drop_text_prob = drop_text_prob
        self.drop_image_prob = drop_image_prob
        self.return_pil_image = return_pil_image

        self.to_tensor = T.ToTensor()

    def __len__(self):
        return len(self.base_dataset) * 2

    def __getitem__(self, idx):
        # If target is 0, the left image is the target and the right image is
        # the condition
        target = idx % 2
        item = self.base_dataset[idx // 2]

        # Crop the image into the target and condition halves
        image = item["image"]
        left_img = image.crop(
            (
                self.padding,
                self.padding,
                self.image_size + self.padding,
                self.image_size + self.padding,
            )
        )
        right_img = image.crop(
            (
                self.image_size + self.padding * 2,
                self.padding,
                self.image_size * 2 + self.padding * 2,
                self.image_size + self.padding,
            )
        )

        # Get the target and condition image
        target_image, condition_img = (
            (left_img, right_img) if target == 0 else (right_img, left_img)
        )

        # Resize the images
        condition_img = condition_img.resize(self.condition_size).convert("RGB")
        target_image = target_image.resize(self.target_size).convert("RGB")

        # Get the description
        description = item["description"][
            "description_0" if target == 0 else "description_1"
        ]

        # Randomly drop text or image
        drop_text = random.random() < self.drop_text_prob
        drop_image = random.random() < self.drop_image_prob
        if drop_text:
            description = ""
        if drop_image:
            condition_img = Image.new("RGB", self.condition_size, (0, 0, 0))

        # 16 is the downscale factor of the image.
        # More details about position delta can be found in the documentation.
        position_delta = np.array([0, -self.condition_size[0] // 16])

        return {
            "image": self.to_tensor(target_image),
            "condition_0": self.to_tensor(condition_img),
            "condition_type_0": self.condition_type,
            "position_delta_0": position_delta,
            "description": description,
            **({"pil_image": image} if self.return_pil_image else {}),
        }


@torch.no_grad()
def test_function(model, save_path, file_name):
    condition_size = model.training_config["dataset"]["condition_size"]
    target_size = model.training_config["dataset"]["target_size"]

    # More details about position delta can be found in the documentation.
    position_delta = [0, -condition_size[0] // 16]

    # Set adapters
    adapter = model.adapter_names[2]
    condition_type = model.training_config["condition_type"]
    test_list = []

    # Test case 1 (in-distribution test case)
    image = Image.open("assets/test_in.jpg")
    image = image.resize(condition_size)
    prompt = "Resting on the picnic table at a lakeside campsite, it's caught in the golden glow of early morning, with mist rising from the water and tall pines casting long shadows behind the scene."
    condition = Condition(image, adapter, position_delta)
    test_list.append((condition, prompt))

    # Test case 2 (out-of-distribution test case)
    image = Image.open("assets/test_out.jpg")
    image = image.resize(condition_size)
    prompt = "In a bright room. It is placed on a table."
    condition = Condition(image, adapter, position_delta)
    test_list.append((condition, prompt))

    # Generate images
    os.makedirs(save_path, exist_ok=True)
    for i, (condition, prompt) in enumerate(test_list):
        generator = torch.Generator(device=model.device)
        generator.manual_seed(42)

        res = generate(
            model.flux_pipe,
            prompt=prompt,
            conditions=[condition],
            height=target_size[1],
            width=target_size[0],
            generator=generator,
            model_config=model.model_config,
            kv_cache=model.model_config.get("independent_condition", False),
        )
        file_path = os.path.join(save_path, f"{file_name}_{condition_type}_{i}.jpg")
        res.images[0].save(file_path)


def main():
    # Initialize
    config = get_config()
    training_config = config["train"]
    torch.cuda.set_device(int(os.environ.get("LOCAL_RANK", 0)))

    # Initialize raw dataset
    raw_dataset = load_dataset("Yuanshi/Subjects200K")

    # Define filter function to filter out low-quality images from Subjects200K
    def filter_func(item):
        if not item.get("quality_assessment"):
            return False
        return all(
            item["quality_assessment"].get(key, 0) >= 5
            for key in ["compositeStructure", "objectConsistency", "imageQuality"]
        )

    # Filter dataset
    if not os.path.exists("./cache/dataset"):
        os.makedirs("./cache/dataset")
    data_valid = raw_dataset["train"].filter(
        filter_func,
        num_proc=16,
        cache_file_name="./cache/dataset/data_valid.arrow",
    )

    # Initialize the dataset
    dataset = Subject200KDataset(
        data_valid,
        condition_size=training_config["dataset"]["condition_size"],
        target_size=training_config["dataset"]["target_size"],
        image_size=training_config["dataset"]["image_size"],
        padding=training_config["dataset"]["padding"],
        condition_type=training_config["condition_type"],
        drop_text_prob=training_config["dataset"]["drop_text_prob"],
        drop_image_prob=training_config["dataset"]["drop_image_prob"],
    )

    # Initialize model
    trainable_model = OminiModel(
        flux_pipe_id=config["flux_path"],
        lora_config=training_config["lora_config"],
        device="cuda",
        dtype=getattr(torch, config["dtype"]),
        optimizer_config=training_config["optimizer"],
        model_config=config.get("model", {}),
        gradient_checkpointing=training_config.get("gradient_checkpointing", False),
    )

    train(dataset, trainable_model, config, test_function)


if __name__ == "__main__":
    main()
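The crop boxes in Subject200KDataset.__getitem__ follow the side-by-side layout of Subjects200K pairs. A small sketch of the geometry (illustrative numbers, not taken from the config files):

image_size, padding = 512, 8              # hypothetical values for illustration
left_box = (padding, padding, image_size + padding, image_size + padding)
right_box = (
    image_size + padding * 2,
    padding,
    image_size * 2 + padding * 2,
    image_size + padding,
)
print(left_box)   # (8, 8, 520, 520)
print(right_box)  # (528, 8, 1040, 520)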
omini/train_flux/train_subject_rotation.py
ADDED
@@ -0,0 +1,205 @@
import torch
from torch.utils.data import Dataset
import torchvision.transforms as T
import os
import random
import numpy as np

from PIL import Image

from datasets import load_dataset

from .trainer_rotation import OminiModelRotation, get_config, train
from ..pipeline.flux_omini import Condition, generate


class Subject200KDataset(Dataset):
    def __init__(
        self,
        base_dataset,
        condition_size=(512, 512),
        target_size=(512, 512),
        image_size: int = 512,
        padding: int = 0,
        condition_type: str = "subject",
        drop_text_prob: float = 0.1,
        drop_image_prob: float = 0.1,
        return_pil_image: bool = False,
    ):
        self.base_dataset = base_dataset
        self.condition_size = condition_size
        self.target_size = target_size
        self.image_size = image_size
        self.padding = padding
        self.condition_type = condition_type
        self.drop_text_prob = drop_text_prob
        self.drop_image_prob = drop_image_prob
        self.return_pil_image = return_pil_image

        self.to_tensor = T.ToTensor()

    def __len__(self):
        return len(self.base_dataset) * 2

    def __getitem__(self, idx):
        # If target is 0, the left image is the target and the right image is
        # the condition
        target = idx % 2
        item = self.base_dataset[idx // 2]

        # Crop the image into the target and condition halves
        image = item["image"]
        left_img = image.crop(
            (
                self.padding,
                self.padding,
                self.image_size + self.padding,
                self.image_size + self.padding,
            )
        )
        right_img = image.crop(
            (
                self.image_size + self.padding * 2,
                self.padding,
                self.image_size * 2 + self.padding * 2,
                self.image_size + self.padding,
            )
        )

        # Get the target and condition image
        target_image, condition_img = (
            (left_img, right_img) if target == 0 else (right_img, left_img)
        )

        # Resize the images
        condition_img = condition_img.resize(self.condition_size).convert("RGB")
        target_image = target_image.resize(self.target_size).convert("RGB")

        # Get the description
        description = item["description"][
            "description_0" if target == 0 else "description_1"
        ]

        # Randomly drop text or image
        drop_text = random.random() < self.drop_text_prob
        drop_image = random.random() < self.drop_image_prob
        if drop_text:
            description = ""
        if drop_image:
            condition_img = Image.new("RGB", self.condition_size, (0, 0, 0))

        # 16 is the downscale factor of the image.
        # More details about position delta can be found in the documentation.
        position_delta = np.array([0, -self.condition_size[0] // 16])

        return {
            "image": self.to_tensor(target_image),
            "condition_0": self.to_tensor(condition_img),
            "condition_type_0": self.condition_type,
            "position_delta_0": position_delta,
            "description": description,
            **({"pil_image": image} if self.return_pil_image else {}),
        }


@torch.no_grad()
def test_function(model, save_path, file_name):
    condition_size = model.training_config["dataset"]["condition_size"]
    target_size = model.training_config["dataset"]["target_size"]

    # More details about position delta can be found in the documentation.
    position_delta = [0, -condition_size[0] // 16]

    # Set adapters
    adapter = model.adapter_names[2]
    condition_type = model.training_config["condition_type"]
    test_list = []

    # Test case 1 (in-distribution test case)
    image = Image.open("assets/test_in.jpg")
    image = image.resize(condition_size)
    prompt = "Resting on the picnic table at a lakeside campsite, it's caught in the golden glow of early morning, with mist rising from the water and tall pines casting long shadows behind the scene."
    condition = Condition(image, adapter, position_delta)
    test_list.append((condition, prompt))

    # Test case 2 (out-of-distribution test case)
    image = Image.open("assets/test_out.jpg")
    image = image.resize(condition_size)
    prompt = "In a bright room. It is placed on a table."
    condition = Condition(image, adapter, position_delta)
    test_list.append((condition, prompt))

    # Generate images
    os.makedirs(save_path, exist_ok=True)
    for i, (condition, prompt) in enumerate(test_list):
        generator = torch.Generator(device=model.device)
        generator.manual_seed(42)

        res = generate(
            model.flux_pipe,
            prompt=prompt,
            conditions=[condition],
            height=target_size[1],
            width=target_size[0],
            generator=generator,
            model_config=model.model_config,
            kv_cache=model.model_config.get("independent_condition", False),
        )
        file_path = os.path.join(save_path, f"{file_name}_{condition_type}_{i}.jpg")
        res.images[0].save(file_path)


def main():
    # Initialize
    config = get_config()
    training_config = config["train"]
    torch.cuda.set_device(int(os.environ.get("LOCAL_RANK", 0)))

    # Initialize raw dataset
    raw_dataset = load_dataset("Yuanshi/Subjects200K")

    # Define filter function to filter out low-quality images from Subjects200K
    def filter_func(item):
        if not item.get("quality_assessment"):
            return False
        return all(
            item["quality_assessment"].get(key, 0) >= 5
            for key in ["compositeStructure", "objectConsistency", "imageQuality"]
        )

    # Filter dataset
    if not os.path.exists("./cache/dataset"):
        os.makedirs("./cache/dataset")
    data_valid = raw_dataset["train"].filter(
        filter_func,
        num_proc=16,
        cache_file_name="./cache/dataset/data_valid.arrow",
    )

    # Initialize the dataset
    dataset = Subject200KDataset(
        data_valid,
        condition_size=training_config["dataset"]["condition_size"],
        target_size=training_config["dataset"]["target_size"],
        image_size=training_config["dataset"]["image_size"],
        padding=training_config["dataset"]["padding"],
        condition_type=training_config["condition_type"],
        drop_text_prob=training_config["dataset"]["drop_text_prob"],
        drop_image_prob=training_config["dataset"]["drop_image_prob"],
    )

    # Initialize model
    trainable_model = OminiModelRotation(
        flux_pipe_id=config["flux_path"],
        rotation_adapter_config=training_config["rotation_adapter_config"],
        device="cuda",
        dtype=getattr(torch, config["dtype"]),
        optimizer_config=training_config["optimizer"],
        model_config=config.get("model", {}),
        gradient_checkpointing=training_config.get("gradient_checkpointing", False),
    )

    train(dataset, trainable_model, config, test_function)


if __name__ == "__main__":
    main()
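A toy check (stub item, not real dataset content) of the quality filter defined in main() above: a sample passes only when quality_assessment exists and all three scores are at least 5.

item = {
    "quality_assessment": {
        "compositeStructure": 5,
        "objectConsistency": 4,   # below the threshold
        "imageQuality": 5,
    }
}
passes = bool(item.get("quality_assessment")) and all(
    item["quality_assessment"].get(key, 0) >= 5
    for key in ["compositeStructure", "objectConsistency", "imageQuality"]
)
print(passes)  # False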
omini/train_flux/train_token_integration.py
ADDED
@@ -0,0 +1,136 @@
import torch
import os
import random

from PIL import Image, ImageDraw

from datasets import load_dataset

from .trainer import OminiModel, get_config, train
from ..pipeline.flux_omini import Condition, generate
from .train_spatial_alignment import ImageConditionDataset


class TokenIntergrationDataset(ImageConditionDataset):
    def __getitem__(self, idx):
        image = self.base_dataset[idx]["jpg"]
        image = image.resize(self.target_size).convert("RGB")
        description = self.base_dataset[idx]["json"]["prompt"]

        assert self.condition_type == "token_intergration"
        assert (
            image.size[0] % 16 == 0 and image.size[1] % 16 == 0
        ), "Image size must be divisible by 16"

        # Randomly drop the text (for training)
        description = "" if random.random() < self.drop_text_prob else description

        # Generate a latent mask that is neither empty nor full
        w, h = image.size[0] // 16, image.size[1] // 16
        while True:
            x1, x2 = sorted([random.randint(0, w), random.randint(0, w)])
            y1, y2 = sorted([random.randint(0, h), random.randint(0, h)])
            is_zero = x1 == x2 or y1 == y2
            is_full = x1 == 0 and y1 == 0 and x2 == w and y2 == h
            if not (is_zero or is_full):
                break
        mask = Image.new("L", (w, h), 0)
        draw = ImageDraw.Draw(mask)
        draw.rectangle([x1, y1, x2, y2], fill=255)
        if random.random() > 0.5:
            mask = Image.eval(mask, lambda a: 255 - a)
        mask = self.to_tensor(mask).to(bool).reshape(-1)

        return {
            "image": self.to_tensor(image),
            "image_latent_mask": torch.logical_not(mask),
            "condition_0": self.to_tensor(image),
            "condition_type_0": self.condition_type,
            "condition_latent_mask_0": mask,
            "description": description,
        }


@torch.no_grad()
def test_function(model, save_path, file_name):
    target_size = model.training_config["dataset"]["target_size"]

    condition_type = model.training_config["condition_type"]
    test_list = []

    # Generate two complementary masks to test inpainting and outpainting.
    mask1 = torch.ones((32, 32), dtype=bool)
    mask1[8:24, 8:24] = False
    mask2 = torch.logical_not(mask1)

    image = Image.open("assets/vase_hq.jpg").resize(target_size)
    condition1 = Condition(
        image, model.adapter_names[2], latent_mask=mask1, is_complement=True
    )
    condition2 = Condition(
        image, model.adapter_names[2], latent_mask=mask2, is_complement=True
    )
    test_list.append((condition1, "A beautiful vase on a table.", mask2))
    test_list.append((condition2, "A beautiful vase on a table.", mask1))

    os.makedirs(save_path, exist_ok=True)
    for i, (condition, prompt, latent_mask) in enumerate(test_list):
        generator = torch.Generator(device=model.device)
        generator.manual_seed(42)

        res = generate(
            model.flux_pipe,
            prompt=prompt,
            conditions=[condition],
            height=target_size[1],
            width=target_size[0],
            generator=generator,
            model_config=model.model_config,
            kv_cache=model.model_config.get("independent_condition", False),
            latent_mask=latent_mask,
        )
        file_path = os.path.join(save_path, f"{file_name}_{condition_type}_{i}.jpg")
        res.images[0].save(file_path)


def main():
    # Initialize
    config = get_config()
    training_config = config["train"]
    torch.cuda.set_device(int(os.environ.get("LOCAL_RANK", 0)))

    # Load dataset text-to-image-2M
    dataset = load_dataset(
        "webdataset",
        data_files={"train": training_config["dataset"]["urls"]},
        split="train",
        cache_dir="cache/t2i2m",
        num_proc=32,
    )
    dataset = TokenIntergrationDataset(
        dataset,
        condition_size=training_config["dataset"]["condition_size"],
        target_size=training_config["dataset"]["target_size"],
        condition_type=training_config["condition_type"],
        drop_text_prob=training_config["dataset"]["drop_text_prob"],
        drop_image_prob=training_config["dataset"]["drop_image_prob"],
        position_scale=training_config["dataset"].get("position_scale", 1.0),
    )

    # Initialize model
    trainable_model = OminiModel(
        flux_pipe_id=config["flux_path"],
        lora_config=training_config["lora_config"],
        device="cuda",
        dtype=getattr(torch, config["dtype"]),
        optimizer_config=training_config["optimizer"],
        model_config=config.get("model", {}),
        gradient_checkpointing=training_config.get("gradient_checkpointing", False),
        adapter_names=[None, None, "default"],
    )

    train(dataset, trainable_model, config, test_function)


if __name__ == "__main__":
    main()
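A minimal sketch (toy 4x4 latent grid) of the mask bookkeeping in TokenIntergrationDataset above: the condition branch receives the tokens inside the rectangle, the image branch receives exactly the complementary tokens, so together they tile the full grid.

import torch

w = h = 4
mask = torch.zeros(h, w, dtype=torch.bool)
mask[1:3, 1:3] = True                     # "condition" tokens
condition_mask = mask.reshape(-1)
image_mask = torch.logical_not(condition_mask)
print(condition_mask.sum().item(), image_mask.sum().item())  # 4 12
print(torch.all(condition_mask ^ image_mask).item())         # True: a partition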
omini/train_flux/trainer.py
ADDED
@@ -0,0 +1,384 @@
import lightning as L
from diffusers.pipelines import FluxPipeline
import torch
import wandb
import os
import yaml
from peft import LoraConfig, get_peft_model_state_dict
from torch.utils.data import DataLoader
import time

from typing import List

import prodigyopt

from ..pipeline.flux_omini import transformer_forward, encode_images


def get_rank():
    try:
        rank = int(os.environ.get("LOCAL_RANK"))
    except (TypeError, ValueError):
        rank = 0
    return rank


def get_config():
    config_path = os.environ.get("OMINI_CONFIG")
    assert config_path is not None, "Please set the OMINI_CONFIG environment variable"
    with open(config_path, "r") as f:
        config = yaml.safe_load(f)
    return config


def init_wandb(wandb_config, run_name):
    try:
        assert os.environ.get("WANDB_API_KEY") is not None
        wandb.init(
            project=wandb_config["project"],
            name=run_name,
            config={},
        )
    except Exception as e:
        print("Failed to initialize WandB:", e)


class OminiModel(L.LightningModule):
    def __init__(
        self,
        flux_pipe_id: str,
        lora_path: str = None,
        lora_config: dict = None,
        device: str = "cuda",
        dtype: torch.dtype = torch.bfloat16,
        model_config: dict = {},
        adapter_names: List[str] = [None, None, "default"],
        optimizer_config: dict = None,
        gradient_checkpointing: bool = False,
    ):
        # Initialize the LightningModule
        super().__init__()
        self.model_config = model_config
        self.optimizer_config = optimizer_config

        # Load the Flux pipeline
        self.flux_pipe: FluxPipeline = FluxPipeline.from_pretrained(
            flux_pipe_id, torch_dtype=dtype
        ).to(device)
        self.transformer = self.flux_pipe.transformer
        self.transformer.gradient_checkpointing = gradient_checkpointing
        self.transformer.train()

        # Freeze the Flux pipeline
        self.flux_pipe.text_encoder.requires_grad_(False).eval()
        self.flux_pipe.text_encoder_2.requires_grad_(False).eval()
        self.flux_pipe.vae.requires_grad_(False).eval()
        self.adapter_names = adapter_names
        self.adapter_set = set([each for each in adapter_names if each is not None])

        # Initialize LoRA layers
        self.lora_layers = self.init_lora(lora_path, lora_config)

        self.to(device).to(dtype)

    def init_lora(self, lora_path: str, lora_config: dict):
        assert lora_path or lora_config
        if lora_path:
            # TODO: Implement this
            raise NotImplementedError
        else:
            for adapter_name in self.adapter_set:
                self.transformer.add_adapter(
                    LoraConfig(**lora_config), adapter_name=adapter_name
                )
            # TODO: Check if this is correct (p.requires_grad)
            lora_layers = filter(
                lambda p: p.requires_grad, self.transformer.parameters()
            )
        return list(lora_layers)

    def save_lora(self, path: str):
        for adapter_name in self.adapter_set:
            FluxPipeline.save_lora_weights(
                save_directory=path,
                weight_name=f"{adapter_name}.safetensors",
                transformer_lora_layers=get_peft_model_state_dict(
                    self.transformer, adapter_name=adapter_name
                ),
                safe_serialization=True,
            )
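A hedged usage sketch (not in the upload): weights written by save_lora above are standard diffusers LoRA safetensors, so they should load back through FluxPipeline's LoRA API. The base model id, checkpoint directory, and step number below are placeholders.

from diffusers import FluxPipeline
import torch

pipe = FluxPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-schnell", torch_dtype=torch.bfloat16
)
# Hypothetical checkpoint directory written by save_lora during training
pipe.load_lora_weights(
    "output/20250101-000000/ckpt/1000", weight_name="default.safetensors"
)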

    def configure_optimizers(self):
        # Freeze the transformer
        self.transformer.requires_grad_(False)
        opt_config = self.optimizer_config

        # Set the trainable parameters
        self.trainable_params = self.lora_layers

        # Unfreeze trainable parameters
        for p in self.trainable_params:
            p.requires_grad_(True)

        # Initialize the optimizer
        if opt_config["type"] == "AdamW":
            optimizer = torch.optim.AdamW(self.trainable_params, **opt_config["params"])
        elif opt_config["type"] == "Prodigy":
            optimizer = prodigyopt.Prodigy(
                self.trainable_params,
                **opt_config["params"],
            )
        elif opt_config["type"] == "SGD":
            optimizer = torch.optim.SGD(self.trainable_params, **opt_config["params"])
        else:
            raise NotImplementedError("Optimizer not implemented.")
        return optimizer

    def training_step(self, batch, batch_idx):
        imgs, prompts = batch["image"], batch["description"]
        image_latent_mask = batch.get("image_latent_mask", None)

        # Get the conditions and position deltas from the batch
        conditions, position_deltas, position_scales, latent_masks = [], [], [], []
        for i in range(1000):
            if f"condition_{i}" not in batch:
                break
            conditions.append(batch[f"condition_{i}"])
            position_deltas.append(batch.get(f"position_delta_{i}", [[0, 0]]))
            position_scales.append(batch.get(f"position_scale_{i}", [1.0])[0])
            latent_masks.append(batch.get(f"condition_latent_mask_{i}", None))

        # Prepare inputs
        with torch.no_grad():
            # Prepare image input
            x_0, img_ids = encode_images(self.flux_pipe, imgs)

            # Prepare text input
            (
                prompt_embeds,
                pooled_prompt_embeds,
                text_ids,
            ) = self.flux_pipe.encode_prompt(
                prompt=prompts,
                prompt_2=None,
                prompt_embeds=None,
                pooled_prompt_embeds=None,
                device=self.flux_pipe.device,
                num_images_per_prompt=1,
                max_sequence_length=self.model_config.get("max_sequence_length", 512),
                lora_scale=None,
            )

            # Prepare t and x_t
            t = torch.sigmoid(torch.randn((imgs.shape[0],), device=self.device))
            x_1 = torch.randn_like(x_0).to(self.device)
            t_ = t.unsqueeze(1).unsqueeze(1)
            x_t = ((1 - t_) * x_0 + t_ * x_1).to(self.dtype)
            if image_latent_mask is not None:
                x_0 = x_0[:, image_latent_mask[0]]
                x_1 = x_1[:, image_latent_mask[0]]
                x_t = x_t[:, image_latent_mask[0]]
                img_ids = img_ids[image_latent_mask[0]]

            # Prepare conditions
            condition_latents, condition_ids = [], []
            for cond, p_delta, p_scale, latent_mask in zip(
                conditions, position_deltas, position_scales, latent_masks
            ):
                # Encode the condition image
                c_latents, c_ids = encode_images(self.flux_pipe, cond)
                # Scale the position (see OminiControl2)
                if p_scale != 1.0:
                    scale_bias = (p_scale - 1.0) / 2
                    c_ids[:, 1:] *= p_scale
                    c_ids[:, 1:] += scale_bias
                # Add position delta (see OminiControl)
                c_ids[:, 1] += p_delta[0][0]
                c_ids[:, 2] += p_delta[0][1]
                if len(p_delta) > 1:
                    print("Warning: only the first position delta is used.")
                # Append to the list
                if latent_mask is not None:
                    c_latents, c_ids = c_latents[latent_mask], c_ids[latent_mask[0]]
                condition_latents.append(c_latents)
                condition_ids.append(c_ids)

            # Prepare guidance
            guidance = (
                torch.ones_like(t).to(self.device)
                if self.transformer.config.guidance_embeds
                else None
            )

        branch_n = 2 + len(conditions)
        group_mask = torch.ones([branch_n, branch_n], dtype=torch.bool).to(self.device)
        # Disable attention across different condition branches
        group_mask[2:, 2:] = torch.diag(torch.tensor([1] * len(conditions)))
        # Disable attention from the condition branches to the image and text branches
        if self.model_config.get("independent_condition", False):
            group_mask[2:, :2] = False

        # Forward pass
        transformer_out = transformer_forward(
            self.transformer,
            image_features=[x_t, *(condition_latents)],
            text_features=[prompt_embeds],
            img_ids=[img_ids, *(condition_ids)],
            txt_ids=[text_ids],
            # One timestep per branch (text, image, and each condition)
            timesteps=[t, t] + [torch.zeros_like(t)] * len(conditions),
            # Same as above
            pooled_projections=[pooled_prompt_embeds] * branch_n,
            guidances=[guidance] * branch_n,
            # The LoRA adapter names of each branch
            adapters=self.adapter_names,
            return_dict=False,
            group_mask=group_mask,
        )
        pred = transformer_out[0]

        # Compute loss
        step_loss = torch.nn.functional.mse_loss(pred, (x_1 - x_0), reduction="mean")
        self.last_t = t.mean().item()

        self.log_loss = (
            step_loss.item()
            if not hasattr(self, "log_loss")
            else self.log_loss * 0.95 + step_loss.item() * 0.05
        )
        return step_loss

    def generate_a_sample(self):
        raise NotImplementedError("Generate a sample not implemented.")


class TrainingCallback(L.Callback):
    def __init__(self, run_name, training_config: dict = {}, test_function=None):
        self.run_name, self.training_config = run_name, training_config

        self.print_every_n_steps = training_config.get("print_every_n_steps", 10)
        self.save_interval = training_config.get("save_interval", 1000)
        self.sample_interval = training_config.get("sample_interval", 1000)
        self.save_path = training_config.get("save_path", "./output")

        self.wandb_config = training_config.get("wandb", None)
        self.use_wandb = (
            wandb is not None and os.environ.get("WANDB_API_KEY") is not None
        )

        self.total_steps = 0
        self.test_function = test_function

    def on_train_batch_end(self, trainer, pl_module, outputs, batch, batch_idx):
        gradient_size = 0
        max_gradient_size = 0
        count = 0
        for _, param in pl_module.named_parameters():
            if param.grad is not None:
                gradient_size += param.grad.norm(2).item()
                max_gradient_size = max(max_gradient_size, param.grad.norm(2).item())
                count += 1
        if count > 0:
            gradient_size /= count

        self.total_steps += 1

        # Report training progress to WandB
        if self.use_wandb:
            report_dict = {
                "batch_idx": batch_idx,
                "steps": self.total_steps,
                "epoch": trainer.current_epoch,
                "gradient_size": gradient_size,
            }
            loss_value = outputs["loss"].item() * trainer.accumulate_grad_batches
            report_dict["loss"] = loss_value
            report_dict["t"] = pl_module.last_t
            wandb.log(report_dict)

        # Print training progress every n steps
        if self.total_steps % self.print_every_n_steps == 0:
            print(
                f"Epoch: {trainer.current_epoch}, Steps: {self.total_steps}, Batch: {batch_idx}, Loss: {pl_module.log_loss:.4f}, Gradient size: {gradient_size:.4f}, Max gradient size: {max_gradient_size:.4f}"
            )

        # Save LoRA weights at specified intervals
        if self.total_steps % self.save_interval == 0:
            print(
                f"Epoch: {trainer.current_epoch}, Steps: {self.total_steps} - Saving LoRA weights"
            )
            pl_module.save_lora(
                f"{self.save_path}/{self.run_name}/ckpt/{self.total_steps}"
            )

        # Generate and save a sample image at specified intervals
        if self.total_steps % self.sample_interval == 0 and self.test_function:
            print(
                f"Epoch: {trainer.current_epoch}, Steps: {self.total_steps} - Generating a sample"
            )
            pl_module.eval()
            self.test_function(
                pl_module,
                f"{self.save_path}/{self.run_name}/output",
                f"lora_{self.total_steps}",
            )
            pl_module.train()


def train(dataset, trainable_model, config, test_function):
    # Initialize
    is_main_process, rank = get_rank() == 0, get_rank()
    torch.cuda.set_device(rank)
    # config = get_config()

    training_config = config["train"]
    run_name = time.strftime("%Y%m%d-%H%M%S")

    # Initialize WandB
    wandb_config = training_config.get("wandb", None)
    if wandb_config is not None and is_main_process:
        init_wandb(wandb_config, run_name)

    print("Rank:", rank)
    if is_main_process:
        print("Config:", config)

    # Initialize dataloader
    print("Dataset length:", len(dataset))
    train_loader = DataLoader(
        dataset,
        batch_size=training_config.get("batch_size", 1),
        shuffle=True,
        num_workers=training_config["dataloader_workers"],
    )

    # Callbacks for testing and saving checkpoints
    if is_main_process:
        callbacks = [TrainingCallback(run_name, training_config, test_function)]

    # Initialize trainer
    trainer = L.Trainer(
        accumulate_grad_batches=training_config["accumulate_grad_batches"],
        callbacks=callbacks if is_main_process else [],
        enable_checkpointing=False,
        enable_progress_bar=False,
        logger=False,
        max_steps=training_config.get("max_steps", -1),
        max_epochs=training_config.get("max_epochs", -1),
        gradient_clip_val=training_config.get("gradient_clip_val", 0.5),
    )

    setattr(trainer, "training_config", training_config)
    setattr(trainable_model, "training_config", training_config)

    # Save the training config
    save_path = training_config.get("save_path", "./output")
    if is_main_process:
        os.makedirs(f"{save_path}/{run_name}", exist_ok=True)
        with open(f"{save_path}/{run_name}/config.yaml", "w") as f:
            yaml.dump(config, f)

    # Start training
    trainer.fit(trainable_model, train_loader)
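A small sketch (toy branch count) of the group mask built in training_step above. The first two branches carry the text and image streams and branches 2+ carry the conditions: conditions never attend to each other, and under "independent_condition" they also stop attending back to the text and image branches.

import torch

n_conditions = 2
branch_n = 2 + n_conditions
group_mask = torch.ones(branch_n, branch_n, dtype=torch.bool)
group_mask[2:, 2:] = torch.diag(torch.tensor([1] * n_conditions)).bool()
independent_condition = True
if independent_condition:
    group_mask[2:, :2] = False
print(group_mask.int())
# tensor([[1, 1, 1, 1],
#         [1, 1, 1, 1],
#         [0, 0, 1, 0],
#         [0, 0, 0, 1]])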
omini/train_flux/trainer_rotation.py
ADDED
@@ -0,0 +1,449 @@
import lightning as L
from diffusers.pipelines import FluxPipeline
import torch
import wandb
import os
import yaml
from peft import LoraConfig, get_peft_model_state_dict
from torch.utils.data import DataLoader
import time

from typing import List

import prodigyopt

from ..pipeline.flux_omini import transformer_forward, encode_images
from ..rotation import RotationTuner, RotationConfig


def get_rank():
    try:
        rank = int(os.environ.get("LOCAL_RANK"))
    except (TypeError, ValueError):
        rank = 0
    return rank


def get_config():
    config_path = os.environ.get("OMINI_CONFIG")
    assert config_path is not None, "Please set the OMINI_CONFIG environment variable"
    with open(config_path, "r") as f:
        config = yaml.safe_load(f)
    return config


def init_wandb(wandb_config, run_name):
    try:
        assert os.environ.get("WANDB_API_KEY") is not None
        wandb.init(
            project=wandb_config["project"],
            name=run_name,
            config={},
        )
    except Exception as e:
        print("Failed to initialize WandB:", e)


class OminiModelRotation(L.LightningModule):
    def __init__(
        self,
        flux_pipe_id: str,
        rotation_adapter_path: str = None,
        rotation_adapter_config: dict = None,
        device: str = "cuda",
        dtype: torch.dtype = torch.bfloat16,
        model_config: dict = {},
        adapter_names: List[str] = [None, None, "default"],
        optimizer_config: dict = None,
        gradient_checkpointing: bool = False,
    ):
        # Initialize the LightningModule
        super().__init__()
        self.model_config = model_config
        self.optimizer_config = optimizer_config

        # Load the Flux pipeline
        self.flux_pipe: FluxPipeline = FluxPipeline.from_pretrained(
            flux_pipe_id, torch_dtype=dtype
        ).to(device)
        self.transformer = self.flux_pipe.transformer
        self.transformer.gradient_checkpointing = gradient_checkpointing
        self.transformer.train()

        # Freeze the Flux pipeline
        self.flux_pipe.text_encoder.requires_grad_(False).eval()
        self.flux_pipe.text_encoder_2.requires_grad_(False).eval()
        self.flux_pipe.vae.requires_grad_(False).eval()
        self.adapter_names = adapter_names
        self.adapter_set = set([each for each in adapter_names if each is not None])

        # Initialize rotation adapters
        self.rotation_layers = self.init_rotation(
            rotation_adapter_path, rotation_adapter_config
        )
        print(
            f"Total trainable parameters: {sum(p.numel() for p in self.rotation_layers)}"
        )
        self.to(device).to(dtype)

    def init_rotation(self, rotation_adapter_path: str, rotation_adapter_config: dict):
        assert rotation_adapter_path or rotation_adapter_config
        if rotation_adapter_path:
            # TODO: Implement this
            raise NotImplementedError
        else:
            for adapter_name in self.adapter_set:
                print(f"Initializing rotation adapter: {adapter_name}")

                if not self.transformer._hf_peft_config_loaded:
                    self.transformer._hf_peft_config_loaded = True
                elif adapter_name in self.transformer.peft_config:
                    raise ValueError(
                        f"Adapter with name {adapter_name} already exists. Please use a different name."
                    )

                config = RotationConfig(**rotation_adapter_config)
                rotation_tuner = RotationTuner(
                    self.transformer,
                    config,
                    adapter_name=adapter_name,
                )

                # self.transformer = rotation_tuner.model
                self.transformer.set_adapter(adapter_name)

            rotation_layers = filter(
                lambda p: p.requires_grad, self.transformer.parameters()
            )

        return list(rotation_layers)

    # def save_lora(self, path: str):
    #     for adapter_name in self.adapter_set:
    #         FluxPipeline.save_lora_weights(
    #             save_directory=path,
    #             weight_name=f"{adapter_name}.safetensors",
    #             transformer_lora_layers=get_peft_model_state_dict(
    #                 self.transformer, adapter_name=adapter_name
    #             ),
    #             safe_serialization=True,
    #         )

    def save_rotation(self, path: str):
        from safetensors.torch import save_file

        os.makedirs(path, exist_ok=True)

        # Get the full model state dict (handles DDP, FSDP, etc.)
        state_dict = self.transformer.state_dict()
        for adapter_name in self.adapter_set:
            to_return = {}

            for k, v in state_dict.items():
                if f".rotation.{adapter_name}." in k:
                    # Remove DDP/FSDP prefixes if present
                    clean_key = k.replace("module.", "").replace(
                        "_fsdp_wrapped_module.", ""
                    )
                    to_return[clean_key] = v

            if len(to_return) == 0:
                print(f"Warning: No rotation parameters found for adapter {adapter_name}")
                print(f"Available keys sample: {list(state_dict.keys())[:5]}")
                continue

            # Remove the adapter name from keys (following the PEFT convention).
            # This makes loading easier and follows the pattern used by LoRA.
            to_return = {
                k.replace(f".{adapter_name}", ""): v for k, v in to_return.items()
            }
            save_path = os.path.join(path, f"{adapter_name}.safetensors")

            # Move to CPU and detach before saving
            to_return_cpu = {k: v.cpu().detach() for k, v in to_return.items()}
            save_file(to_return_cpu, save_path)

            total_params = sum(p.numel() for p in to_return.values())
            num_U_params = sum(p.numel() for k, p in to_return.items() if ".U" in k)
            num_V_params = sum(p.numel() for k, p in to_return.items() if ".V" in k)

            print(f"Saved adapter '{adapter_name}':")
            print(f"  - {len(to_return)} tensors ({total_params:,} total parameters)")
            print(f"  - U parameters: {num_U_params:,}, V parameters: {num_V_params:,}")
            print(f"  - Path: {save_path}")
|
| 174 |
+
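    # A loading sketch (not part of the original file, shown for reference):
    # each adapter saved above is a plain safetensors state dict, so it could
    # be read back with, e.g.
    #     from safetensors.torch import load_file
    #     state = load_file(os.path.join(path, f"{adapter_name}.safetensors"))
    # Re-attaching the tensors to the rotation modules is not implemented here
    # (see the `rotation_adapter_path` branch of `init_rotation`).
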
    def configure_optimizers(self):
        # Freeze the transformer
        self.transformer.requires_grad_(False)
        opt_config = self.optimizer_config

        # Set the trainable parameters
        self.trainable_params = self.rotation_layers
        print(f"Number of trainable parameters: {sum(p.numel() for p in self.trainable_params)}")

        # Unfreeze trainable parameters
        for p in self.trainable_params:
            p.requires_grad_(True)

        # Initialize the optimizer
        if opt_config["type"] == "AdamW":
            optimizer = torch.optim.AdamW(self.trainable_params, **opt_config["params"])
        elif opt_config["type"] == "Prodigy":
            optimizer = prodigyopt.Prodigy(
                self.trainable_params,
                **opt_config["params"],
            )
        elif opt_config["type"] == "SGD":
            optimizer = torch.optim.SGD(self.trainable_params, **opt_config["params"])
        else:
            raise NotImplementedError("Optimizer not implemented.")
        return optimizer

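    # For reference, `optimizer_config` is expected to mirror the `optimizer`
    # section of the YAML configs in train/config/, e.g.
    #     {"type": "Prodigy",
    #      "params": {"lr": 1, "use_bias_correction": True,
    #                 "safeguard_warmup": True, "weight_decay": 0.01}}
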
    def training_step(self, batch, batch_idx):
        imgs, prompts = batch["image"], batch["description"]
        image_latent_mask = batch.get("image_latent_mask", None)

        # Collect the conditions and position deltas from the batch
        # (keys condition_0, condition_1, ... until one is missing)
        conditions, position_deltas, position_scales, latent_masks = [], [], [], []
        for i in range(1000):
            if f"condition_{i}" not in batch:
                break
            conditions.append(batch[f"condition_{i}"])
            position_deltas.append(batch.get(f"position_delta_{i}", [[0, 0]]))
            position_scales.append(batch.get(f"position_scale_{i}", [1.0])[0])
            latent_masks.append(batch.get(f"condition_latent_mask_{i}", None))

        # Prepare inputs
        with torch.no_grad():
            # Prepare image input
            x_0, img_ids = encode_images(self.flux_pipe, imgs)

            # Prepare text input
            (
                prompt_embeds,
                pooled_prompt_embeds,
                text_ids,
            ) = self.flux_pipe.encode_prompt(
                prompt=prompts,
                prompt_2=None,
                prompt_embeds=None,
                pooled_prompt_embeds=None,
                device=self.flux_pipe.device,
                num_images_per_prompt=1,
                max_sequence_length=self.model_config.get("max_sequence_length", 512),
                lora_scale=None,
            )

            # Prepare t and x_t (linear interpolation between data and noise)
            t = torch.sigmoid(torch.randn((imgs.shape[0],), device=self.device))
            x_1 = torch.randn_like(x_0).to(self.device)
            t_ = t.unsqueeze(1).unsqueeze(1)
            x_t = ((1 - t_) * x_0 + t_ * x_1).to(self.dtype)
            if image_latent_mask is not None:
                x_0 = x_0[:, image_latent_mask[0]]
                x_1 = x_1[:, image_latent_mask[0]]
                x_t = x_t[:, image_latent_mask[0]]
                img_ids = img_ids[image_latent_mask[0]]

            # Prepare conditions
            condition_latents, condition_ids = [], []
            for cond, p_delta, p_scale, latent_mask in zip(
                conditions, position_deltas, position_scales, latent_masks
            ):
                c_latents, c_ids = encode_images(self.flux_pipe, cond)
                # Scale the position (see OminiControl2)
                if p_scale != 1.0:
                    scale_bias = (p_scale - 1.0) / 2
                    c_ids[:, 1:] *= p_scale
                    c_ids[:, 1:] += scale_bias
                # Add the position delta (see OminiControl)
                c_ids[:, 1] += p_delta[0][0]
                c_ids[:, 2] += p_delta[0][1]
                if len(p_delta) > 1:
                    print("Warning: only the first position delta is used.")
                # Append to the list
                if latent_mask is not None:
                    c_latents, c_ids = c_latents[latent_mask], c_ids[latent_mask[0]]
                condition_latents.append(c_latents)
                condition_ids.append(c_ids)

        # Prepare guidance
        guidance = (
            torch.ones_like(t).to(self.device)
            if self.transformer.config.guidance_embeds
            else None
        )

        # Build the branch-to-branch attention mask.
        # Branches 0-1 are the image and text branches; branches 2.. are the conditions.
        branch_n = 2 + len(conditions)
        group_mask = torch.ones([branch_n, branch_n], dtype=torch.bool).to(self.device)
        # Disable attention across different condition branches
        group_mask[2:, 2:] = torch.diag(torch.tensor([1] * len(conditions)))
        # Disable attention from condition branches to the image and text branches
        if self.model_config.get("independent_condition", False):
            group_mask[2:, :2] = False

        # Forward pass
        transformer_out = transformer_forward(
            self.transformer,
            image_features=[x_t, *condition_latents],
            text_features=[prompt_embeds],
            img_ids=[img_ids, *condition_ids],
            txt_ids=[text_ids],
            # One timestep per branch: t for the image and text branches,
            # and 0 for each (clean) condition branch
            timesteps=[t, t] + [torch.zeros_like(t)] * len(conditions),
            # Same layout as above
            pooled_projections=[pooled_prompt_embeds] * branch_n,
            guidances=[guidance] * branch_n,
            # The adapter names of each branch
            adapters=self.adapter_names,
            return_dict=False,
            group_mask=group_mask,
        )
        pred = transformer_out[0]

        # Compute the loss
        step_loss = torch.nn.functional.mse_loss(pred, (x_1 - x_0), reduction="mean")
        self.last_t = t.mean().item()

        # Exponential moving average of the loss for logging
        self.log_loss = (
            step_loss.item()
            if not hasattr(self, "log_loss")
            else self.log_loss * 0.95 + step_loss.item() * 0.05
        )
        return step_loss

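    # Note: `training_step` implements a rectified-flow (flow-matching)
    # objective: x_t = (1 - t) * x_0 + t * x_1 has velocity
    # d(x_t)/dt = x_1 - x_0, so the network is trained with an MSE loss to
    # predict (x_1 - x_0).
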
    def generate_a_sample(self):
        raise NotImplementedError("Generate a sample not implemented.")


class TrainingCallback(L.Callback):
    def __init__(self, run_name, training_config: dict = {}, test_function=None):
        self.run_name, self.training_config = run_name, training_config

        self.print_every_n_steps = training_config.get("print_every_n_steps", 10)
        self.save_interval = training_config.get("save_interval", 1000)
        self.sample_interval = training_config.get("sample_interval", 1000)
        self.save_path = training_config.get("save_path", "./output")

        self.wandb_config = training_config.get("wandb", None)
        self.use_wandb = (
            wandb is not None and os.environ.get("WANDB_API_KEY") is not None
        )

        self.total_steps = 0
        self.test_function = test_function

    def on_train_batch_end(self, trainer, pl_module, outputs, batch, batch_idx):
        # Track the mean and max L2 norm of the gradients
        gradient_size = 0
        max_gradient_size = 0
        count = 0
        for _, param in pl_module.named_parameters():
            if param.grad is not None:
                gradient_size += param.grad.norm(2).item()
                max_gradient_size = max(max_gradient_size, param.grad.norm(2).item())
                count += 1
        if count > 0:
            gradient_size /= count

        self.total_steps += 1

        # Report training progress to WandB
        if self.use_wandb:
            report_dict = {
                "steps": self.total_steps,
                "epoch": trainer.current_epoch,
                "gradient_size": gradient_size,
            }
            loss_value = outputs["loss"].item() * trainer.accumulate_grad_batches
            report_dict["loss"] = loss_value
            report_dict["t"] = pl_module.last_t
            wandb.log(report_dict)

        # Print training progress every n steps
        if self.total_steps % self.print_every_n_steps == 0:
            print(
                f"Epoch: {trainer.current_epoch}, Steps: {self.total_steps}, Batch: {batch_idx}, Loss: {pl_module.log_loss:.4f}, Gradient size: {gradient_size:.4f}, Max gradient size: {max_gradient_size:.4f}"
            )

        # Save rotation weights at specified intervals
        if self.total_steps % self.save_interval == 0:
            print(
                f"Epoch: {trainer.current_epoch}, Steps: {self.total_steps} - Saving rotation weights"
            )
            pl_module.save_rotation(
                f"{self.save_path}/{self.run_name}/ckpt/{self.total_steps}"
            )
            # pl_module.save_lora(
            #     f"{self.save_path}/{self.run_name}/ckpt/{self.total_steps}"
            # )

        # Generate and save a sample image at specified intervals
        if self.total_steps % self.sample_interval == 0 and self.test_function:
            print(
                f"Epoch: {trainer.current_epoch}, Steps: {self.total_steps} - Generating a sample"
            )
            pl_module.eval()
            self.test_function(
                pl_module,
                f"{self.save_path}/{self.run_name}/output",
                f"lora_{self.total_steps}",
            )
            pl_module.train()


def train(dataset, trainable_model, config, test_function):
    # Initialize
    is_main_process, rank = get_rank() == 0, get_rank()
    torch.cuda.set_device(rank)
    # config = get_config()

    training_config = config["train"]
    run_name = time.strftime("%Y%m%d-%H%M%S")

    # Initialize WandB
    wandb_config = training_config.get("wandb", None)
    if wandb_config is not None and is_main_process:
        init_wandb(wandb_config, run_name)

    print("Rank:", rank)
    if is_main_process:
        print("Config:", config)

    # Initialize the dataloader
    print("Dataset length:", len(dataset))
    train_loader = DataLoader(
        dataset,
        batch_size=training_config.get("batch_size", 1),
        shuffle=True,
        num_workers=training_config["dataloader_workers"],
    )

    # Callbacks for testing and saving checkpoints (main process only)
    if is_main_process:
        callbacks = [TrainingCallback(run_name, training_config, test_function)]

    # Initialize the trainer
    trainer = L.Trainer(
        accumulate_grad_batches=training_config["accumulate_grad_batches"],
        callbacks=callbacks if is_main_process else [],
        enable_checkpointing=False,
        enable_progress_bar=False,
        logger=False,
        max_steps=training_config.get("max_steps", -1),
        max_epochs=training_config.get("max_epochs", -1),
        gradient_clip_val=training_config.get("gradient_clip_val", 0.5),
    )

    setattr(trainer, "training_config", training_config)
    setattr(trainable_model, "training_config", training_config)

    # Save the training config
    save_path = training_config.get("save_path", "./output")
    if is_main_process:
        os.makedirs(f"{save_path}/{run_name}")
        with open(f"{save_path}/{run_name}/config.yaml", "w") as f:
            yaml.dump(config, f)

    # Start training
    trainer.fit(trainable_model, train_loader)
train/README.md
ADDED
@@ -0,0 +1,253 @@
# Training for FLUX

## Table of Contents
- [Training for FLUX](#training-for-flux)
  - [Table of Contents](#table-of-contents)
  - [Environment Setup](#environment-setup)
  - [Dataset Preparation](#dataset-preparation)
  - [Quick Start](#quick-start)
  - [Basic Training](#basic-training)
    - [Tasks from OminiControl](#tasks-from-ominicontrol)
    - [Creating Your Own Task](#creating-your-own-task)
    - [Training Configuration](#training-configuration)
      - [Batch Size](#batch-size)
      - [Optimizer](#optimizer)
      - [LoRA Configuration](#lora-configuration)
      - [Trainable Modules](#trainable-modules)
  - [Advanced Training](#advanced-training)
    - [Multi-condition](#multi-condition)
    - [Efficient Generation (OminiControl2)](#efficient-generation-ominicontrol2)
      - [Feature Reuse (KV-Cache)](#feature-reuse-kv-cache)
      - [Compact Encoding Representation](#compact-encoding-representation)
      - [Token Integration (for Fill task)](#token-integration-for-fill-task)
  - [Citation](#citation)

## Environment Setup

1. Create and activate a new conda environment:
   ```bash
   conda create -n omini python=3.10
   conda activate omini
   ```

2. Install the required packages:
   ```bash
   pip install -r requirements.txt
   ```

## Dataset Preparation

1. Download the [Subjects200K](https://huggingface.co/datasets/Yuanshi/Subjects200K) dataset for subject-driven generation:
   ```bash
   bash train/script/data_download/data_download1.sh
   ```

2. Download the [text-to-image-2M](https://huggingface.co/datasets/jackyhate/text-to-image-2M) dataset for spatially aligned control tasks:
   ```bash
   bash train/script/data_download/data_download2.sh
   ```

**Note:** By default, only a few files are downloaded. You can edit `data_download2.sh` to download more data, and update the config file accordingly.

## Quick Start

Use these scripts to start training immediately:

1. **Subject-driven generation**:
   ```bash
   bash train/script/train_subject.sh
   ```

2. **Spatial control tasks** (Canny-to-image, colorization, depth map, etc.):
   ```bash
   bash train/script/train_spatial_alignment.sh
   ```

3. **Multi-condition training**:
   ```bash
   bash train/script/train_multi_condition.sh
   ```

4. **Feature reuse** (OminiControl2):
   ```bash
   bash train/script/train_feature_reuse.sh
   ```

5. **Compact token representation** (OminiControl2):
   ```bash
   bash train/script/train_compact_token_representation.sh
   ```

6. **Token integration** (OminiControl2):
   ```bash
   bash train/script/train_token_intergration.sh
   ```

## Basic Training

### Tasks from OminiControl
<a href="https://arxiv.org/abs/2411.15098"><img src="https://img.shields.io/badge/ariXv-2411.15098-A42C25.svg" alt="arXiv"></a>

1. Subject-driven generation:
   ```bash
   bash train/script/train_subject.sh
   ```

2. Spatial control tasks (using canny-to-image as an example):
   ```bash
   bash train/script/train_spatial_alignment.sh
   ```

<details>
<summary>Supported tasks</summary>

* Canny edge to image (`canny`)
* Image colorization (`coloring`)
* Image deblurring (`deblurring`)
* Depth map to image (`depth`)
* Image to depth map (`depth_pred`)
* Image inpainting (`fill`)
* Super resolution (`sr`)

🌟 Change the `condition_type` parameter in the config file to switch between tasks.
</details>

**Note**: Check the **script files** (`train/script/`) and **config files** (`train/config/`) for WandB and GPU settings.

### Creating Your Own Task

You can create a custom task by building a new dataset and modifying the test code (a minimal dataset sketch follows the notes below):

1. **Create a custom dataset:**
   Your custom dataset should follow the format of `Subject200KDataset` in `omini/train_flux/train_subject.py`. Each sample should contain:

   - Image: the target image (`image`)
   - Text: a description of the image (`description`)
   - Conditions: the image conditions for generation
   - Position delta:
     - Use `position_delta = (0, 0)` to align the condition with the generated image
     - Use `position_delta = (0, -a)` to separate them (a = condition width / 16)

   > **Explanation:**
   > The model places both the condition and the generated image in a shared coordinate system. `position_delta` shifts the condition image in this space.
   >
   > Each unit equals one patch (16 pixels). For a 512px-wide condition image (32 patches), `position_delta = (0, -32)` moves it fully to the left.
   >
   > This controls whether conditions and generated images share space or appear side by side.

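   A minimal sketch of such a dataset (the class name and field values here are hypothetical; only the returned keys matter, and they follow the `condition_{i}` / `position_delta_{i}` convention read by `training_step` in the trainer):

   ```python
   from torch.utils.data import Dataset

   class MyConditionDataset(Dataset):
       """Hypothetical custom dataset returning the fields the trainer expects."""

       def __init__(self, samples, condition_width=512):
           # `samples` is assumed to be a list of (target, condition, caption)
           self.samples = samples
           self.condition_width = condition_width

       def __len__(self):
           return len(self.samples)

       def __getitem__(self, idx):
           image, condition, caption = self.samples[idx]
           return {
               "image": image,            # target image tensor
               "description": caption,    # text prompt
               "condition_0": condition,  # first image condition
               # (0, 0) overlaps condition and target; (0, -width/16) separates them
               "position_delta_0": (0, -self.condition_width // 16),
           }
   ```
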
|
| 138 |
+
2. **Modify the test code:**
|
| 139 |
+
Define `test_function()` in `train_custom.py`. Refer to the function in `train_subject.py` for examples. Make sure to keep the `position_delta` parameter consistent with your dataset.
|
| 140 |
+
|
| 141 |
+
### Training Configuration

#### Batch Size
We recommend a batch size of 1 for stable training; set `accumulate_grad_batches` to *n* to simulate an effective batch size of *n*.

#### Optimizer
The default optimizer is `Prodigy`. To use `AdamW` instead, modify the config file:
```yaml
optimizer:
  type: AdamW
  params:
    lr: 1e-4
    weight_decay: 0.001
```

#### LoRA Configuration
The default LoRA rank is 4. Increase it for complex tasks (keep `r` and `lora_alpha` equal):
```yaml
lora_config:
  r: 128
  lora_alpha: 128
```

#### Trainable Modules
The `target_modules` parameter uses regex patterns to specify which modules to train. See the [PEFT documentation](https://huggingface.co/docs/peft/package_reference/lora) for details.

The default configuration trains all modules affecting image tokens:
```yaml
target_modules: "(.*x_embedder|.*(?<!single_)transformer_blocks\\.[0-9]+\\.norm1\\.linear|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_k|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_q|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_v|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_out\\.0|.*(?<!single_)transformer_blocks\\.[0-9]+\\.ff\\.net\\.2|.*single_transformer_blocks\\.[0-9]+\\.norm\\.linear|.*single_transformer_blocks\\.[0-9]+\\.proj_mlp|.*single_transformer_blocks\\.[0-9]+\\.proj_out|.*single_transformer_blocks\\.[0-9]+\\.attn.to_k|.*single_transformer_blocks\\.[0-9]+\\.attn.to_q|.*single_transformer_blocks\\.[0-9]+\\.attn.to_v|.*single_transformer_blocks\\.[0-9]+\\.attn.to_out)"
```

To train only the attention components (`to_q`, `to_k`, `to_v`), use:
```yaml
target_modules: "(.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_k|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_q|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_v|.*single_transformer_blocks\\.[0-9]+\\.attn.to_k|.*single_transformer_blocks\\.[0-9]+\\.attn.to_q|.*single_transformer_blocks\\.[0-9]+\\.attn.to_v)"
```

Since these patterns are long, the sketch below shows one way to check which module names a pattern actually selects.

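This sketch assumes a `FluxPipeline` loaded as `pipe`; PEFT treats a string `target_modules` as a regular expression and full-matches it against each module name:

```python
import re

from diffusers import FluxPipeline

pipe = FluxPipeline.from_pretrained("black-forest-labs/FLUX.1-dev")

# One alternative from the attention-only pattern above.
pattern = r".*(?<!single_)transformer_blocks\.[0-9]+\.attn\.to_q"

matched = [
    name
    for name, _ in pipe.transformer.named_modules()
    if re.fullmatch(pattern, name)
]
print(f"{len(matched)} modules selected, e.g. {matched[:3]}")
```
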
## Advanced Training

### Multi-condition
A basic multi-condition implementation is available in `train_multi_condition.py`:
```bash
bash train/script/train_multi_condition.sh
```

### Efficient Generation (OminiControl2)
<a href="https://arxiv.org/abs/2503.08280"><img src="https://img.shields.io/badge/ariXv-2503.08280-A42C25.svg" alt="arXiv"></a>

[OminiControl2](https://arxiv.org/abs/2503.08280) introduces techniques that improve generation efficiency:

#### Feature Reuse (KV-Cache)
1. Enable `independent_condition` in the config file during training:
   ```yaml
   model:
     independent_condition: true
   ```

2. During inference, set `kv_cache = True` in the `generate` function to speed up generation, as in the sketch below.

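A hypothetical call (the `generate` helper, `pipe`, and `condition` stand in for the repository's actual inference utilities and are not defined in this README; only the `kv_cache` flag is taken from the step above):

```python
image = generate(
    pipe,
    prompt="a photo of a sofa in a bright living room",
    conditions=[condition],
    kv_cache=True,  # reuse the condition branch's K/V across denoising steps
)
```
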
*Example:*
```bash
bash train/script/train_feature_reuse.sh
```

**Note:** Feature reuse speeds up generation but may slightly reduce quality and increase training time.

#### Compact Encoding Representation
Reduce the condition image resolution and use `position_scale` to align it with the output image; here `position_scale = target size / condition size = 512 / 256 = 2`:

```diff
 train:
   dataset:
     condition_size:
-      - 512
-      - 512
+      - 256
+      - 256
+    position_scale: 2
     target_size:
       - 512
       - 512
```

*Example:*
```bash
bash train/script/train_compact_token_representation.sh
```

#### Token Integration (for Fill task)
Further reduce tokens by merging the condition and generation tokens into a unified sequence. (Refer to [the paper](https://arxiv.org/abs/2503.08280) for details.)

*Example:*
```bash
bash train/script/train_token_intergration.sh
```

## Citation

If you find this code useful, please cite our papers:

```
@article{tan2024ominicontrol,
  title={OminiControl: Minimal and Universal Control for Diffusion Transformer},
  author={Tan, Zhenxiong and Liu, Songhua and Yang, Xingyi and Xue, Qiaochu and Wang, Xinchao},
  journal={arXiv preprint arXiv:2411.15098},
  year={2024}
}

@article{tan2025ominicontrol2,
  title={OminiControl2: Efficient Conditioning for Diffusion Transformers},
  author={Tan, Zhenxiong and Xue, Qiaochu and Yang, Xingyi and Liu, Songhua and Wang, Xinchao},
  journal={arXiv preprint arXiv:2503.08280},
  year={2025}
}
```
train/config/compact_token_representation.yaml
ADDED
@@ -0,0 +1,65 @@
flux_path: "black-forest-labs/FLUX.1-dev"
dtype: "bfloat16"

model:
  independent_condition: false

train:
  accumulate_grad_batches: 1
  dataloader_workers: 5
  save_interval: 1000
  sample_interval: 100
  max_steps: -1
  gradient_checkpointing: true # (Turn off for faster training)
  save_path: "runs"

  # Specify the type of condition to use.
  # Options: ["canny", "coloring", "deblurring", "depth", "depth_pred", "fill"]
  condition_type: "canny"
  dataset:
    type: "img"
    urls:
      # (Uncomment the following lines to use more data)
      # - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000040.tar"
      # - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000041.tar"
      # - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000042.tar"
      # - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000043.tar"
      # - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000044.tar"
      - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000045.tar"
      - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000046.tar"
    cache_name: "data_512_2M"
    condition_size:
      - 256
      - 256
    position_scale: 2.0
    target_size:
      - 512
      - 512
    drop_text_prob: 0.1
    drop_image_prob: 0.1

  wandb:
    project: "OminiControl"

  lora_config:
    r: 4
    lora_alpha: 4
    init_lora_weights: "gaussian"
    target_modules: "(.*x_embedder|.*(?<!single_)transformer_blocks\\.[0-9]+\\.norm1\\.linear|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_k|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_q|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_v|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_out\\.0|.*(?<!single_)transformer_blocks\\.[0-9]+\\.ff\\.net\\.2|.*single_transformer_blocks\\.[0-9]+\\.norm\\.linear|.*single_transformer_blocks\\.[0-9]+\\.proj_mlp|.*single_transformer_blocks\\.[0-9]+\\.proj_out|.*single_transformer_blocks\\.[0-9]+\\.attn.to_k|.*single_transformer_blocks\\.[0-9]+\\.attn.to_q|.*single_transformer_blocks\\.[0-9]+\\.attn.to_v|.*single_transformer_blocks\\.[0-9]+\\.attn.to_out)"
    # (Uncomment the following line to train fewer parameters while keeping similar performance)
    # target_modules: "(.*(?<!single_)transformer_blocks\\.[0-9]+\\.norm1\\.linear|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_k|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_q|.*single_transformer_blocks\\.[0-9]+\\.norm\\.linear|.*single_transformer_blocks\\.[0-9]+\\.attn.to_k|.*single_transformer_blocks\\.[0-9]+\\.attn.to_q)"

  optimizer:
    type: "Prodigy"
    params:
      lr: 1
      use_bias_correction: true
      safeguard_warmup: true
      weight_decay: 0.01

  # (To use the AdamW optimizer, uncomment the following lines)
  # optimizer:
  #   type: AdamW
  #   params:
  #     lr: 1e-4
  #     weight_decay: 0.001
train/config/feature_reuse.yaml
ADDED
@@ -0,0 +1,64 @@
flux_path: "black-forest-labs/FLUX.1-dev"
dtype: "bfloat16"

model:
  independent_condition: true

train:
  accumulate_grad_batches: 1
  dataloader_workers: 5
  save_interval: 1000
  sample_interval: 100
  max_steps: -1
  gradient_checkpointing: true # (Turn off for faster training)
  save_path: "runs"

  # Specify the type of condition to use.
  # Options: ["canny", "coloring", "deblurring", "depth", "depth_pred", "fill"]
  condition_type: "canny"
  dataset:
    type: "img"
    urls:
      # (Uncomment the following lines to use more data)
      # - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000040.tar"
      # - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000041.tar"
      # - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000042.tar"
      # - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000043.tar"
      # - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000044.tar"
      - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000045.tar"
      - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000046.tar"
    cache_name: "data_512_2M"
    condition_size:
      - 512
      - 512
    target_size:
      - 512
      - 512
    drop_text_prob: 0.1
    drop_image_prob: 0.1

  wandb:
    project: "OminiControl"

  lora_config:
    r: 4
    lora_alpha: 4
    init_lora_weights: "gaussian"
    target_modules: "(.*x_embedder|.*(?<!single_)transformer_blocks\\.[0-9]+\\.norm1\\.linear|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_k|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_q|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_v|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_out\\.0|.*(?<!single_)transformer_blocks\\.[0-9]+\\.ff\\.net\\.2|.*single_transformer_blocks\\.[0-9]+\\.norm\\.linear|.*single_transformer_blocks\\.[0-9]+\\.proj_mlp|.*single_transformer_blocks\\.[0-9]+\\.proj_out|.*single_transformer_blocks\\.[0-9]+\\.attn.to_k|.*single_transformer_blocks\\.[0-9]+\\.attn.to_q|.*single_transformer_blocks\\.[0-9]+\\.attn.to_v|.*single_transformer_blocks\\.[0-9]+\\.attn.to_out)"
    # (Uncomment the following line to train fewer parameters while keeping similar performance)
    # target_modules: "(.*(?<!single_)transformer_blocks\\.[0-9]+\\.norm1\\.linear|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_k|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_q|.*single_transformer_blocks\\.[0-9]+\\.norm\\.linear|.*single_transformer_blocks\\.[0-9]+\\.attn.to_k|.*single_transformer_blocks\\.[0-9]+\\.attn.to_q)"

  optimizer:
    type: "Prodigy"
    params:
      lr: 1
      use_bias_correction: true
      safeguard_warmup: true
      weight_decay: 0.01

  # (To use the AdamW optimizer, uncomment the following lines)
  # optimizer:
  #   type: AdamW
  #   params:
  #     lr: 1e-4
  #     weight_decay: 0.001
train/config/multi_condition.yaml
ADDED
@@ -0,0 +1,68 @@
flux_path: "black-forest-labs/FLUX.1-dev"
dtype: "bfloat16"

model:
  independent_condition: false

train:
  accumulate_grad_batches: 1
  dataloader_workers: 5
  save_interval: 1000
  sample_interval: 100
  max_steps: -1
  gradient_checkpointing: true # (Turn off for faster training)
  save_path: "runs"

  # Specify the types of condition to use.
  # Options: ["canny", "coloring", "deblurring", "depth", "depth_pred", "fill"]
  condition_type:
    - "canny"
    - "deblurring"
    - "depth"
    - "fill"
  dataset:
    type: "img"
    urls:
      # (Uncomment the following lines to use more data)
      # - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000040.tar"
      # - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000041.tar"
      # - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000042.tar"
      # - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000043.tar"
      # - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000044.tar"
      - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000045.tar"
      - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000046.tar"
    cache_name: "data_512_2M"
    condition_size:
      - 512
      - 512
    target_size:
      - 512
      - 512
    drop_text_prob: 0.1
    drop_image_prob: 0.1

  wandb:
    project: "OminiControl"

  lora_config:
    r: 4
    lora_alpha: 4
    init_lora_weights: "gaussian"
    target_modules: "(.*x_embedder|.*(?<!single_)transformer_blocks\\.[0-9]+\\.norm1\\.linear|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_k|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_q|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_v|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_out\\.0|.*(?<!single_)transformer_blocks\\.[0-9]+\\.ff\\.net\\.2|.*single_transformer_blocks\\.[0-9]+\\.norm\\.linear|.*single_transformer_blocks\\.[0-9]+\\.proj_mlp|.*single_transformer_blocks\\.[0-9]+\\.proj_out|.*single_transformer_blocks\\.[0-9]+\\.attn.to_k|.*single_transformer_blocks\\.[0-9]+\\.attn.to_q|.*single_transformer_blocks\\.[0-9]+\\.attn.to_v|.*single_transformer_blocks\\.[0-9]+\\.attn.to_out)"
    # (Uncomment the following line to train fewer parameters while keeping similar performance)
    # target_modules: "(.*(?<!single_)transformer_blocks\\.[0-9]+\\.norm1\\.linear|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_k|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_q|.*single_transformer_blocks\\.[0-9]+\\.norm\\.linear|.*single_transformer_blocks\\.[0-9]+\\.attn.to_k|.*single_transformer_blocks\\.[0-9]+\\.attn.to_q)"

  optimizer:
    type: "Prodigy"
    params:
      lr: 1
      use_bias_correction: true
      safeguard_warmup: true
      weight_decay: 0.01

  # (To use the AdamW optimizer, uncomment the following lines)
  # optimizer:
  #   type: AdamW
  #   params:
  #     lr: 1e-4
  #     weight_decay: 0.001
train/config/spatial_alignment.yaml
ADDED
@@ -0,0 +1,64 @@
flux_path: "black-forest-labs/FLUX.1-dev"
dtype: "bfloat16"

model:
  independent_condition: false

train:
  accumulate_grad_batches: 1
  dataloader_workers: 5
  save_interval: 1000
  sample_interval: 100
  max_steps: -1
  gradient_checkpointing: true # (Turn off for faster training)
  save_path: "runs"

  # Specify the type of condition to use.
  # Options: ["canny", "coloring", "deblurring", "depth", "depth_pred", "fill"]
  condition_type: "canny"
  dataset:
    type: "img"
    urls:
      # (Uncomment the following lines to use more data)
      # - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000040.tar"
      # - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000041.tar"
      # - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000042.tar"
      # - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000043.tar"
      # - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000044.tar"
      - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000045.tar"
      - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000046.tar"
    cache_name: "data_512_2M"
    condition_size:
      - 512
      - 512
    target_size:
      - 512
      - 512
    drop_text_prob: 0.1
    drop_image_prob: 0.1

  wandb:
    project: "OminiControl"

  lora_config:
    r: 4
    lora_alpha: 4
    init_lora_weights: "gaussian"
    target_modules: "(.*x_embedder|.*(?<!single_)transformer_blocks\\.[0-9]+\\.norm1\\.linear|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_k|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_q|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_v|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_out\\.0|.*(?<!single_)transformer_blocks\\.[0-9]+\\.ff\\.net\\.2|.*single_transformer_blocks\\.[0-9]+\\.norm\\.linear|.*single_transformer_blocks\\.[0-9]+\\.proj_mlp|.*single_transformer_blocks\\.[0-9]+\\.proj_out|.*single_transformer_blocks\\.[0-9]+\\.attn.to_k|.*single_transformer_blocks\\.[0-9]+\\.attn.to_q|.*single_transformer_blocks\\.[0-9]+\\.attn.to_v|.*single_transformer_blocks\\.[0-9]+\\.attn.to_out)"
    # (Uncomment the following line to train fewer parameters while keeping similar performance)
    # target_modules: "(.*(?<!single_)transformer_blocks\\.[0-9]+\\.norm1\\.linear|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_k|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_q|.*single_transformer_blocks\\.[0-9]+\\.norm\\.linear|.*single_transformer_blocks\\.[0-9]+\\.attn.to_k|.*single_transformer_blocks\\.[0-9]+\\.attn.to_q)"

  optimizer:
    type: "Prodigy"
    params:
      lr: 1
      use_bias_correction: true
      safeguard_warmup: true
      weight_decay: 0.01

  # (To use the AdamW optimizer, uncomment the following lines)
  # optimizer:
  #   type: AdamW
  #   params:
  #     lr: 1e-4
  #     weight_decay: 0.001
train/config/spatial_alignment_rotation.yaml
ADDED
@@ -0,0 +1,63 @@
flux_path: "black-forest-labs/FLUX.1-dev"
dtype: "bfloat16"

model:
  independent_condition: false

train:
  accumulate_grad_batches: 1
  dataloader_workers: 5
  save_interval: 1000
  sample_interval: 100
  max_steps: -1
  gradient_checkpointing: false # (Turn on to save memory at the cost of speed)
  save_path: "runs"

  # Specify the type of condition to use.
  # Options: ["canny", "coloring", "deblurring", "depth", "depth_pred", "fill"]
  condition_type: "canny"
  dataset:
    type: "img"
    urls:
      # (Uncomment the following lines to use more data)
      # - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000040.tar"
      # - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000041.tar"
      # - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000042.tar"
      # - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000043.tar"
      # - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000044.tar"
      - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000045.tar"
      - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000046.tar"
    cache_name: "data_512_2M"
    condition_size:
      - 512
      - 512
    target_size:
      - 512
      - 512
    drop_text_prob: 0.1
    drop_image_prob: 0.1

  wandb:
    project: "OminiControlRotation"

  rotation_adapter_config:
    r: 4
    num_rotations: 4
    target_modules: "(.*x_embedder|.*(?<!single_)transformer_blocks\\.[0-9]+\\.norm1\\.linear|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_k|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_q|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_v|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_out\\.0|.*(?<!single_)transformer_blocks\\.[0-9]+\\.ff\\.net\\.2|.*single_transformer_blocks\\.[0-9]+\\.norm\\.linear|.*single_transformer_blocks\\.[0-9]+\\.proj_mlp|.*single_transformer_blocks\\.[0-9]+\\.proj_out|.*single_transformer_blocks\\.[0-9]+\\.attn.to_k|.*single_transformer_blocks\\.[0-9]+\\.attn.to_q|.*single_transformer_blocks\\.[0-9]+\\.attn.to_v|.*single_transformer_blocks\\.[0-9]+\\.attn.to_out)"
    # (Uncomment the following line to train fewer parameters while keeping similar performance)
    # target_modules: "(.*(?<!single_)transformer_blocks\\.[0-9]+\\.norm1\\.linear|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_k|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_q|.*single_transformer_blocks\\.[0-9]+\\.norm\\.linear|.*single_transformer_blocks\\.[0-9]+\\.attn.to_k|.*single_transformer_blocks\\.[0-9]+\\.attn.to_q)"

  optimizer:
    type: "Prodigy"
    params:
      lr: 1
      use_bias_correction: true
      safeguard_warmup: true
      weight_decay: 0.01

  # (To use the AdamW optimizer, uncomment the following lines)
  # optimizer:
  #   type: AdamW
  #   params:
  #     lr: 1e-4
  #     weight_decay: 0.001
train/config/subject.yaml
ADDED
@@ -0,0 +1,54 @@
flux_path: "black-forest-labs/FLUX.1-dev"
dtype: "bfloat16"

model:
  independent_condition: false

train:
  accumulate_grad_batches: 1
  dataloader_workers: 5
  save_interval: 1000
  sample_interval: 100
  max_steps: -1
  gradient_checkpointing: true # (Turn off for faster training)
  save_path: "runs"

  # Specify the type of condition to use.
  condition_type: "subject"
  dataset:
    type: "subject"
    condition_size:
      - 512
      - 512
    target_size:
      - 512
      - 512
    image_size: 512
    padding: 8
    drop_text_prob: 0.1
    drop_image_prob: 0.1

  wandb:
    project: "OminiControl"

  lora_config:
    r: 16
    lora_alpha: 16
    init_lora_weights: "gaussian"
    target_modules: "(.*x_embedder|.*(?<!single_)transformer_blocks\\.[0-9]+\\.norm1\\.linear|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_k|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_q|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_v|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_out\\.0|.*(?<!single_)transformer_blocks\\.[0-9]+\\.ff\\.net\\.2|.*single_transformer_blocks\\.[0-9]+\\.norm\\.linear|.*single_transformer_blocks\\.[0-9]+\\.proj_mlp|.*single_transformer_blocks\\.[0-9]+\\.proj_out|.*single_transformer_blocks\\.[0-9]+\\.attn.to_k|.*single_transformer_blocks\\.[0-9]+\\.attn.to_q|.*single_transformer_blocks\\.[0-9]+\\.attn.to_v|.*single_transformer_blocks\\.[0-9]+\\.attn.to_out)"
    # (Uncomment the following line to train fewer parameters while keeping similar performance)
    # target_modules: "(.*(?<!single_)transformer_blocks\\.[0-9]+\\.norm1\\.linear|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_k|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_q|.*single_transformer_blocks\\.[0-9]+\\.norm\\.linear|.*single_transformer_blocks\\.[0-9]+\\.attn.to_k|.*single_transformer_blocks\\.[0-9]+\\.attn.to_q)"

  optimizer:
    type: "Prodigy"
    params:
      lr: 1
      use_bias_correction: true
      safeguard_warmup: true
      weight_decay: 0.01

  # (To use the AdamW optimizer, uncomment the following lines)
  # optimizer:
  #   type: AdamW
  #   params:
  #     lr: 1e-4
  #     weight_decay: 0.001
train/config/subject_rotation.yaml
ADDED
@@ -0,0 +1,53 @@
flux_path: "black-forest-labs/FLUX.1-dev"
dtype: "bfloat16"

model:
  independent_condition: false

train:
  accumulate_grad_batches: 1
  dataloader_workers: 5
  save_interval: 1000
  sample_interval: 100
  max_steps: -1
  gradient_checkpointing: false # (Turn on to save memory at the cost of speed)
  save_path: "runs"

  # Specify the type of condition to use.
  condition_type: "subject"
  dataset:
    type: "subject"
    condition_size:
      - 512
      - 512
    target_size:
      - 512
      - 512
    image_size: 512
    padding: 8
    drop_text_prob: 0.1
    drop_image_prob: 0.1

  wandb:
    project: "OminiControlRotation"

  rotation_adapter_config:
    r: 4
    num_rotations: 4
    target_modules: "(.*x_embedder|.*(?<!single_)transformer_blocks\\.[0-9]+\\.norm1\\.linear|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_k|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_q|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_v|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_out\\.0|.*(?<!single_)transformer_blocks\\.[0-9]+\\.ff\\.net\\.2|.*single_transformer_blocks\\.[0-9]+\\.norm\\.linear|.*single_transformer_blocks\\.[0-9]+\\.proj_mlp|.*single_transformer_blocks\\.[0-9]+\\.proj_out|.*single_transformer_blocks\\.[0-9]+\\.attn.to_k|.*single_transformer_blocks\\.[0-9]+\\.attn.to_q|.*single_transformer_blocks\\.[0-9]+\\.attn.to_v|.*single_transformer_blocks\\.[0-9]+\\.attn.to_out)"
    # (Uncomment the following line to train fewer parameters while keeping similar performance)
    # target_modules: "(.*(?<!single_)transformer_blocks\\.[0-9]+\\.norm1\\.linear|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_k|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_q|.*single_transformer_blocks\\.[0-9]+\\.norm\\.linear|.*single_transformer_blocks\\.[0-9]+\\.attn.to_k|.*single_transformer_blocks\\.[0-9]+\\.attn.to_q)"

  optimizer:
    type: "Prodigy"
    params:
      lr: 1
      use_bias_correction: true
      safeguard_warmup: true
      weight_decay: 0.01

  # (To use the AdamW optimizer, uncomment the following lines)
  # optimizer:
  #   type: AdamW
  #   params:
  #     lr: 1e-4
  #     weight_decay: 0.001
train/config/token_integration.yaml
ADDED
@@ -0,0 +1,63 @@
flux_path: "black-forest-labs/FLUX.1-dev"
dtype: "bfloat16"

model:
  independent_condition: false

train:
  accumulate_grad_batches: 1
  dataloader_workers: 5
  save_interval: 1000
  sample_interval: 100
  max_steps: -1
  gradient_checkpointing: true # (Turn off for faster training)
  save_path: "runs"

  # Specify the type of condition to use.
  condition_type: "token_intergration"
  dataset:
    type: "img"
    urls:
      # (Uncomment the following lines to use more data)
      # - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000040.tar"
      # - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000041.tar"
      # - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000042.tar"
      # - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000043.tar"
      # - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000044.tar"
      - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000045.tar"
      - "https://huggingface.co/datasets/jackyhate/text-to-image-2M/resolve/main/data_512_2M/data_000046.tar"
    cache_name: "data_512_2M"
    condition_size:
      - 512
      - 512
    target_size:
      - 512
      - 512
    drop_text_prob: 0.1
    drop_image_prob: 0.1

  wandb:
    project: "OminiControl"

  lora_config:
    r: 4
    lora_alpha: 4
    init_lora_weights: "gaussian"
    target_modules: "(.*x_embedder|.*(?<!single_)transformer_blocks\\.[0-9]+\\.norm1\\.linear|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_k|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_q|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_v|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_out\\.0|.*(?<!single_)transformer_blocks\\.[0-9]+\\.ff\\.net\\.2|.*single_transformer_blocks\\.[0-9]+\\.norm\\.linear|.*single_transformer_blocks\\.[0-9]+\\.proj_mlp|.*single_transformer_blocks\\.[0-9]+\\.proj_out|.*single_transformer_blocks\\.[0-9]+\\.attn.to_k|.*single_transformer_blocks\\.[0-9]+\\.attn.to_q|.*single_transformer_blocks\\.[0-9]+\\.attn.to_v|.*single_transformer_blocks\\.[0-9]+\\.attn.to_out)"
    # (Uncomment the following line to train fewer parameters while keeping similar performance)
    # target_modules: "(.*(?<!single_)transformer_blocks\\.[0-9]+\\.norm1\\.linear|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_k|.*(?<!single_)transformer_blocks\\.[0-9]+\\.attn\\.to_q|.*single_transformer_blocks\\.[0-9]+\\.norm\\.linear|.*single_transformer_blocks\\.[0-9]+\\.attn.to_k|.*single_transformer_blocks\\.[0-9]+\\.attn.to_q)"

  optimizer:
    type: "Prodigy"
    params:
      lr: 1
      use_bias_correction: true
      safeguard_warmup: true
      weight_decay: 0.01

  # (To use the AdamW optimizer, uncomment the following lines)
  # optimizer:
  #   type: AdamW
  #   params:
  #     lr: 1e-4
  #     weight_decay: 0.001
train/requirements.txt
ADDED
@@ -0,0 +1,15 @@
diffusers==0.31.0
transformers
peft
opencv-python
protobuf
sentencepiece
gradio
jupyter
torchao

lightning
datasets
torchvision
prodigyopt
wandb
train/script/data_download/data_download1.sh
ADDED
@@ -0,0 +1 @@
huggingface-cli download --repo-type dataset Yuanshi/Subjects200K
train/script/data_download/data_download2.sh
ADDED
@@ -0,0 +1,3 @@
+huggingface-cli download --repo-type dataset jackyhate/text-to-image-2M data_512_2M/data_000045.tar
+huggingface-cli download --repo-type dataset jackyhate/text-to-image-2M data_512_2M/data_000046.tar
+huggingface-cli download --repo-type dataset jackyhate/text-to-image-2M data_1024_10K/data_000000.tar
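By default, `huggingface-cli download` places these shards in the local Hugging Face cache (`~/.cache/huggingface/hub` unless `HF_HOME` is set); the `cache_name: "data_512_2M"` entry in the config above presumably keys into the same downloaded shards.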
train/script/train_compact_token_representation.sh
ADDED
@@ -0,0 +1,13 @@
+# *[Specify the GPU devices to use]
+# export CUDA_VISIBLE_DEVICES=0,1
+
+# *[Specify the config file path]
+export OMINI_CONFIG=./train/config/compact_token_representation.yaml
+
+# *[Specify the WANDB API key]
+# export WANDB_API_KEY='YOUR_WANDB_API_KEY'
+
+echo $OMINI_CONFIG
+export TOKENIZERS_PARALLELISM=true
+
+accelerate launch --main_process_port 41353 -m omini.train_flux.train_spatial_alignment
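The training scripts below all follow this same template: optionally restrict GPUs via CUDA_VISIBLE_DEVICES, point OMINI_CONFIG at the task's YAML config, optionally export a W&B API key, and launch the corresponding omini.train_flux module with accelerate. A typical invocation is simply `bash train/script/train_compact_token_representation.sh` from the repository root.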
train/script/train_feature_reuse.sh
ADDED
@@ -0,0 +1,13 @@
+# *[Specify the GPU devices to use]
+# export CUDA_VISIBLE_DEVICES=0,1
+
+# *[Specify the config file path]
+export OMINI_CONFIG=./train/config/feature_reuse.yaml
+
+# *[Specify the WANDB API key]
+# export WANDB_API_KEY='YOUR_WANDB_API_KEY'
+
+echo $OMINI_CONFIG
+export TOKENIZERS_PARALLELISM=true
+
+accelerate launch --main_process_port 41353 -m omini.train_flux.train_spatial_alignment
train/script/train_multi_condition.sh
ADDED
@@ -0,0 +1,13 @@
+# *[Specify the GPU devices to use]
+# export CUDA_VISIBLE_DEVICES=0,1
+
+# *[Specify the config file path]
+export OMINI_CONFIG=./train/config/multi_condition.yaml
+
+# *[Specify the WANDB API key]
+# export WANDB_API_KEY='YOUR_WANDB_API_KEY'
+
+echo $OMINI_CONFIG
+export TOKENIZERS_PARALLELISM=true
+
+accelerate launch --main_process_port 41353 -m omini.train_flux.train_multi_condition
train/script/train_spatial_alignment.sh
ADDED
@@ -0,0 +1,13 @@
+# *[Specify the GPU devices to use]
+# export CUDA_VISIBLE_DEVICES=0,1
+
+# *[Specify the config file path]
+export OMINI_CONFIG=./train/config/spatial_alignment.yaml
+
+# *[Specify the WANDB API key]
+# export WANDB_API_KEY='YOUR_WANDB_API_KEY'
+
+echo $OMINI_CONFIG
+export TOKENIZERS_PARALLELISM=true
+
+accelerate launch --main_process_port 41353 -m omini.train_flux.train_spatial_alignment
train/script/train_spatial_alignment_rotation.sh
ADDED
@@ -0,0 +1,13 @@
+# *[Specify the GPU devices to use]
+# export CUDA_VISIBLE_DEVICES=0,1
+
+# *[Specify the config file path]
+export OMINI_CONFIG=./train/config/spatial_alignment_rotation.yaml
+
+# *[Specify the WANDB API key]
+# export WANDB_API_KEY='YOUR_WANDB_API_KEY'
+
+echo $OMINI_CONFIG
+export TOKENIZERS_PARALLELISM=true
+
+accelerate launch --main_process_port 41353 -m omini.train_flux.train_spatial_alignment_rotation
train/script/train_subject.sh
ADDED
@@ -0,0 +1,13 @@
+# *[Specify the GPU devices to use]
+# export CUDA_VISIBLE_DEVICES=0,1
+
+# *[Specify the config file path]
+export OMINI_CONFIG=./train/config/subject.yaml
+
+# *[Specify the WANDB API key]
+# export WANDB_API_KEY='YOUR_WANDB_API_KEY'
+
+echo $OMINI_CONFIG
+export TOKENIZERS_PARALLELISM=true
+
+accelerate launch --main_process_port 41353 -m omini.train_flux.train_subject
train/script/train_subject_rotation.sh
ADDED
@@ -0,0 +1,13 @@
+# *[Specify the GPU devices to use]
+# export CUDA_VISIBLE_DEVICES=0,1
+
+# *[Specify the config file path]
+export OMINI_CONFIG=./train/config/subject_rotation.yaml
+
+# *[Specify the WANDB API key]
+# export WANDB_API_KEY='YOUR_WANDB_API_KEY'
+
+echo $OMINI_CONFIG
+export TOKENIZERS_PARALLELISM=true
+
+accelerate launch --main_process_port 41354 -m omini.train_flux.train_subject_rotation
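Note that this script launches on --main_process_port 41354 rather than 41353; presumably this lets it run alongside one of the other scripts, which all share port 41353, without an accelerate port collision.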
train/script/train_token_intergration.sh
ADDED
@@ -0,0 +1,13 @@
+# *[Specify the GPU devices to use]
+# export CUDA_VISIBLE_DEVICES=0,1
+
+# *[Specify the config file path]
+export OMINI_CONFIG=./train/config/token_integration.yaml
+
+# *[Specify the WANDB API key]
+# export WANDB_API_KEY='YOUR_WANDB_API_KEY'
+
+echo $OMINI_CONFIG
+export TOKENIZERS_PARALLELISM=true
+
+accelerate launch --main_process_port 41353 -m omini.train_flux.train_token_integration