package training_set;
/*input files:
 * 1. rawfile: xml file downloaded from PubMed
 * 2. authority file: given in the pairwise_model package
 * 3. stopmesh file: given in the pairwise_model package
 * 4. cui profile: given in the pairwise_model package
 * 5. semantics profile: given in the pairwise_model package
 * 6. cold_idfile: search for "cold[tiab]" on PubMed and download all the PMIDs as a text file; it is the
 * 					input for the BasePair_Generation file too.
 * 7. related_graph file: the output of the python file.
 * output files:
 * 8: idfile: contains the pubmed ids for articles that have one instance of cold.
 * 9: trainingfile: set a path and name for this file.
 * 10: testfile: set a path and name for generating the test file.
 * 11. predictions: the prediction result for test data set.
 * 12. filtered_positive_file: the filtered positive pairs, for clustering usage.
 * 13. filtered_base_file: the filtered base pairs, for clustering usage.
 * 14. article_tiab_file: contains the tiab of articles.*/
import pairwise_model.*;
import preprocess.*;

import java.io.*;
import java.util.*;
public class Main {
	/**
	 * Pipeline driver: parses a PubMed XML export, builds positive/base
	 * article pairs, writes Weka ARFF training/test files, and runs the
	 * pairwise logistic-regression model.
	 *
	 * All file-path fields below must point at real files before running
	 * (several default to "" and must be filled in).
	 */
	public static void main(String[] args){
		// please initialize all the files paths first!!!
		new Main().Procedure();
	}

	// Input/output paths — see the header comment for where each comes from.
	private String rawfile ="pubmed_result.xml";          // PubMed XML export (input 1)
	private String idfile ="oneinstance_id.txt";          // output 8: PMIDs of articles with one instance of "cold"
	private String article_tiab_file = "";                // output 14: PMID<TAB>title<TAB>abstract per article
	private String authority_file = "authorityprofile.txt"; // input 2: author authority profile
	private String stop_mesh = "stopwords_mesh";          // input 3: MeSH stopword list
	private String cui_profile = "sentence_cuis_profile.txt";       // input 4: per-sentence CUI profile
	private String semantics_profile = "sentence_metasemantics.txt";// input 5: per-sentence semantics profile
	private String cold_idfile = ""; // input 6: all instances of cold
	private String related_graphfile = "";                // input 7: output of the python related-graph step
	private String trainingfile = "train.arff";           // output 9: Weka training set
	private String testfile ="test.arff";                 // output 10: Weka test set
	private String prediction_file;                       // output 11 — NOTE(review): null by default, must be set before TrainModel
	private String filtered_positive_file = "";           // output 12: filtered positive pairs, for clustering
	private String filtered_base_file = "";               // output 13: filtered base pairs, for clustering

	/**
	 * Runs the full pipeline end to end: parse, index, dump IDs/TIAB,
	 * build positive and base pair sets, emit ARFF files, train, predict.
	 */
	public void Procedure(){
		// get all the articles info from the raw PubMed XML
		List<ArticleUnit> articles = new One_Instance_Filter().XMLParser(rawfile);
		// build the PMID -> list-index lookup used by Get_Pairs_Info
		Build_Articles_Index(articles);
		// write down the id list of all the articles
		Write_PMIDs(articles, idfile);
		Write_TIAB(articles, article_tiab_file);
		// obtain the global positive pairs
		List<Pair> global_pairs = new Global_Positive_Set().Extract_Positive_Pairs(articles,
				rawfile, authority_file, stop_mesh, cui_profile, semantics_profile);
		// obtain the related positive pairs
		List<String> related_pairs_id = new Related_Pairs().RelatedPair(idfile);
		List<Pair> related_pairs = Get_Pairs_Info(related_pairs_id, articles);
		// union the global positive pairs and the related positive pairs
		List<Pair> positive_pairs = Pairs_Union(global_pairs, related_pairs);
		// obtain the base (negative) pairs
		List<String> base_pair_ids = new BasePair_Generation().GenerateBasePairs(cold_idfile, related_graphfile, positive_pairs, idfile);
		List<Pair> base_pairs = Get_Pairs_Info(base_pair_ids, articles);
		// write ARFF header for weka tools usage
		WriteHeader(trainingfile, "cold_training");
		// write down the positive (+1) and base (-1) pairs attribute values
		Write_Pairs(positive_pairs, trainingfile, "+1");
		Write_Pairs(base_pairs, trainingfile, "-1");
		// write all pairwise combinations for prediction usage
		WriteHeader(testfile, "cold_test");
		WriteAllPairsInfo(articles, testfile, "-1");
		/**************************************************************/
		// Begin training using logistic regression and predicting.
		Pairwise_Model model = new Pairwise_Model();
		model.ReadTraining(trainingfile);
		model.ReadTest(testfile);
		model.FilterTraining(filtered_positive_file, positive_pairs, true, positive_pairs.size());
		model.FilterTraining(filtered_base_file, base_pairs, false, positive_pairs.size());
		model.TrainModel(prediction_file, articles.size());
	}

	// PMID -> index into the articles list; populated by Build_Articles_Index.
	private HashMap<String,Integer> Articles_Index = new HashMap<String,Integer>();

	/** Indexes each article's PMID to its position in {@code articles}. */
	public void Build_Articles_Index(List<ArticleUnit> articles){
		for(int i=0; i<articles.size(); i++)
			Articles_Index.put(articles.get(i).getPmid(), i);
	}

	/**
	 * Writes one PMID per line to {@code writefile} (truncating any
	 * existing content) and reports the count on stdout.
	 */
	public static void Write_PMIDs(List<ArticleUnit> articles, String writefile){
		int count = 0;
		// try-with-resources closes the writer even if a write fails
		try(BufferedWriter bw = new BufferedWriter(
				new OutputStreamWriter(new FileOutputStream(new File(writefile), false)))){
			for(ArticleUnit article: articles){
				bw.write(article.getPmid());
				bw.newLine();
				count++;
			}
			System.out.println("There are "+ count +" files containing one instance of cold.");
		}catch(IOException e){
			e.printStackTrace();
		}
	}

	/**
	 * Writes PMID, title, and abstract (tab-separated, one article per
	 * line) to {@code writefile}, truncating any existing content.
	 */
	public static void Write_TIAB(List<ArticleUnit> articles, String writefile){
		int count = 0;
		try(BufferedWriter bw = new BufferedWriter(
				new OutputStreamWriter(new FileOutputStream(new File(writefile), false)))){
			for(ArticleUnit article: articles){
				bw.write(article.getPmid());
				bw.write("\t"+article.getTitle());
				bw.write("\t"+article.getAbstract_info());
				bw.newLine();
				count++;
			}
			System.out.println("There are "+ count +" files containing one instance of cold.");
		}catch(IOException e){
			e.printStackTrace();
		}
	}

	/**
	 * Appends one ARFF data row per pair to {@code writefile}; when
	 * {@code label} is non-null it is appended as the class column.
	 */
	public static void Write_Pairs(List<Pair> pairs, String writefile, String label){
		int count = 0;
		// append mode: rows go below the header written by WriteHeader
		try(BufferedWriter bw = new BufferedWriter(
				new OutputStreamWriter(new FileOutputStream(new File(writefile), true)))){
			for(Pair pair: pairs){
				bw.write(pair.PairSummary());
				if(label != null)
					bw.write(","+label);
				bw.newLine();
				count++;
			}
			System.out.println(count+ " pairs have been written.");
		}catch(IOException e){
			e.printStackTrace();
		}
	}

	/**
	 * Resolves each "pmid1\tpmid2" id string into a scored {@link Pair}.
	 * Ids that are malformed or reference unknown PMIDs are skipped.
	 * Requires {@link #Build_Articles_Index} to have been called first.
	 */
	public List<Pair> Get_Pairs_Info(List<String> pairs_id, List<ArticleUnit> articles){
		List<Pair> pairs = new ArrayList<Pair>();
		if(pairs_id == null || pairs_id.size()==0)
			return pairs;
		// Scorers are stateless across pairs, so build each of them once
		// (the original rebuilt three of them on every iteration).
		pairwise_name Names = new pairwise_name(authority_file);
		pairwise_mesh pm = new pairwise_mesh(stop_mesh);
		Pairwise_CUI pcui = new Pairwise_CUI(cui_profile);
		Pairwise_CUI psemantics = new Pairwise_CUI(semantics_profile);
		Global_Positive_Set gps = new Global_Positive_Set();
		Pairwise_Cosine pcos = new Pairwise_Cosine();
		Pairwise_Bigram pbig = new Pairwise_Bigram();
		for(String pair_id: pairs_id){
			String[] ids = pair_id.split("\t");
			if(ids.length != 2)
				continue;
			if(Articles_Index.containsKey(ids[0]) && Articles_Index.containsKey(ids[1])){
				ArticleUnit a1 = articles.get(Articles_Index.get(ids[0]));
				ArticleUnit a2 = articles.get(Articles_Index.get(ids[1]));
				int name_score = Names.Get_AuthorScore(a1, a2);
				int journal_score = gps.Get_Journal_Score(a1.getJournal(), a2.getJournal());
				int mesh_score = pm.CountCommon(a1.getMesh(), a2.getMesh());
				int majormesh_score = pm.CountCommon(a1.getMajorMesh(), a2.getMajorMesh());
				double cosine = pcos.BuildProfile(a1.getCold_Sentence(), a2.getCold_Sentence());
				int cui_score = pcui.Common_CUI_Score(a1.getPmid(), a2.getPmid());
				int semantics = psemantics.Common_CUI_Score(a1.getPmid(), a2.getPmid());
				int bigrams = pbig.CommonBigram(a1.getBigram_profile(), a2.getBigram_profile());
				pairs.add(new Pair(ids[0], ids[1], name_score, mesh_score, majormesh_score, journal_score, cosine, cui_score, semantics, bigrams));
			}
		}
		return pairs;
	}

	/**
	 * Appends an ARFF data row for every unordered article pair (i &lt; j)
	 * to {@code writefile}; {@code label}, when non-null, is the class
	 * column. Feature column order matches the header from WriteHeader.
	 */
	public void WriteAllPairsInfo(List<ArticleUnit> articles, String writefile, String label){
		int count = 0;
		try(BufferedWriter bw = new BufferedWriter(
				new OutputStreamWriter(new FileOutputStream(new File(writefile), true)))){
			// Build each scorer once instead of once per pair — the inner
			// loop runs O(n^2) times.
			pairwise_name Names = new pairwise_name(authority_file);
			pairwise_mesh pm = new pairwise_mesh(stop_mesh);
			Pairwise_CUI pcui = new Pairwise_CUI(cui_profile);
			Pairwise_CUI psemantics = new Pairwise_CUI(semantics_profile);
			Global_Positive_Set gps = new Global_Positive_Set();
			Pairwise_Cosine pcos = new Pairwise_Cosine();
			Pairwise_Bigram pbig = new Pairwise_Bigram();
			for(int i=0; i<articles.size(); i++)
				for(int j=i+1; j<articles.size(); j++){
					count++;
					ArticleUnit a1 = articles.get(i);
					ArticleUnit a2 = articles.get(j);
					int name_score = Names.Get_AuthorScore(a1, a2);
					int mesh_score = pm.CountCommon(a1.getMesh(), a2.getMesh());
					int majormesh_score = pm.CountCommon(a1.getMajorMesh(), a2.getMajorMesh());
					int journal_score = gps.Get_Journal_Score(a1.getJournal(), a2.getJournal());
					double cosine = pcos.BuildProfile(a1.getCold_Sentence(), a2.getCold_Sentence());
					int cui_score = pcui.Common_CUI_Score(a1.getPmid(), a2.getPmid());
					int semantics = psemantics.Common_CUI_Score(a1.getPmid(), a2.getPmid());
					int bigrams = pbig.CommonBigram(a1.getBigram_profile(), a2.getBigram_profile());
					String summary = name_score+","+mesh_score+","+majormesh_score+","+journal_score+","
										+cosine+","+cui_score+","+semantics+","+bigrams;
					bw.write(summary);
					if(label != null)
						bw.write(","+label);
					bw.newLine();
				}
			System.out.println(count +" pairs have been written.");
		}catch(IOException e){
			e.printStackTrace();
		}
	}

	/**
	 * Appends the ARFF relation/attribute header for the pairwise feature
	 * vector. NOTE(review): append mode means re-running without deleting
	 * the old file duplicates the header — confirm this is intended.
	 */
	public void WriteHeader(String writefile, String relation){
		// Column order must match the rows produced by Pair.PairSummary()
		// and WriteAllPairsInfo.
		String[] attributes = {"name","mesh","majormesh","journal","cosine","cui","semantics","coldbigram"};
		try(BufferedWriter bw = new BufferedWriter(
				new OutputStreamWriter(new FileOutputStream(new File(writefile), true)))){
			bw.write("@relation "+relation);
			bw.newLine();
			bw.newLine();
			for(String attribute: attributes){
				// BUGFIX: "cui" and "semantics" were declared as "numberic",
				// which is not a valid ARFF type and breaks weka's loader.
				bw.write("@attribute "+attribute+" numeric");
				bw.newLine();
			}
			bw.write("@attribute class {+1,-1}");
			bw.newLine();
			bw.newLine();
			bw.write("@DATA");
			bw.newLine();
		}catch(IOException e){
			e.printStackTrace();
		}
	}

	/**
	 * Returns the union of two pair lists, treating (a,b) and (b,a) as the
	 * same pair. NOTE: mutates and returns {@code pairs1} when both lists
	 * are non-empty; callers in this file do not reuse the inputs afterwards.
	 */
	public List<Pair> Pairs_Union(List<Pair> pairs1, List<Pair> pairs2){
		if(pairs1 == null || pairs1.size()==0)
			return pairs2;
		if(pairs2 == null || pairs2.size()==0)
			return pairs1;
		HashMap<String,Integer> visited_pairs = new HashMap<String,Integer>();
		for(int i=0; i<pairs1.size(); i++)
			visited_pairs.put(pairs1.get(i).getPmid1()+"\t"+pairs1.get(i).getPmid2(), i);
		for(int i=0; i<pairs2.size(); i++){
			// check both orientations so a reversed duplicate is not re-added
			String key1 = pairs2.get(i).getPmid1()+"\t"+pairs2.get(i).getPmid2();
			String key2 = pairs2.get(i).getPmid2()+"\t"+pairs2.get(i).getPmid1();
			if(!visited_pairs.containsKey(key1) && !visited_pairs.containsKey(key2))
				pairs1.add(pairs2.get(i));
		}
		return pairs1;
	}

}
