package clustering;
import java.io.*;
import java.util.*;
/**
 * Post-clustering driver: scores HAC clusters against thresholds derived from
 * randomly generated clusters of the same sizes (Java path), or traces cluster
 * indices from an external python post-clustering step back to PMIDs.
 *
 * NOTE(review): the five path fields below are never assigned anywhere in this
 * file, so {@link #Procedure(boolean)} will currently pass {@code null} paths to
 * the helpers — they must be set before running. TODO confirm intended wiring.
 */
public class Post_Clustering {
	public static void main(String args[]){
		new Post_Clustering().Procedure(false);
		//new Post_Clustering().Procedure(true);
	}
	private String one_instance_id_file; // the one-instance pmid list
	private String hac_cluster_pmid_file; // output from last step
	private String random_cluster_pmid_file; // output file
	private String article_tiab_file; // output from the Main file under training_set package.
	private String hac_clusters_score_file; // output file

	/**
	 * Runs the post-clustering pipeline.
	 *
	 * @param python {@code false}: build random clusters matching the HAC cluster
	 *               sizes, derive similarity thresholds from them, then score the
	 *               HAC clusters into {@code hac_clusters_score_file};
	 *               {@code true}: map a python post-clustering result file back to
	 *               PMIDs (caller must fill in the two path arguments below).
	 */
	public void Procedure(boolean python){
		if(!python){
			// Build random clusters with the same size distribution as the HAC output.
			RandomClusters_Generation randomcluster = new RandomClusters_Generation();
			randomcluster.ReadIdList(one_instance_id_file); // one_instance_id file
			List<Integer> cluster_size = randomcluster.GetCluster_Size(hac_cluster_pmid_file); // hac_cluster_pmid_file
			randomcluster.GenerateClusters(random_cluster_pmid_file); // generate random_cluster_file

			// Profile the random clusters; thresholds come from their similarities.
			Cluster_Global_Profile cluster_profiles = new Cluster_Global_Profile();
			cluster_profiles.ReadClustersFromFile(random_cluster_pmid_file);
			cluster_profiles.ReadArticleTiab_fromFile(article_tiab_file);
			ArrayList<ArrayList<Ngram_Profile>> randomcluster_profiles_bigrams = cluster_profiles.BuildClusters_Global_Profile(2);
			ArrayList<ArrayList<Ngram_Profile>> randomcluster_profiles_trigrams = cluster_profiles.BuildClusters_Global_Profile(3);
			Thresholds threshold = new Thresholds();
			threshold.Get_Clusters_Similarity(cluster_size, Union(randomcluster_profiles_bigrams,randomcluster_profiles_trigrams));
			HashMap<String,Double> thresholds = threshold.GetThresholds();

			// Re-read the HAC clusters (the call appears to replace the profiler's
			// internal cluster state — TODO confirm), then score them.
			cluster_profiles.ReadClustersFromFile(hac_cluster_pmid_file);
			ArrayList<ArrayList<Ngram_Profile>> hac_cluster_bigrams = cluster_profiles.BuildClusters_Global_Profile(2);
			ArrayList<ArrayList<Ngram_Profile>> hac_cluster_trigrams = cluster_profiles.BuildClusters_Global_Profile(3);
			new Cluster_Similarity_by_GlobalProfile().GetClusters_Score_File(cluster_size, Union(hac_cluster_bigrams, hac_cluster_trigrams), hac_clusters_score_file, thresholds);
		}
		else{
			//After getting the post-cluster python file, we still need trace back pmids
			Cluster_Global_Profile cluster_profiles = new Cluster_Global_Profile();
			ArrayList<ArrayList<String>> hac_clusters = cluster_profiles.ReadClustersFromFile(hac_cluster_pmid_file);
			WritePost_Clusters_PMIDs(hac_clusters,"",""); // set post_cluster_python_file path, output: final_post_cluster_file
		}
	}

	/**
	 * Merges the inner lists of {@code list2} into the matching inner lists of
	 * {@code list1}, position by position.
	 *
	 * <p>NOTE: mutates {@code list1} in place and returns it; returns {@code null}
	 * when the outer sizes differ (kept for caller compatibility — callers do not
	 * currently check, so a mismatch surfaces as an NPE downstream).
	 *
	 * @return {@code list1} with each element extended, or {@code null} on size mismatch
	 */
	public ArrayList<ArrayList<Ngram_Profile>> Union(ArrayList<ArrayList<Ngram_Profile>> list1, ArrayList<ArrayList<Ngram_Profile>> list2){
		if(list1.size() != list2.size())
			return null;
		for(int i=0; i<list1.size(); i++)
			list1.get(i).addAll(list2.get(i));
		return list1;
	}

	/**
	 * Translates a python post-clustering file (each line: a label/index in
	 * column 0 followed by space-separated HAC cluster indices — TODO confirm the
	 * meaning of column 0, it is skipped) into a PMID file: one line per merged
	 * cluster, "count TAB pmid TAB pmid ...".
	 *
	 * <p>FIX: uses try-with-resources so the streams are closed even when an
	 * {@code IOException} escapes the loop (the original leaked both on error),
	 * and writes a newline after each record (the original fused all records
	 * into a single unparseable line).
	 *
	 * @param hac_clusters             PMID lists indexed by HAC cluster number
	 * @param post_cluster_python_file input produced by the python step
	 * @param post_cluster_pmid_file   output path (truncated before writing)
	 */
	public void WritePost_Clusters_PMIDs(ArrayList<ArrayList<String>> hac_clusters, String post_cluster_python_file, String post_cluster_pmid_file){
		try(BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(new File(post_cluster_python_file))));
				BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(new File(post_cluster_pmid_file), false)))){
			String line;
			while((line = br.readLine()) != null){
				line = line.trim();
				if(line.length() == 0)
					continue;
				String s[] = line.split(" ");
				// First pass: total number of PMIDs across the referenced clusters.
				int count = 0;
				for(int i=1; i<s.length; i++)
					count += hac_clusters.get(Integer.valueOf(s[i])).size();
				bw.write(String.valueOf(count));
				// Second pass: emit every PMID, tab-separated.
				for(int i=1; i<s.length; i++){
					for(String id: hac_clusters.get(Integer.valueOf(s[i])))
						bw.write("\t"+ id);
				}
				bw.newLine(); // record separator (missing in the original)
			}
		}catch(IOException e){
			e.printStackTrace();
		}
	}

}
