package edu.uta.cse6339.facetedinterface.classifier.src.facetedExplorationMain;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.HashSet;

public class FacetedExplorationHelper
{
	// Paths to pre-processed (stemmed) keyword/stopword files. Each loader below
	// reads one of these files into a static collection on FacetedExplorationMain.
	String stateAbbrInputFile = "/home/lakshmanas/workspace/cse6339-faceted-interface/data/input/key_stop_words/state_abbr_stemmed.txt";
	String stopwordsInputFile = "/home/lakshmanas/workspace/cse6339-faceted-interface/data/input/key_stop_words/stopwords_stemmed.txt";
	String hateCrimeInputFile = "/home/lakshmanas/workspace/cse6339-faceted-interface/data/input/key_stop_words/hateCrime_keywords_stemmed.txt";
	String colKeywordsInputFile = "/home/lakshmanas/workspace/cse6339-faceted-interface/data/input/key_stop_words/col_keywords_stemmed.txt";
	String universityKeywordsInputFile = "/home/lakshmanas/workspace/cse6339-faceted-interface/data/input/key_stop_words/UniversityNameList_stemmed.txt";
	
	/** Loads the state-abbreviation file into FacetedExplorationMain.rowKeyWords / stateAbbrKeywords. */
	public void getRowKeyWords()
	{
		processRowKeywordFile(stateAbbrInputFile);
	}
	
	/** Loads the stopword list (one word per line) into FacetedExplorationMain.stopWords. */
	public void getStopWords()
	{
		processKnownFile(FacetedExplorationMain.stopWords, stopwordsInputFile);
	}
	
	/** Loads the column-keyword list (one word per line) into FacetedExplorationMain.colKeyWords. */
	public void getColKeywords()
	{
		processKnownFile(FacetedExplorationMain.colKeyWords, colKeywordsInputFile);
	}
	
	/** Loads the hate-crime keyword list (one word per line) into FacetedExplorationMain.hateCrimeWords. */
	public void getHateCrimeKeywords()
	{
		processKnownFile(FacetedExplorationMain.hateCrimeWords, hateCrimeInputFile);
	}
	
	/** Tokenizes the university-name file into FacetedExplorationMain.universityKeyWords. */
	public void getUniversityKeywords()
	{
		tokenizeFile(FacetedExplorationMain.universityKeyWords, universityKeywordsInputFile);
	}
	
	/**
	 * Reads the given file, splits every line on whitespace, and adds each
	 * non-empty token (lower-cased) to the supplied set.
	 *
	 * Fix: the original split on "\\s" (exactly ONE whitespace character), so
	 * runs of whitespace and blank lines produced empty-string tokens that were
	 * added to the set. Splitting on "\\s+" after trimming, and skipping empty
	 * tokens, avoids polluting the set with "".
	 */
	private void tokenizeFile(HashSet<String> set, String inputFilePath)
	{
		try
		{
			BufferedReader br = new BufferedReader(new FileReader(inputFilePath));
			try
			{
				String line;
				while((line = br.readLine()) != null)
				{
					for(String token : line.trim().split("\\s+"))
					{
						if(!token.isEmpty())
						{
							set.add(token.toLowerCase());
						}
					}
				}
			}
			finally
			{
				br.close();
			}
		}
		catch(IOException ioe)
		{
			// Best-effort load: a missing/unreadable file leaves the set as-is.
			ioe.printStackTrace();
		}
	}
	
	/**
	 * Loads the state-abbreviation file. Each line is expected to hold a state
	 * name (one or more tokens) followed by its stemmed keyword as the LAST
	 * token. The keyword (lower-cased) goes into FacetedExplorationMain.rowKeyWords,
	 * and (state name -> keyword) into FacetedExplorationMain.stateAbbrKeywords.
	 *
	 * Fixes vs. the original:
	 * - Multi-token keys were built with a trailing space ("New York " instead
	 *   of "New York"), inconsistent with the two-token branch, so map lookups
	 *   with a normally-formatted name would miss. Keys are now joined without
	 *   a trailing separator, which also makes the special len == 2 branch
	 *   redundant.
	 * - Lines with fewer than two tokens (e.g. blank lines, which split to
	 *   [""]) previously produced a garbage ""/self mapping; they are skipped.
	 */
	private void processRowKeywordFile(String inputFilePath)
	{
		try
		{
			BufferedReader br = new BufferedReader(new FileReader(inputFilePath));
			try
			{
				String line;
				while((line = br.readLine()) != null)
				{
					String[] ls = line.trim().split("\\s+");
					int len = ls.length;
					if(len < 2)
					{
						continue; // blank or malformed line
					}
					// Key = all tokens but the last, joined by single spaces (case preserved).
					StringBuilder key = new StringBuilder();
					for(int i = 0; i < len - 1; i++)
					{
						if(i > 0)
						{
							key.append(' ');
						}
						key.append(ls[i]);
					}
					String keyword = ls[len - 1].toLowerCase();
					FacetedExplorationMain.rowKeyWords.add(keyword);
					FacetedExplorationMain.stateAbbrKeywords.put(key.toString(), keyword);
				}
			}
			finally
			{
				br.close();
			}
		}
		catch(IOException ioe)
		{
			// Best-effort load: a missing/unreadable file leaves the collections as-is.
			ioe.printStackTrace();
		}
	}
	
	/**
	 * Reads a one-word-per-line file and adds each word (trimmed, lower-cased)
	 * to the supplied set.
	 *
	 * Fix: the original added "" to the set for blank lines; those are now skipped.
	 */
	private void processKnownFile(HashSet<String> set, String inputFilePath)
	{
		try
		{
			BufferedReader br = new BufferedReader(new FileReader(inputFilePath));
			try
			{
				String line;
				while((line = br.readLine()) != null)
				{
					String word = line.trim().toLowerCase();
					if(!word.isEmpty())
					{
						set.add(word);
					}
				}
			}
			finally
			{
				br.close();
			}
		}
		catch(IOException ioe)
		{
			// Best-effort load: a missing/unreadable file leaves the set as-is.
			ioe.printStackTrace();
		}
	}
	
}
