package preprocess;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.*;

import opennlp.tools.sentdetect.SentenceDetectorME;
import opennlp.tools.sentdetect.SentenceModel;


/**
 * Text-preprocessing utility: extracts sentences containing "cold",
 * strips non-alphanumeric characters, and removes stop words.
 *
 * Not thread-safe in general; only the lazy sentence-model load is
 * synchronized.
 */
public class Text_Clean {
	// Lowercased stop words. A Set replaces the original
	// HashMap<String,Integer> whose Integer values were never read.
	private final Set<String> stop_words = new HashSet<String>();
	private static final String SENTENCE_MODEL_NAME = "en-sent.bin"; // modify the path for this
	private SentenceModel smodel = null;

	public Text_Clean(){
		Build_Stop_Word("stopwords.txt"); // modify the path for this
	}

	/** Returns true if {@code w} is a stop word (case-insensitive). */
	public boolean Is_StopWord(String w){
		return stop_words.contains(w.toLowerCase());
	}

	/**
	 * Full pipeline: keep "cold" sentences of {@code line}, strip
	 * punctuation, then remove stop words.
	 */
	public String GetColdSentence_Tokenizer_StopWordRemoval(String line){
		return Remove_StopWords(cleanSentence(Get_Cold_Sentence(line)));
	}

	/**
	 * Loads one stop word per line from {@code filename}. Entries are
	 * trimmed and lowercased so they actually match the lowercased lookups
	 * in Is_StopWord/Remove_StopWords (the original stored them verbatim,
	 * so a mixed-case entry in the file could never match).
	 */
	public void Build_Stop_Word(String filename){
		// try-with-resources closes the reader even if readLine throws
		// (the original leaked the stream on exception). UTF-8 is pinned
		// instead of the platform default.
		try (BufferedReader br = new BufferedReader(new InputStreamReader(
				new FileInputStream(filename), StandardCharsets.UTF_8))) {
			String line;
			while((line = br.readLine()) != null){
				line = line.trim();
				if(line.length() > 0)
					stop_words.add(line.toLowerCase());
			}
		}catch(IOException e){
			// Best-effort: a missing/unreadable stop-word file leaves the
			// set empty, matching the original's behavior.
			e.printStackTrace();
		}
	}

	/**
	 * Lazily loads and caches the OpenNLP sentence model; synchronized so
	 * concurrent callers cannot load it twice.
	 *
	 * @return the cached model, or null if loading failed.
	 */
	private synchronized SentenceModel getSentenceModel()
	{
		if (smodel != null)
		{
			return smodel;
		}

		// try-with-resources: the original never closed modelIn.
		try (InputStream modelIn = new FileInputStream(SENTENCE_MODEL_NAME))
		{
			smodel = new SentenceModel(modelIn);
		}
		catch (IOException ioe)
		{
			//throw new GeneralException(ioe);
			ioe.printStackTrace();
		}

		return smodel;
	}

	/**
	 * Returns the cleaned sentences of {@code line} that contain the
	 * substring "cold" (case-insensitive), each followed by a single
	 * trailing space, concatenated in order.
	 */
	public String Get_Cold_Sentence(String line){
		StringBuilder sb = new StringBuilder();
		SentenceDetectorME sdm = new SentenceDetectorME(getSentenceModel());
		for(String sentence : sdm.sentDetect(line)){
			if(sentence.toLowerCase().contains("cold")){
				sb.append(cleanSentence(sentence));
				sb.append(' ');
			}
		}
		return sb.toString();
	}

	/**
	 * Removes stop words and single-character tokens from the
	 * space-separated {@code line}; surviving tokens are re-joined with
	 * single spaces.
	 *
	 * @return null for null input; "" when every token was filtered out
	 *         (the original threw StringIndexOutOfBoundsException there).
	 */
	public String Remove_StopWords(String line){
		if(line == null) return null;
		StringTokenizer st = new StringTokenizer(line," ");
		StringBuilder sb = new StringBuilder();
		while(st.hasMoreTokens()){
			String token = st.nextToken().trim();
			// Drop one-character tokens and (case-insensitive) stop words.
			if(token.length() > 1 && !stop_words.contains(token.toLowerCase()))
				sb.append(' ').append(token);
		}
		// Guard the empty-result case before stripping the leading space
		// (bug fix: the original called substring(1) unconditionally).
		return sb.length() == 0 ? "" : sb.substring(1);
	}

	/**
	 * Strips every character of {@code sen} that is not a letter, digit,
	 * space, or hyphen.
	 */
	public String cleanSentence(String sen){
		StringBuilder sb = new StringBuilder(sen.length());
		for(char c : sen.toCharArray()){
			if(c == ' ' || Character.isLetter(c) || Character.isDigit(c) || c == '-')
				sb.append(c);
		}
		return sb.toString();
	}
}
