package edu.uic.readfile;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.Reader;
import java.util.*;

import org.tartarus.snowball.SnowballStemmer;

import edu.stanford.nlp.ling.HasWord;
import edu.stanford.nlp.ling.Label;
import edu.stanford.nlp.ling.Sentence;
import edu.stanford.nlp.ling.TaggedWord;
import edu.stanford.nlp.tagger.maxent.*;

/**
 * Builds a vocabulary ("word set") from a directory of training files.
 *
 * <p>For each file the text is tokenized and POS-tagged with the Stanford
 * MaxentTagger, stop words and words with non-whitelisted POS tags are
 * dropped, the survivors are stemmed with the Snowball English stemmer,
 * and the stems are accumulated in {@link #wordset}, which is finally
 * persisted via {@code AllWord.savetoFile()}.
 */
public class WordSetConstructor
{
	/** Path to the pre-trained Stanford POS tagger model file. */
	public String modelpath = "bidirectional-wsj-0-18.tagger";
	/** Accumulates the stemmed vocabulary across all processed files. */
	public AllWord wordset = new AllWord();
	/** Supplies the stop-word list and the set of POS tags to keep. */
	public StopWordList stopwordlist = new StopWordList();


	/**
	 * Processes every file in the given directory through
	 * {@link #POSstemFile(Reader)} and then saves the accumulated word set.
	 *
	 * @param dirpath path of the directory containing the training files
	 */
	public void readFile(String dirpath)
	{
		try
		{
			File dir = new File(dirpath);

			String[] children = dir.list();
			if (children == null)
			{
				// list() returns null when dirpath is not a directory or an
				// I/O error occurs; the original code would have thrown an NPE.
				System.err.println("Cannot list directory: " + dirpath);
				return;
			}

			for (String trainingfile : children)
			{
				// Close each reader when done with it; the original leaked
				// one open file handle per training file.
				BufferedReader br = new BufferedReader(
						new FileReader(dirpath + "/" + trainingfile));
				try
				{
					POSstemFile(br);
				} finally
				{
					br.close();
				}
			}

			wordset.savetoFile();

		} catch (Exception e)
		{
			e.printStackTrace();
		}
	}

	/**
	 * Tokenizes and POS-tags the text from {@code reader}, filters out stop
	 * words and words whose POS tag is not whitelisted, stems the remaining
	 * words, and adds each stem to {@link #wordset}.
	 *
	 * <p>All exceptions are caught and logged, matching the class's
	 * best-effort error-handling style.
	 *
	 * @param reader source of the raw text to process (not closed here;
	 *               the caller owns the reader)
	 */
	public void POSstemFile(Reader reader)
	{
		try
		{
			// The Snowball English stemmer is loaded reflectively; use a
			// wildcard Class<?> rather than a raw type.
			Class<?> stemClass = Class
					.forName("org.tartarus.snowball.ext.englishStemmer");
			SnowballStemmer stemmer = (SnowballStemmer) stemClass.newInstance();

			// NOTE(review): loading the tagger model is expensive and happens
			// on every call; consider caching it in a field if this becomes a
			// bottleneck. Constructing it also initializes the static tagging
			// state used by the static methods below in this tagger version.
			MaxentTagger tagger = new MaxentTagger(modelpath);
			List<Sentence<? extends HasWord>> sentences = MaxentTagger
					.tokenizeText(reader);
			for (Sentence<? extends HasWord> sentence : sentences)
			{
				Sentence<TaggedWord> tSentence = MaxentTagger
						.tagSentence(sentence);
				// Enhanced-for replaces the original explicit Iterator loop.
				for (TaggedWord tagged : tSentence)
				{
					String word = tagged.word().toLowerCase();
					if (stopwordlist.stopwords.contains(word))
						continue; // skip stop words
					if (!stopwordlist.postags.contains(tagged.tag()))
						continue; // keep only whitelisted parts of speech
					stemmer.setCurrent(word);
					stemmer.stem();
					wordset.addword(stemmer.getCurrent());
				}
			}

		} catch (Exception e)
		{
			e.printStackTrace();
		}
	}



	/**
	 * Entry point. Currently only prints a marker; uncomment a driver call
	 * (e.g. {@code rf.readFile("file")}) to actually build the word set.
	 */
	public static void main(String[] args)
	{
		WordSetConstructor rf = new WordSetConstructor();
		System.out.println("begin");
		//rf.readFile("file");
	}

}
