package tools;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import naive2.BoW;
import edu.stanford.nlp.ling.CoreLabel;
import edu.stanford.nlp.ling.CoreAnnotations.LemmaAnnotation;
import edu.stanford.nlp.ling.CoreAnnotations.PartOfSpeechAnnotation;
import edu.stanford.nlp.ling.CoreAnnotations.SentencesAnnotation;
import edu.stanford.nlp.ling.CoreAnnotations.TextAnnotation;
import edu.stanford.nlp.ling.CoreAnnotations.TokensAnnotation;
import edu.stanford.nlp.pipeline.Annotation;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import edu.stanford.nlp.util.CoreMap;

//given a stop-words list, lemmatize it, convert it to a BoW, then output it to a file
//read a lemmatized stop-words file, return a BoW
/**
 * Utilities for stop-word handling: {@link #convert} lemmatizes a raw
 * stop-word list (one word per line) and writes the distinct lemmas to a
 * tab-separated file; {@link #stopBow} loads such a file back into a BoW.
 */
public class StopwordConverter {

	/**
	 * Lemmatizes every stop word in the source file and writes the resulting
	 * vocabulary (distinct lemmas, tab-separated) to the destination file.
	 *
	 * @param pipeline a StanfordCoreNLP pipeline; must include at least the
	 *                 "tokenize,ssplit,pos,lemma" annotators (see main below)
	 * @param src      path of the raw stop-word file, one word per line
	 * @param det      path of the destination file to write
	 */
	public static void convert(StanfordCoreNLP pipeline, String src, String det){
		try {
			String content = DocRetriever.getFileContents(src);
			// \r?\n handles both Unix and Windows line endings (the original
			// split on "\n" alone, leaving a trailing \r on Windows files).
			String[] stopwords = content.split("\\r?\\n");

			BoW bow = new BoW();

			// Lemmatize each stop word and collect every token's lemma.
			for (String s : stopwords) {
				String word = s.trim();
				if (word.isEmpty()) {
					continue; // skip blank lines instead of annotating them
				}
				Annotation document = new Annotation(word);
				pipeline.annotate(document);
				List<CoreMap> sentences = document.get(SentencesAnnotation.class);
				for (CoreMap sentence : sentences) {
					for (CoreLabel token : sentence.get(TokensAnnotation.class)) {
						String lemma = token.get(LemmaAnnotation.class);
						bow.putWord(lemma);
					}
				}
			}

			// Report the number of distinct lemmas produced.
			System.out.println(bow.getbag().keySet().size());

			// Join the distinct lemmas with tabs. StringBuilder avoids the
			// O(n^2) cost of repeated String concatenation in a loop.
			Map<String, Integer> bag = bow.getbag();
			StringBuilder list = new StringBuilder();
			for (String word : bag.keySet()) {
				list.append(word).append('\t');
			}

			File stopwordsFile = new File(det);
			// try-with-resources guarantees the writer is closed even if
			// write() throws (the original leaked it on failure).
			try (BufferedWriter output = new BufferedWriter(new FileWriter(stopwordsFile))) {
				output.write(list.toString());
			}

		} catch (Exception e) {
			// Best-effort tool: log and continue, matching the original contract.
			e.printStackTrace();
		}
	}

	/**
	 * Reads a previously written, tab-separated lemmatized stop-word file and
	 * returns its contents as a BoW.
	 *
	 * @param dir path of the lemmatized stop-word file
	 * @return the bag of stop words, or {@code null} if the file could not be read
	 */
	public static BoW stopBow(String dir){
		try {
			String content = DocRetriever.getFileContents(dir);
			String[] stopwords = content.split("\t");
			BoW bow = new BoW();
			for (String s : stopwords) {
				// Guard against empty tokens (e.g. an empty file splits to [""]).
				if (!s.isEmpty()) {
					bow.putWord(s);
				}
			}
			return bow;
		} catch (Exception e) {
			e.printStackTrace();
			return null;
		}
	}

	/**
	 * Ad-hoc driver: loads the lemmatized stop-word file. The one-time
	 * conversion that produced it is kept below, commented out.
	 */
	public static void main(String[] args){
		//Properties props = new Properties();
		//props.put("annotators", "tokenize,ssplit,pos,lemma");
		//StanfordCoreNLP pipeline = new StanfordCoreNLP(props);
		//StopwordConverter.convert(pipeline,"/Users/leiyao/Desktop/stopwords.txt","/Users/leiyao/Desktop/stopbow.txt");
		BoW stopBow = StopwordConverter.stopBow("/largedata1/cmput696/students/lyao1/stopbow.txt");
	}
}
