package tools.nlp;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import tools.FileReaderEnhance;
import tools.FileWriterEnhance;
import tools.ObjectTools;
import tools.twitter.TweetTidy;
import edu.stanford.nlp.ie.AbstractSequenceClassifier;
import edu.stanford.nlp.ie.crf.CRFClassifier;
import edu.stanford.nlp.ling.CoreAnnotations;
import edu.stanford.nlp.ling.CoreLabel;

/**
 * Batch driver: runs a pre-trained Stanford CRF NER classifier over every
 * tweet file in {@code data/_newData/plainText_filtered/}. For each input file
 * it writes (a) a human-readable token/TAG dump to
 * {@code plainText_filtered_NER_TEXT/} and (b) the per-tweet tag lists,
 * serialized via {@link tools.ObjectTools}, to {@code plainText_filtered_NER/}.
 *
 * <p>Input files are UTF-8, one tweet per line; each tweet is normalized with
 * {@link tools.twitter.TweetTidy#doTidyAll(String)} before classification.
 */
public class StanfordNER {
	/**
	 * Entry point. Loads the 7-class MUC model and processes all filtered
	 * tweet files.
	 *
	 * @param args unused
	 * @throws IOException if the input directory is missing/unreadable, or a
	 *                     read/write of the tweet files fails
	 */
	public static void main(String[] args) throws IOException {
		String serializedClassifier = "data/_StanfordNER/english.muc.7class.distsim.crf.ser.gz";
		// String serializedClassifier =
		// "data/_StanfordNER/english.all.3class.distsim.crf.ser.gz";
		// String serializedClassifier =
		// "data/_StanfordNER/english.conll.4class.distsim.crf.ser.gz";

		AbstractSequenceClassifier<CoreLabel> classifier = CRFClassifier
				.getClassifierNoExceptions(serializedClassifier);

		File inputDir = new File("data/_newData/plainText_filtered/");
		File[] inputFiles = inputDir.listFiles();
		// listFiles() returns null (not an empty array) when the path is not a
		// readable directory — fail with a clear message instead of an NPE.
		if (inputFiles == null) {
			throw new IOException("Cannot list input directory: " + inputDir.getAbsolutePath());
		}

		for (File file : inputFiles) {
			String[] tweets = FileReaderEnhance.readToString(file, "UTF-8").split("\n");
			StringBuilder sb = new StringBuilder();
			// One inner list of NER tags per tweet (line) in the file.
			ArrayList<ArrayList<String>> allNETags = new ArrayList<>();
			for (String tweet : tweets) {
				tweet = TweetTidy.doTidyAll(tweet);
				ArrayList<String> tags = new ArrayList<>();
				// classify() returns one token list per sentence in the tweet.
				for (List<CoreLabel> sentence : classifier.classify(tweet)) {
					for (CoreLabel token : sentence) {
						String tag = token.get(CoreAnnotations.AnswerAnnotation.class);
						// Chain appends rather than concatenating an
						// intermediate String per token.
						sb.append(token).append('/').append(tag).append('\t');
						tags.add(tag);
					}
				}
				allNETags.add(tags);
				sb.append('\n');
			}
			FileWriterEnhance fwe = new FileWriterEnhance("data/_newData/plainText_filtered_NER_TEXT/" + file.getName(), "UTF-8");
			// NOTE(review): fwe is never explicitly closed here — confirm that
			// FileWriterEnhance.WriteToFile flushes and closes the underlying
			// stream, otherwise output may be truncated.
			fwe.WriteToFile(sb.toString());
			ObjectTools.writeToFile(allNETags, "data/_newData/plainText_filtered_NER/" + file.getName());
		}
	}
}
