package com.newegg.nlp.util;

import java.util.List;
import java.util.Properties;
import com.newegg.nlp.util.corenlp.Filter;
import com.newegg.nlp.util.corenlp.Result;
import edu.stanford.nlp.ling.CoreAnnotations.SentencesAnnotation;
import edu.stanford.nlp.pipeline.Annotation;
import edu.stanford.nlp.pipeline.StanfordCoreNLPClient;
import edu.stanford.nlp.semgraph.SemanticGraph;
import edu.stanford.nlp.semgraph.SemanticGraphEdge;
import edu.stanford.nlp.semgraph.SemanticGraphFactory;
import edu.stanford.nlp.simple.Document;
import edu.stanford.nlp.simple.Sentence;
import edu.stanford.nlp.util.CoreMap;

/**
 * Static helper around a remote Stanford CoreNLP server: configures per-language
 * pipeline properties, runs dependency parsing, and funnels each dependency edge
 * through a set of language-specific {@link Filter}s to build a {@link Result}.
 *
 * <p>Not thread-safe: {@code remote}, {@code pipeline} and {@code DEBUG} are
 * mutable static state. Call {@link #useRemote(String, int)} before {@link #parse}.
 */
public class CoreNLPHelp {
	static Remote remote;
	static boolean DEBUG = false;
	static StanfordCoreNLPClient pipeline;

	// Dependency-relation filters applied to every graph edge (English).
	static Filter[] filters_en = new Filter[] { 
		new com.newegg.nlp.util.corenlp.en.Filter_amod(),
		new com.newegg.nlp.util.corenlp.en.Filter_nsubj(),
		new com.newegg.nlp.util.corenlp.en.Filter_dep(),
		new com.newegg.nlp.util.corenlp.en.Filter_advcl(),
		new com.newegg.nlp.util.corenlp.en.Filter_obl()
	};
	
	// Dependency-relation filters applied to every graph edge (Chinese).
	static Filter[] filters_zh = new Filter[] {
		new com.newegg.nlp.util.corenlp.zh.Filter_amod(),
		new com.newegg.nlp.util.corenlp.zh.Filter_nsubj(),
		new com.newegg.nlp.util.corenlp.zh.Filter_dep(),
		new com.newegg.nlp.util.corenlp.zh.Filter_dobj()
	};

	/**
	 * Registers the remote CoreNLP endpoint and creates the client pipeline.
	 *
	 * NOTE(review): the pipeline is always built with ZH properties here, even
	 * though {@link #parse} accepts an explicit language — confirm this is
	 * intentional (English requests still go through a ZH-configured client).
	 *
	 * @param server CoreNLP server host
	 * @param port   CoreNLP server port
	 */
	public static void useRemote(String server, int port) {
		CoreNLPHelp.remote = new Remote(server, port);
		CoreNLPHelp.pipeline = new StanfordCoreNLPClient(properties(Language.ZH), remote.host, remote.port);
	}
	
	/** Enables/disables per-edge debug printing in {@link #parse}. */
	public static void useDEBUG(boolean flag) {
		CoreNLPHelp.DEBUG = flag;
	}
	
	/**
	 * Parses {@code text}, walks every enhanced++ dependency edge, collects
	 * VV/JJ-tagged tokens, and applies the language-specific filters.
	 *
	 * @param language target language (selects the filter set)
	 * @param text     input text; blank or null yields an empty {@link Result}
	 * @return accumulated filter results over all sentences
	 */
	public static Result parse(Language language, String text) {
		if (text == null || text.trim().isEmpty()) { return new Result(); }
		Properties props = properties(language);
		List<Sentence> sentences = getSentences(language, props, text);
		// Filter set is loop-invariant: select it once, not per edge.
		Filter[] filters = language == Language.ZH ? filters_zh : filters_en;
		Result results = new Result();
		for (Sentence sentence : sentences) {
			SemanticGraph graph = sentence.dependencyGraph(props, SemanticGraphFactory.Mode.ENHANCED_PLUS_PLUS);
			for (SemanticGraphEdge graphEdge : graph.edgeIterable()) {
				if (DEBUG) {
					System.out.println(String.format("%s[%s]  ==> %s(%s)  ->  %s(%s)", 
							graphEdge.getRelation().getShortName(),
							graphEdge.getRelation().getLongName(),
							
							graphEdge.getSource().value(),
							graphEdge.getSource().tag(),
							
							graphEdge.getTarget().value(),
							graphEdge.getTarget().tag()
						));
				}
				// Collect verbs (VV) and adjectives (JJ) from either end of the edge.
				if ("VV".equals(graphEdge.getSource().tag()) || "JJ".equals(graphEdge.getSource().tag())) {
					results.addJJ(graphEdge.getSource().value());
				}
				if ("VV".equals(graphEdge.getTarget().tag()) || "JJ".equals(graphEdge.getTarget().tag())) {
					results.addJJ(graphEdge.getTarget().value());
				}
				for (Filter filter : filters) {
					Result result = filter.filter(graphEdge, graph, sentence);
					results.add(result);
				}
			}
		}
		return results;
	}
	
	/**
	 * Builds the annotator/model configuration for the given language.
	 * ZH pins segmenter/POS/parser/dep-parse models explicitly; EN uses defaults.
	 */
	static Properties properties(Language language) {
		Properties properties = new Properties();
		if (language == Language.ZH) {
			properties.put("pipelineLanguage", "zh");
			properties.put("language", "chinese");
			properties.put("tokenize.language", "zh");
			properties.put("depparse.language", "chinese");
			properties.put("annotators", "tokenize, ssplit, pos, lemma, ner, parse, depparse, natlog, coref");
			properties.put("segment.model", "edu/stanford/nlp/models/segmenter/chinese/ctb.gz");
			properties.put("segment.serDictionary", "edu/stanford/nlp/models/segmenter/chinese/dict-chris6.ser.gz");
			properties.put("segment.sighanCorporaDict", "edu/stanford/nlp/models/segmenter/chinese");
			properties.put("pos.model", "edu/stanford/nlp/models/pos-tagger/chinese-distsim.tagger");
			properties.put("parse.model", "edu/stanford/nlp/models/srparser/chineseSR.ser.gz");
			properties.put("depparse.model", "edu/stanford/nlp/models/parser/nndep/UD_Chinese.gz");
		} else {
			properties.put("pipelineLanguage", "en");
			properties.put("annotators", "tokenize, ssplit, pos, lemma, ner, parse, depparse, natlog, coref");
			properties.put("tokenize.language", "en");
		}
		return properties;
	}
	
	/**
	 * Annotates {@code text} via the remote pipeline and returns its sentences.
	 * ZH input has spaces replaced by commas (segmenter treats spaces poorly);
	 * EN input is lower-cased.
	 */
	static List<Sentence> getSentences(Language language, Properties props, String text) {
		// BUG FIX: original tested (remote == null) and then dereferenced remote,
		// guaranteeing a NullPointerException whenever the guard fired. The server
		// must be registered only when a remote endpoint has been configured.
		if (remote != null) {
			Document.useServer(remote.host, remote.port, null, null, true);
		}
		if (language == Language.ZH) {
			text = text.replace(" ", ","); 
		} else {
			text = text.toLowerCase();
		}
		System.out.println(text);
		// Both branches of the original duplicated the four lines below verbatim.
		Document doc = new Document(props, text);
		Annotation annotation = pipeline.process(text);
		List<CoreMap> cores = annotation.get(SentencesAnnotation.class);
		return doc.formatSentences(cores);
	}
	
	/** Supported languages. */
	public static enum Language {
		EN, ZH
	}

	/** Simple host/port pair for the remote CoreNLP server. */
	public static class Remote {
		String host;
		int port;
		
		public Remote(String host, int port) {
			this.host = host;
			this.port = port;
		}
	}
}

