package net.ming616.nlp.hit.utils;

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

import net.ming616.nlp.common.model.NLPWord;
import net.ming616.nlp.common.model.SRLNode;
import edu.hit.ir.ltpService.LTML;
import edu.hit.ir.ltpService.SRL;
import edu.hit.ir.ltpService.Word;

/**
 * Static helpers for working with HIT-LTP analysis results: converts LTP
 * {@link Word} / {@link LTML} output into the project's {@link NLPWord} model
 * and navigates the resulting dependency-parse tree (parent, children,
 * ancestors, siblings, root).
 *
 * <p>NOTE(review): several methods assume a node's parser-parent id is a
 * direct index into the word list (i.e. words are ordered so that
 * {@code words.get(i).getId() == i}) — confirm against the LTP output format.
 */
public class LTPUtils {

	/**
	 * Joins all values of the map into one tab-separated string.
	 * A trailing tab is intentionally kept to preserve the original format.
	 *
	 * @param wordMap map whose values are concatenated (keys are ignored);
	 *                iteration order is the map's own (LinkedHashMap callers
	 *                get insertion order)
	 * @return the values joined by '\t', with a trailing '\t'
	 */
	public static String convertMapToString(Map<String, String> wordMap) {
		StringBuilder builder = new StringBuilder();
		for (Entry<String, String> entry : wordMap.entrySet()) {
			builder.append(entry.getValue()).append('\t');
		}
		return builder.toString();
	}

	/**
	 * Recursively collects every not-yet-consumed descendant of {@code word}.
	 * Predicate nodes are never collected.
	 *
	 * <p>Side effect: each collected node is flagged via {@code setUsed(true)},
	 * so repeated calls over the same list never return a node twice.
	 *
	 * @param word  the node whose descendants are wanted
	 * @param words the full word list (parent ids index into this list)
	 * @return descendants in depth-first order, deepest nodes first
	 */
	public static List<NLPWord> getAllChildren(NLPWord word, List<NLPWord> words) {
		List<NLPWord> results = new ArrayList<NLPWord>();
		for (NLPWord candidate : words) {
			if (candidate.isUsed()) {
				continue;
			}
			NLPWord parent = getParent(candidate, words);
			if (null != parent && !candidate.isPredicate()
					&& parent.getId() == word.getId()) {
				// mark before recursing so the recursive scan skips this node
				candidate.setUsed(true);
				results.addAll(getAllChildren(candidate, words));
				results.add(candidate);
			}
		}
		return results;
	}

	/**
	 * Flattens all sentences of an LTML analysis result into a list of
	 * {@link NLPWord} nodes, copying segmentation, POS, WSD, NE, dependency
	 * and (for predicates) semantic-role-labeling information.
	 *
	 * @param ltml the LTP analysis result
	 * @return one NLPWord per word, in sentence order then word order
	 */
	public static List<NLPWord> convertToNLPWord(LTML ltml) {
		List<NLPWord> wordNodeList = new ArrayList<NLPWord>();
		int sentNum = ltml.countSentence();
		for (int i = 0; i < sentNum; ++i) {
			ArrayList<Word> wordList = ltml.getWords(i);
			for (Word word : wordList) {
				NLPWord node = new NLPWord();
				node.setId(word.getID());
				node.setText(word.getWS());
				node.setPos(word.getPOS());
				node.setWsd(word.getWSD());
				node.setWsdxp(word.getWSDExplanation());
				node.setNe(word.getNE());
				node.setParent(word.getParserParent());
				node.setRelate(word.getParserRelation());
				node.setPredicate(word.isPredicate());
				if (word.isPredicate()) {
					// only predicates carry semantic-role arguments
					ArrayList<SRL> srls = word.getSRLs();
					ArrayList<SRLNode> srlNodes = new ArrayList<SRLNode>();
					for (SRL srl : srls) {
						SRLNode srlNode = new SRLNode();
						srlNode.setBegin(srl.beg);
						srlNode.setEnd(srl.end);
						srlNode.setType(srl.type);
						srlNodes.add(srlNode);
					}
					node.setSrls(srlNodes);
				}
				wordNodeList.add(node);
			}
		}
		return wordNodeList;
	}

	/**
	 * Renders all attributes of a word as a single tab-separated line.
	 *
	 * @param word the LTP word
	 * @return the values of {@link #convertWordToMap(Word)} joined by tabs
	 */
	public static String convertWordToColl(Word word) {
		return convertMapToString(convertWordToMap(word));
	}

	/**
	 * Copies a word's attributes into an insertion-ordered map, so callers can
	 * address fields by name while keeping a stable output order.
	 *
	 * @param word the LTP word
	 * @return attribute name → value; includes an "slr" entry only for
	 *         predicates
	 */
	public static Map<String, String> convertWordToMap(Word word) {
		Map<String, String> map = new LinkedHashMap<String, String>();
		map.put("id", String.valueOf(word.getID()));
		map.put("text", word.getWS());
		map.put("pos", word.getPOS());
		map.put("name_entity", word.getNE());
		map.put("wsd", word.getWSD());
		map.put("wsd_explanation", word.getWSDExplanation());
		map.put("parser_parent", String.valueOf(word.getParserParent()));
		map.put("parser_relation", word.getParserRelation());
		if (word.isPredicate()) {
			StringBuilder builder = new StringBuilder();
			builder.append("(");
			for (SRL srl : word.getSRLs()) {
				builder.append(srl.type).append("[").append(srl.beg)
						.append(",").append(srl.end).append("] ");
			}
			builder.append(")");
			// key is spelled "slr" (sic) — kept as-is because consumers read it
			map.put("slr", builder.toString());
		}
		return map;
	}

	/**
	 * Formats a word as "text/pos/wsd". Reads the word directly instead of
	 * building a full attribute map first. (The "conver" typo in the method
	 * name is kept for backward compatibility with existing callers.)
	 *
	 * @param word the LTP word
	 * @return "text/pos/wsd"
	 */
	public static String converWordToString(Word word) {
		return word.getWS() + "/" + word.getPOS() + "/" + word.getWSD();
	}

	/**
	 * Returns the ancestors of a node, nearest ancestor first.
	 *
	 * <p>NOTE(review): assumes the parse tree is acyclic; a malformed parent
	 * chain with a cycle would loop forever — confirm upstream guarantees.
	 *
	 * @param node     the starting node (not included in the result)
	 * @param question the question whose word list defines the tree
	 * @return ancestors ordered from closest to the root
	 */
	public final static List<NLPWord> getAncestors(NLPWord node,
			HITQuestion question) {
		List<NLPWord> results = new ArrayList<NLPWord>();
		// climb the tree, collecting nearest ancestors first
		NLPWord parent = getParent(node, question);
		while (parent != null) {
			results.add(parent);
			parent = getParent(parent, question);
		}
		return results;
	}

	/**
	 * Returns the direct children of a node within a question's word list.
	 * Delegates to the list-based overload for consistent null handling.
	 *
	 * @param node     the parent node
	 * @param question the question whose word list defines the tree
	 * @return the direct children, in word-list order
	 */
	public final static List<NLPWord> getChildren(NLPWord node,
			HITQuestion question) {
		return getChildren(node, question.getWords());
	}

	/**
	 * Returns the direct children of a node: every word whose parser parent
	 * has the same id as {@code word}.
	 *
	 * @param word  the parent node
	 * @param words the full word list
	 * @return the direct children, in word-list order
	 */
	public static List<NLPWord> getChildren(NLPWord word, List<NLPWord> words) {
		List<NLPWord> results = new ArrayList<NLPWord>();
		for (NLPWord candidate : words) {
			NLPWord parent = getParent(candidate, words);
			if (null != parent && parent.getId() == word.getId()) {
				results.add(candidate);
			}
		}
		return results;
	}

	/**
	 * Returns the parent of a node, or {@code null} when the node is null,
	 * has no parent id, or is the root (parent id &lt; 0).
	 *
	 * @param node  the node (may be null)
	 * @param words the full word list; the parent id is used as a direct index
	 * @return the parent node, or null for root/unknown
	 */
	public static NLPWord getParent(NLPWord node, List<NLPWord> words) {
		if (null == node) {
			return null;
		}
		Integer parentId = node.getParent();
		if (null == parentId || parentId < 0) {
			return null;
		}
		return words.get(parentId);
	}

	/**
	 * Returns the parent of a node within a question's word list. Delegates
	 * to the list-based overload (and therefore tolerates a null node).
	 *
	 * @param node     the node
	 * @param question the question whose word list defines the tree
	 * @return the parent node, or null for the root
	 */
	public final static NLPWord getParent(NLPWord node, HITQuestion question) {
		return getParent(node, question.getWords());
	}

	/**
	 * Returns the root of the question's parse tree (the node whose parent id
	 * is -1), or null when no such node exists.
	 *
	 * @param question the question whose word list defines the tree
	 * @return the root node, or null
	 */
	public final static NLPWord getRoot(HITQuestion question) {
		return getRoot(question.getWords());
	}

	/**
	 * Returns the root of the parse tree (the node whose parent id is -1),
	 * or null when no such node exists. A dependency parse has a single root,
	 * so the scan stops at the first match.
	 *
	 * @param words the full word list
	 * @return the root node, or null
	 */
	public final static NLPWord getRoot(List<NLPWord> words) {
		for (NLPWord wordNode : words) {
			if (wordNode.getParent() == -1) {
				return wordNode;
			}
		}
		return null;
	}

	/**
	 * Returns the siblings of a node: all other direct children of its parent.
	 * The root node (no parent) has no siblings.
	 *
	 * @param node  the node whose siblings are wanted (excluded from the result)
	 * @param words the full word list
	 * @return the siblings, in word-list order; empty for the root
	 */
	public static List<NLPWord> getSiblings(NLPWord node, List<NLPWord> words) {
		List<NLPWord> results = new ArrayList<NLPWord>();
		NLPWord parent = getParent(node, words);
		if (parent != null) {
			// all direct children of the parent, then drop the node itself
			results = getChildren(parent, words);
		}
		results.remove(node);
		return results;
	}

	/**
	 * Renders a word list as one node per line, preceded by a blank line.
	 *
	 * @param words the nodes to render
	 * @return "\n" followed by each node's toString() and a newline
	 */
	public static String toString(List<NLPWord> words) {
		StringBuilder builder = new StringBuilder();
		builder.append('\n');
		for (NLPWord nlpWord : words) {
			builder.append(nlpWord).append('\n');
		}
		return builder.toString();
	}

}
