package com.wutianyi.text;

import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

import org.apache.commons.lang.StringUtils;
import org.wltea.analyzer.IKSegmentation;
import org.wltea.analyzer.Lexeme;

import com.google.appengine.api.datastore.DatastoreServiceFactory;
import com.google.appengine.api.datastore.Entity;
import com.google.appengine.api.datastore.EntityNotFoundException;
import com.google.appengine.api.datastore.Key;
import com.google.appengine.api.datastore.KeyFactory;
import com.wutianyi.entity.EntityTypeEnum;
import com.wutianyi.utils.Utils;

/**
 * Static helpers that segment text with IK Analyzer, count which lexemes
 * occur adjacently (end position of one == begin position of the next), and
 * persist those co-occurrence counts as App Engine datastore entities of
 * kind {@code EntityTypeEnum.WordRelation}.
 */
public class TextUtils {

	/** Utility class — static methods only; not instantiable. */
	private TextUtils() {
	}

	/**
	 * Segments {@code content}, accumulates adjacency relations between
	 * lexemes into an in-memory map, and flushes the totals to the
	 * datastore in one batch.
	 *
	 * @param content text to segment; blank/null input is a no-op
	 * @return the total number of lexemes consumed from the segmenter
	 * @throws IOException if the segmenter fails reading the content
	 * @throws EntityNotFoundException propagated from the datastore update
	 */
	public static int updateRelations(String content) throws IOException,
			EntityNotFoundException {
		if (StringUtils.isBlank(content)) {
			return 0;
		}
		int count = 0;
		// outer word -> (following word -> co-occurrence count)
		Map<String, Map<String, Integer>> wHashs = new HashMap<String, Map<String, Integer>>();
		// second argument false: non-smart (fine-grained) segmentation,
		// which can emit overlapping lexemes handled by the inner loop below
		IKSegmentation seg = new IKSegmentation(new StringReader(content),
				false);
		Lexeme cur = seg.next();
		Lexeme next = null;
		Lexeme prev = null;

		while (null != cur) {

			if (null != prev) {
				if (prev.getEndPosition() == cur.getBeginPosition()) {
					// prev and cur are adjacent in the text: record the relation
					createRelationships(prev.getLexemeText(),
							cur.getLexemeText(), wHashs);
				}
			}

			next = seg.next();
			List<Lexeme> lexemes = new ArrayList<Lexeme>();
			// Drain every lexeme fully contained inside cur's span (overlap
			// produced by fine-grained segmentation); relate each to prev
			// when adjacent, and collect the cluster for pairwise handling.
			// NOTE(review): cur is re-added on every iteration, so a cluster
			// of k contained lexemes puts k copies of cur into the list and
			// multiSplits will then count (cur, next) pairs k times — this
			// looks like it inflates counts; confirm whether adding cur once
			// before the loop was intended.
			while (null != prev
					&& null != next
					&& (cur.getBeginPosition() <= next.getBeginPosition() && cur
							.getEndPosition() >= next.getEndPosition())) {
				lexemes.add(cur);
				if (prev.getEndPosition() == next.getBeginPosition()) {
					createRelationships(prev.getLexemeText(),
							next.getLexemeText(), wHashs);
				}
				lexemes.add(next);
				next = seg.next();
				count++;
			}
			multiSplits(lexemes, wHashs);

			prev = cur;
			cur = next;
			++count;
		}
		updateRelationShips(wHashs);
		return count;
	}

	/**
	 * Merges the accumulated counts in {@code words} into the datastore:
	 * existing {@code WordRelation} entities are fetched in one batch, each
	 * property (the following word) is incremented by the new count, and
	 * everything is written back in one batch put.
	 *
	 * @param words outer word -> (following word -> count delta)
	 * @throws EntityNotFoundException propagated from the datastore API
	 */
	private static void updateRelationShips(
			Map<String, Map<String, Integer>> words)
			throws EntityNotFoundException {
		// Fetch all existing entities in a single batch get
		List<Key> keys = new ArrayList<Key>();
		for (String k : words.keySet()) {
			keys.add(KeyFactory.createKey(EntityTypeEnum.WordRelation.name(), k));
		}
		Map<Key, Entity> mEntities = DatastoreServiceFactory
				.getDatastoreService().get(keys);
		// defensive: treat a missing result map as "no existing entities"
		if (null == mEntities) {
			mEntities = new HashMap<Key, Entity>();
		}
		List<Entity> rEntities = new ArrayList<Entity>();
		// Build the updated entities: one property per following word,
		// value = stored count + newly accumulated count
		for (Entry<String, Map<String, Integer>> word : words.entrySet()) {
			Key key = KeyFactory.createKey(EntityTypeEnum.WordRelation.name(),
					word.getKey());

			Entity entity = mEntities.get(key);
			if (null == entity) {
				entity = new Entity(key);
			}

			for (Entry<String, Integer> entry : word.getValue().entrySet()) {
				Object obj = entity.getProperty(entry.getKey());
				// missing or non-numeric stored value counts as zero
				int count = Utils.convertInt(obj, 0);
				entity.setProperty(entry.getKey(), count + entry.getValue());
			}
			rEntities.add(entity);
		}
		// single batch write for all touched entities
		DatastoreServiceFactory.getDatastoreService().put(rEntities);
	}

	/**
	 * Increments the co-occurrence count of {@code nWord} following
	 * {@code cWord} in the two-level map {@code wHashs}, creating the
	 * inner map and the entry on first sight.
	 */
	private static void createRelationships(String cWord, String nWord,
			Map<String, Map<String, Integer>> wHashs) {
		Map<String, Integer> wrelations = wHashs.get(cWord);
		if (null == wrelations) {
			wrelations = new HashMap<String, Integer>();
			wHashs.put(cWord, wrelations);
		}
		// single put instead of get/null-check/++/put; also avoids applying
		// ++ to a boxed Integer (needless unbox/rebox churn)
		Integer count = wrelations.get(nWord);
		wrelations.put(nWord, null == count ? 1 : count.intValue() + 1);
	}

	/**
	 * Records a relation for every ordered pair (i, j), i &lt; j, of lexemes
	 * in {@code lexemes} whose spans are adjacent in the original text.
	 * Used for the clusters of overlapping lexemes collected in
	 * {@link #updateRelations(String)}.
	 */
	private static void multiSplits(List<Lexeme> lexemes,
			Map<String, Map<String, Integer>> wHashs) {
		// no explicit empty-check needed: the loop body never runs for size 0
		int size = lexemes.size();
		for (int i = 0; i < size; i++) {
			Lexeme cur = lexemes.get(i);
			for (int j = i + 1; j < size; j++) {
				Lexeme next = lexemes.get(j);
				if (cur.getEndPosition() == next.getBeginPosition()) {
					createRelationships(cur.getLexemeText(),
							next.getLexemeText(), wHashs);
				}
			}
		}
	}

}
