package sample.traditional.web;

import java.io.IOException;
import java.io.StringReader;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.lionsoul.jcseg.extractor.impl.TextRankKeywordsExtractor;
import org.lionsoul.jcseg.tokenizer.core.ADictionary;
import org.lionsoul.jcseg.tokenizer.core.DictionaryFactory;
import org.lionsoul.jcseg.tokenizer.core.ISegment;
import org.lionsoul.jcseg.tokenizer.core.IWord;
import org.lionsoul.jcseg.tokenizer.core.JcsegException;
import org.lionsoul.jcseg.tokenizer.core.JcsegTaskConfig;
import org.lionsoul.jcseg.tokenizer.core.SegmentFactory;
import org.lionsoul.jcseg.util.IStringBuffer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;

@Controller
public class JcsegController {

	private static final Logger LOG = LoggerFactory.getLogger(JcsegController.class);

	/**
	 * Extract keywords from {@code text} with Jcseg's TextRank extractor.
	 *
	 * @param text       the text to analyse; must be non-null and non-empty
	 * @param number     maximum number of keywords to return (default 100)
	 * @param autoFilter whether the extractor auto-filters candidate words
	 * @return {@code Result.success} carrying {"took": seconds, "keywords": [...]},
	 *         or {@code Result.fail(-1, ...)} on any error
	 */
	@RequestMapping("keyword")
	public @ResponseBody Result getKeywords(@RequestParam("text") String text,
			@RequestParam(name = "number", required = false, defaultValue = "100") Integer number,
			@RequestParam(name = "auto_filter", required = false, defaultValue = "false") Boolean autoFilter) {
		try {
			requireText(text);

			TextRankKeywordsExtractor extractor = new TextRankKeywordsExtractor(createSegment());
			extractor.setKeywordsNum(number);
			extractor.setAutoFilter(autoFilter);

			long startNanos = System.nanoTime();
			List<String> keywords = extractor.getKeywordsFromString(text);
			double elapsedSeconds = (System.nanoTime() - startNanos) / 1E9;

			Map<String, Object> map = new HashMap<String, Object>();
			map.put("took", round5(elapsedSeconds));
			map.put("keywords", keywords);
			return Result.success(map);
		} catch (Exception e) {
			LOG.error("keyword failed ", e);
			return Result.fail(-1, "Internal error...");
		}
	}

	/**
	 * Tokenize {@code text} and return the word list plus elapsed time.
	 *
	 * NOTE(review): ret_pos / ret_pinyin are accepted but currently ignored —
	 * the corresponding output code was already commented out in the original.
	 * The parameters are kept for API compatibility until part-of-speech and
	 * pinyin output are re-enabled.
	 */
	@RequestMapping("tokenizer")
	public @ResponseBody Result getTokenizer(@RequestParam("text") String text,
			@RequestParam(name = "ret_pos", required = false, defaultValue = "false") Boolean ret_pos,
			@RequestParam(name = "ret_pinyin", required = false, defaultValue = "false") Boolean ret_pinyin) {
		try {
			long startNanos = System.nanoTime();
			List<WordEntry> list = getTokenizer(text);
			double elapsedSeconds = (System.nanoTime() - startNanos) / 1E9;

			Map<String, Object> map = new HashMap<String, Object>();
			map.put("took", round5(elapsedSeconds));
			map.put("list", list);
			return Result.success(map);
		} catch (Exception e) {
			LOG.error("tokenizer failed ", e);
			return Result.fail(-1, e.getMessage());
		}
	}

	/**
	 * Cosine similarity of the two texts over their term-frequency vectors
	 * (1.0 = identical distribution, 0.0 = no terms in common).
	 */
	@RequestMapping("similarity")
	public @ResponseBody Result getSimilarity(@RequestParam("text1") String text1, @RequestParam("text2") String text2) {
		try {
			// word -> {count in text1, count in text2}
			Map<String, int[]> freq = new HashMap<String, int[]>();
			accumulate(getTokenizer(text1), freq, 0);
			accumulate(getTokenizer(text2), freq, 1);

			long dot = 0;
			long sq1 = 0;
			long sq2 = 0;
			for (int[] fq : freq.values()) {
				// widen before multiplying: int * int overflows for large counts
				dot += (long) fq[0] * fq[1];
				sq1 += (long) fq[0] * fq[0];
				sq2 += (long) fq[1] * fq[1];
			}
			// multiply as double — sq1 * sq2 could overflow long
			double norm = Math.sqrt((double) sq1 * sq2);
			// norm == 0 means a text produced no tokens; report 0.0 instead of NaN
			double similarity = norm == 0.0 ? 0.0 : dot / norm;
			return Result.success(similarity);
		} catch (Exception e) {
			LOG.error("similarity failed ", e);
			return Result.fail(-1, e.getMessage());
		}
	}

	/**
	 * Tokenize {@code text} with a COMPLEX-mode Jcseg segmenter.
	 *
	 * @throws IllegalArgumentException if text is null or empty
	 * @throws JcsegException           if the segmenter cannot be created
	 * @throws IOException              if segmentation fails while reading
	 */
	private List<WordEntry> getTokenizer(String text) throws JcsegException, IOException {
		requireText(text);
		ISegment seg = createSegment();
		seg.reset(new StringReader(text));

		List<WordEntry> list = new ArrayList<WordEntry>();
		IWord word;
		while ((word = seg.next()) != null) {
			WordEntry w = new WordEntry();
			w.setWord(word.getValue());
			w.setLength(word.getLength());
			w.setPosition(word.getPosition());
			list.add(w);
		}
		return list;
	}

	/** Reject null/empty input; message preserved from the original code. */
	private static void requireText(String text) {
		if (text == null || text.isEmpty()) {
			throw new IllegalArgumentException("参数错误。");
		}
	}

	/**
	 * Build a COMPLEX-mode segmenter over the default dictionary using a
	 * fresh JcsegTaskConfig.
	 *
	 * NOTE(review): the dictionary is reloaded on every request, which is
	 * expensive; consider caching the ADictionary once it is confirmed safe
	 * to share across concurrent segmenters.
	 */
	private static ISegment createSegment() throws JcsegException {
		JcsegTaskConfig config = new JcsegTaskConfig();
		// config.resetFromPropertyFile("/java/jcseg/jcseg.properties");
		ADictionary dic = DictionaryFactory.createDefaultDictionary(config);
		return SegmentFactory.createJcseg(JcsegTaskConfig.COMPLEX_MODE, new Object[] { config, dic });
	}

	/**
	 * Round a seconds value to 5 decimal places for the "took" field.
	 *
	 * Replaces the former DecimalFormat#format + Float.valueOf round-trip,
	 * which threw NumberFormatException in locales whose decimal separator
	 * is ',' (DecimalFormat formats with the JVM default locale).
	 */
	private static double round5(double seconds) {
		return Math.round(seconds * 1e5) / 1e5;
	}

	/** Fold term frequencies of {@code words} into slot {@code idx} (0 = text1, 1 = text2). */
	private static void accumulate(List<WordEntry> words, Map<String, int[]> freq, int idx) {
		for (WordEntry w : words) {
			int[] fq = freq.get(w.getWord());
			if (fq == null) {
				fq = new int[2];
				freq.put(w.getWord(), fq);
			}
			fq[idx]++;
		}
	}

	/**
	 * Word entry returned by the tokenizer endpoint.
	 *
	 * Declared static: it never touches the enclosing controller, so it does
	 * not need (and previously leaked) a hidden reference to it.
	 */
	public static class WordEntry {

		private String word = null;
		private int length = -1;
		private int position = -1;

		public String getWord() {
			return word;
		}

		public void setWord(String word) {
			this.word = word;
		}

		public int getLength() {
			return length;
		}

		public void setLength(int length) {
			this.length = length;
		}

		public int getPosition() {
			return position;
		}

		public void setPosition(int position) {
			this.position = position;
		}

		/** JSON-style rendering; unset fields (-1) are omitted. */
		@Override
		public String toString() {
			StringBuilder sb = new StringBuilder();
			sb.append('{');
			sb.append("\"word\":\"").append(word).append('"');
			if (length != -1) {
				sb.append(",\"length\":").append(length);
			}
			if (position != -1) {
				sb.append(",\"position\":").append(position);
			}
			sb.append('}');
			return sb.toString();
		}
	}
}