package org.jiangwei.cmput696.entitylinking.algorithm;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import opennlp.tools.tokenize.Tokenizer;

import org.apache.lucene.document.Document;
import org.apache.lucene.queryParser.ParseException;
import org.jiangwei.cmput696.entitylinking.ELCandidate;
import org.jiangwei.cmput696.entitylinking.ELQueriedDocument;
import org.jiangwei.cmput696.entitylinking.ELQuery;
import org.jiangwei.cmput696.entitylinking.IELAlgorithm;
import org.jiangwei.cmput696.entitylinking.tacindex.TacHelper;
import org.jiangwei.cmput696.entitylinking.tacindex.TacKBService;

/**
 * Entity-linking algorithm that ranks knowledge-base candidates for a query
 * mention by an (unnormalized) dot product of tf-idf bag-of-words vectors of
 * the query document and each candidate's KB article.
 *
 * <p>Candidate generation currently uses only the alias-to-entity index (see
 * {@code AlgoHelper.queryAliasIndex}); fuzzy title search against the tac-kb
 * index is still a TODO.</p>
 */
public class SimpleCosineAlgorithm implements IELAlgorithm {

	/** Maximum number of ranked candidates returned per query. */
	private static final int MAX_CANDIDATES = 30;

	/**
	 * Trailing candidates scoring below this threshold are dropped; the
	 * top-ranked candidate is always kept regardless of score.
	 */
	private static final double MIN_SCORE = 0.1;

	/** Access to the TAC knowledge-base index. */
	private final TacKBService tacService;

	/**
	 * Creates the algorithm and opens the TAC KB index.
	 *
	 * @throws IOException if the underlying index cannot be opened
	 */
	public SimpleCosineAlgorithm() throws IOException {
		tacService = new TacKBService();
	}

	/**
	 * Generates and ranks KB candidates for the given query.
	 *
	 * @param elQuery the entity-linking query (mention surface plus document)
	 * @return candidates sorted by descending score, truncated and
	 *         score-filtered (see {@link #rank})
	 */
	@Override
	public List<ELCandidate> search(ELQuery elQuery) {
		ELQueriedDocument qDoc = elQuery.getDocument();

		// Look up possible entity names for the surface form in the
		// alias-to-entity (a2e) index.
		Set<String> psbNames = AlgoHelper.queryAliasIndex(qDoc.getSurface());

		// wiki name -> KB article for every alias hit that resolves to a
		// document in the TAC KB.
		Map<String, Document> candiMap = new HashMap<String, Document>();
		try {
			for (String wikiName : psbNames) {
				Document doc = tacService.getDocByWikiName(wikiName);
				if (doc != null) {
					candiMap.put(wikiName, doc);
				}
			}
			// TODO: also add candidates from a fuzzy title search of the
			// tac-kb index built by TacIndexConstructor
			// (TacKBService.searchWikiName).
		} catch (IOException e) {
			e.printStackTrace();
		}

		return rank(qDoc, candiMap);
	}

	/**
	 * Splits the surface form into seed strings for {@code BagOfWords}.
	 * Tokens longer than two characters (and always the first token) become
	 * seeds on their own; shorter tokens are buffered and emitted as a
	 * multi-word seed once the buffer exceeds four characters. Any leftover
	 * buffer is merged into the last seed.
	 *
	 * @param surface the mention's surface string
	 * @return lower-cased seed strings derived from {@code surface}
	 */
	private List<String> generateSeeds(String surface) {
		Tokenizer tokenizer = AlgoHelper.getTokenizer();

		List<String> seedList = new ArrayList<String>();
		StringBuilder pending = new StringBuilder();
		for (String token : tokenizer.tokenize(surface)) {
			if (seedList.isEmpty() || token.length() > 2) {
				seedList.add(token.toLowerCase());
			} else {
				// Short token: buffer it until the buffer is long enough to
				// stand as a seed of its own.
				pending.append(token.toLowerCase()).append(' ');
				if (pending.length() > 4) {
					seedList.add(pending.toString().trim());
					pending.setLength(0);
				}
			}
		}
		if (pending.length() > 0) {
			// Merge the leftover buffer into the last seed. trim() fixes the
			// original's bug of leaving the buffer's trailing separator
			// space on the seed.
			int last = seedList.size() - 1;
			seedList.set(last, (seedList.get(last) + " " + pending).trim());
		}
		return seedList;
	}

	/**
	 * Scores and ranks the candidates against the query document.
	 *
	 * @param qDoc     the query document (surface form plus source text)
	 * @param candiMap wiki name -> KB article of each candidate
	 * @return at most {@link #MAX_CANDIDATES} candidates in descending score
	 *         order, with trailing entries below {@link #MIN_SCORE} removed
	 *         (the top candidate is always kept)
	 */
	private List<ELCandidate> rank(ELQueriedDocument qDoc,
			Map<String, Document> candiMap) {

		String surface = qDoc.getSurface();
		List<String> seeds = generateSeeds(surface);

		// One bag-of-words per candidate; the article title is prepended to
		// the text so title tokens contribute to the similarity.
		List<BagOfWords> bagList = new ArrayList<BagOfWords>();
		for (Map.Entry<String, Document> kv : candiMap.entrySet()) {
			String title = kv.getValue().get(TacHelper.KEY_WIKI_TITLE);
			String text = title + ". " + kv.getValue().get(TacHelper.KEY_TEXT);
			bagList.add(new BagOfWords(kv.getKey(), text, surface, seeds));
		}
		// NOTE(review): the query text is concatenated with itself, which
		// doubles every term frequency; possibly "surface + text" was
		// intended to mirror the candidate side — confirm before changing.
		BagOfWords queryBag = new BagOfWords(null, qDoc.getText() + ". "
				+ qDoc.getText(), surface, seeds);
		List<ELCandidate> result = combineWithTFIDF(queryBag, bagList);

		// Sort by score, highest first. Double.compare gives a consistent
		// total order (the previous hand-rolled comparison mishandled NaN).
		Collections.sort(result, new Comparator<ELCandidate>() {
			@Override
			public int compare(ELCandidate left, ELCandidate right) {
				return Double.compare(right.getScore(), left.getScore());
			}
		});

		// Truncate to the top MAX_CANDIDATES, then drop trailing low-score
		// entries; index 0 is deliberately never removed.
		if (result.size() > MAX_CANDIDATES) {
			result = new ArrayList<ELCandidate>(
					result.subList(0, MAX_CANDIDATES));
		}
		int i = result.size() - 1;
		while (i > 0 && result.get(i).getScore() < MIN_SCORE) {
			result.remove(i);
			--i;
		}

		return result;
	}

	/**
	 * Builds one scored {@code ELCandidate} per candidate bag, scoring each
	 * by its tf-idf similarity to the query bag. Document frequencies are
	 * computed over the candidates plus the query document itself.
	 *
	 * @param queryBag bag-of-words of the query document
	 * @param bagList  bag-of-words of every candidate
	 * @return unsorted list of scored candidates, one per entry of
	 *         {@code bagList}
	 */
	private List<ELCandidate> combineWithTFIDF(BagOfWords queryBag,
			List<BagOfWords> bagList) {

		// Corpus size: all candidate documents plus the query document.
		double n = bagList.size() + 1;
		Map<String, Integer> dfMap = new HashMap<String, Integer>();
		for (BagOfWords bag : bagList) {
			updateDF(dfMap, bag);
		}
		updateDF(dfMap, queryBag);

		List<ELCandidate> result = new ArrayList<ELCandidate>(bagList.size());
		for (BagOfWords bag : bagList) {
			ELCandidate candidate = new ELCandidate();
			candidate.setAnswer(bag.getWikiName());
			candidate.setScore(similarity(queryBag, bag, dfMap, n));
			result.add(candidate);
		}
		return result;
	}

	/**
	 * Dot product of the tf-idf vectors of {@code a} and {@code b} over their
	 * common tokens.
	 *
	 * <p>NOTE(review): the vectors are not length-normalized, so despite the
	 * class name this is not a true cosine — longer documents score
	 * higher.</p>
	 *
	 * @param a     first bag (the query)
	 * @param b     second bag (a candidate)
	 * @param dfMap token -> document frequency (keys are lower-cased by
	 *              {@link #addToVector})
	 * @param n     corpus size used for the idf term
	 * @return the accumulated tf-idf dot product
	 */
	private double similarity(BagOfWords a, BagOfWords b,
			Map<String, Integer> dfMap, double n) {

		Map<String, Double> va = a.getVector();
		Map<String, Double> vb = b.getVector();

		double score = 0;
		for (Map.Entry<String, Double> entry : va.entrySet()) {
			String token = entry.getKey();
			Double tfb = vb.get(token);
			if (tfb != null) {
				// dfMap keys are lower-cased; a case mismatch here yields
				// null, which getTFIDF now tolerates (df treated as 1).
				Integer df = dfMap.get(token);
				score += getTFIDF(entry.getValue(), df, n)
						* getTFIDF(tfb, df, n);
			}
		}
		return score;
	}

	/**
	 * Computes tf * log(n / df), clamping df to at least 1.
	 *
	 * @param tf term frequency of the token in one document
	 * @param df document frequency of the token; may be {@code null} (the
	 *           original implementation NPE'd on null via Math.max)
	 * @param n  corpus size
	 * @return the tf-idf weight
	 */
	private Double getTFIDF(Double tf, Integer df, double n) {
		int docFreq = (df == null) ? 1 : Math.max(df.intValue(), 1);
		return tf * Math.log(n / docFreq);
	}

	/**
	 * Counts every distinct token of the bag once into the document-frequency
	 * map.
	 */
	private void updateDF(Map<String, Integer> dfMap, BagOfWords bag) {
		for (String token : bag.getVector().keySet()) {
			addToVector(dfMap, token);
		}
	}

	/**
	 * Increments the count of {@code token} (lower-cased) in {@code vector},
	 * starting at 1 for an unseen token.
	 */
	private void addToVector(Map<String, Integer> vector, String token) {
		token = token.toLowerCase();
		Integer count = vector.get(token);
		vector.put(token, count == null ? 1 : count + 1);
	}

}
