package thuai.ccf2013.driver;

import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import thu.ccf2013.listnet.PredictLinearNeuralModel;
import thu.ccf2013.listnet.TrainLinearNeuralModel;
import thu.ccf2013.wiki.Infobox;
import thuai.ccf2013.query.Entity;
import thuai.ccf2013.query.FeatureExtractor;
import thuai.ccf2013.query.Query;
import thuai.ccf2013.util.MyFileReader;
import thuai.ccf2013.util.MyFileWriter;
import thuai.ccf2013.util.MyStrUtil;

public class Driver {

	// Root directory containing all competition data files (Windows-style path).
	public static String DATADIR = "D:\\CCF2013\\";

	/**
	 * Entry point. Initializes the synonym/related-name dictionaries and the
	 * Lucene rank data, then runs the first linking round. The commented-out
	 * calls below are alternative pipeline stages kept for manual experiments.
	 */
	public static void main(String[] args) {
		long begin = System.currentTimeMillis();
		// build lookup dictionaries used by findAmbiguousEntity and features
		initRelatedNameDict();
		initExtendNameDict();
		initLuceneRank();
//		findEntity("query_0525_ALL.txt");
//		findEntity("query_0501.txt");
//		Map<String, List<String>> nameToId = listKBNames();
//		parseNameToId(nameToId);
		
//		accurateEntityNames("query_0501.txt");
//		SearchEngine.parseResult(DATADIR + "serp");
//		SearchEngine.parseResult();
//		linkFromMapper();
		
//		ruleBasedLinking("query_0501.txt");
		roundOne();
		
//		mergeRoundOneResult();
//		roundTwo();
//		Set<Infobox> infoboxSet = Infobox.loadFromFile(DATADIR + "KB_BD_nomerge_link.xml");
		
		long end = System.currentTimeMillis();
		System.out.println("Overall time consumption: " + (end - begin) + "ms");
	}
	
	/**
	 * Second linking round: trains a ListNet model on the gold links of
	 * query_0501 plus the round-one predictions for query_0525, then predicts
	 * links for the remaining entities and writes one result file per entity
	 * under result_round_2\.
	 */
	public static void roundTwo() {
		List<Query> queryList = Query.loadFromFile(DATADIR + "query_0501.txt");
		Set<Infobox> infoboxSet = Infobox.loadFromFile(DATADIR + "KB_BD_nomerge_link.xml");
		Map<String, Set<Infobox>> nameToId = listKBNames(infoboxSet);
		List<Entity> entityList = Query.mergeQueryList(queryList);
		// id -> infobox lookup table
		Map<String, Infobox> dict = new HashMap<String, Infobox>();
		for (Infobox box : infoboxSet) {
			dict.put(box.getId(), box);
		}
		
		List<Query> ql = Query.loadFromFile(DATADIR + "query_0525_ALL.txt");
		List<Entity> el = Query.mergeQueryList(ql);
		// entities already linked in round one join the training set
		Map<String, String> round1 = getRoundOneResult();
		for (Entity e : el) {
			if (round1.containsKey(e.getName())) {
				e.setKbLink(round1.get(e.getName()));
				entityList.add(e);
			}
		}
		
		// whatever moved to the training set no longer needs prediction
		el.removeAll(entityList);
			
		System.out.println(entityList.size());
		System.out.println(el.size());
		
		// one ranking list per usable training entity: gold infobox first,
		// followed by every other candidate
		List<List<double[]>> data = new ArrayList<List<double[]>>();
		for (Entity entity : entityList) {
			if (entity.getKbLink().equals("NIL")) {
				continue;
			}
			Set<Infobox> cand = findAmbiguousEntity(entity, nameToId);
			if (cand.size() == 0) continue;
			if (dict.containsKey(entity.getKbLink()) && 
					cand.contains(dict.get(entity.getKbLink()))) {
				List<double[]> listForEntity = new ArrayList<double[]>();
				Infobox target = dict.get(entity.getKbLink());
				listForEntity.add(FeatureExtractor.evaluate(entity, target));
				for (Infobox box : cand) {
					if (box.equals(target)) {
						continue;
					}
					listForEntity.add(FeatureExtractor.evaluate(entity, box));
				}
				PredictLinearNeuralModel.NormalizeData(listForEntity);
				data.add(listForEntity);
			}
			// else: gold link missing from the candidate set -- sample unusable
		}
		
		double[] w = TrainLinearNeuralModel.train(data);
		System.out.println(Arrays.toString(w));
		String dir = "result_round_2\\";
		int order = 1;
		for (Entity entity : el) {
			Set<Infobox> cand = findAmbiguousEntity(entity, nameToId);
			// skip entities with no candidates before doing any feature work
			if (cand.size() == 0) continue;
			List<Infobox> li = new ArrayList<Infobox>(cand);
			List<double[]> listForEntity = new ArrayList<double[]>();
			for (Infobox box : li) {
				listForEntity.add(FeatureExtractor.evaluate(entity, box));
			}
			PredictLinearNeuralModel.NormalizeData(listForEntity);
			int pos = PredictLinearNeuralModel.predict(w, listForEntity);
			String resultFile = order + "_" + entity.getName() + ".txt";
			order++;
			MyFileWriter mfw = new MyFileWriter(DATADIR + dir + resultFile, true);
			
			mfw.write(li.get(pos).toString() + "\n\n");
			mfw.write(entity.getParentQuery().getContent() + "\n");
			
			mfw.close();
		}
		
	}
	
	/**
	 * Loads the merged round-one linking result (entity name TAB kb id)
	 * produced by {@link #mergeRoundOneResult()}.
	 *
	 * @return map from entity name to linked knowledge-base id
	 */
	public static Map<String, String> getRoundOneResult() {
		Map<String, String> dict = new HashMap<String, String>();
		MyFileReader mfr = new MyFileReader(DATADIR + "link_round1.txt");
		String temp = null;
		while ((temp = mfr.getNextLine()) != null) {
			String[] seg = temp.split("\t");
			if (seg.length >= 2) {	// skip malformed lines instead of crashing
				dict.put(seg[0], seg[1]);
			}
		}
		mfr.close();	// reader was leaked before
		
		return dict;
	}
	
	/**
	 * Merges the per-entity result files of round one (result_iter1\) into a
	 * single tab-separated file link_round1.txt. The entity name comes from
	 * the file name (pattern ..._&lt;name&gt;.txt) and the linked id from the
	 * first line of each file. Conflicting links for the same name are
	 * reported; the first one wins.
	 *
	 * @return name -> id map, or null if the source directory is missing
	 */
	public static Map<String, String> mergeRoundOneResult() {
		String sourceDir = "result_iter1\\";
		File f = new File(DATADIR + sourceDir);
		if (!f.exists() || !f.isDirectory()) {
			return null;
		}
		String[] fl = f.list();
		Map<String, String> dict = new HashMap<String, String>();
		
		Pattern pat = Pattern.compile(".*_(.*)\\.txt");
		
		for (int i = 0; i < fl.length; i++) {
			MyFileReader mfr = new MyFileReader(DATADIR + sourceDir + fl[i]);
			String firstLine = mfr.getNextLine();
			mfr.close();
			if (firstLine == null) {	// empty file used to NPE on split()
				continue;
			}
			String[] seg = firstLine.split("\t");
			Matcher matcher = pat.matcher(fl[i]);
			if (matcher.find()) {
				String name = matcher.group(1);
				if (dict.containsKey(name)) {
					// duplicate entity: keep first link, flag a disagreement
					if (!seg[0].equals(dict.get(name))) {
						System.out.println("Not match!");
					}
				} else {
					dict.put(name, seg[0]);
				}
			} else {
				System.out.println("Cannot match " + fl[i]);
			}
		}
		MyFileWriter mfw = new MyFileWriter(DATADIR + "link_round1.txt", true);
		for (String key : dict.keySet()) {
			mfw.write(key + "\t" + dict.get(key) + "\n");
		}
		mfw.close();
		return dict;
	}
	
	/** Runs the first linking round: train a ListNet model, then apply it. */
	public static void roundOne() {
		testListNet(trainListNet());
	}
	
	// entity name -> synonym list harvested from files under gram\
	public static Map<String, List<String>> extendNameDict = null;
	// entity name -> related names loaded from gram_titt_all\
	public static Map<String, List<String>> tittNameDict = null;
	// entity name -> related names loaded from gram_summ_all\
	public static Map<String, List<String>> summNameDict = null;
	// query name -> (KB name -> occurrence count) from lucenesearch\ results
	public static Map<String, Map<String, Integer>> luceneRank = null;
	
	/**
	 * Loads Lucene search results from lucenesearch\: one file per query
	 * name, with the matched KB name in column 0 of each tab-separated line.
	 * Fills {@link #luceneRank} with, per query name, a map from KB name to
	 * the number of times it appeared in the result list.
	 */
	public static void initLuceneRank() {
		luceneRank = new HashMap<String, Map<String, Integer>>();
		
		String lucDir = "lucenesearch\\";
		
		File f = new File(DATADIR + lucDir);
		if (!f.exists() || !f.isDirectory()) {
			return;
		}
		String[] targetList = f.list();
		
		for (int i = 0; i < targetList.length; i++) {
			MyFileReader mfr = new MyFileReader(DATADIR + lucDir + targetList[i]);
			String temp = null;
			Map<String, Integer> map = new HashMap<String, Integer>();
			while ((temp = mfr.getNextLine()) != null) {
				String[] seg = temp.split("\t");
				// count occurrences of the KB name in column 0
				if (map.containsKey(seg[0])) {
					map.put(seg[0], map.get(seg[0]) + 1);
				} else {
					map.put(seg[0], 1);
				}
			}
			mfr.close();	// reader was leaked before
			// strip the extension to recover the query name; a dot-less file
			// name used to throw StringIndexOutOfBoundsException
			int dot = targetList[i].indexOf('.');
			String name = dot >= 0 ? targetList[i].substring(0, dot) : targetList[i];
			luceneRank.put(name, map);
		}
		
	}
	
	/**
	 * Builds {@link #tittNameDict} (from gram_titt_all\) and
	 * {@link #summNameDict} (from gram_summ_all\). Each directory contains
	 * one file per entity name; column 0 of every line is a related name.
	 * Note: the original returned early when the title directory was
	 * missing, silently skipping the summary dictionary; both are now
	 * loaded independently.
	 */
	public static void initRelatedNameDict() {
		tittNameDict = new HashMap<String, List<String>>();
		summNameDict = new HashMap<String, List<String>>();
		// both directories share the same layout, so one loader serves both
		loadNameDict(DATADIR + "gram_titt_all\\", tittNameDict);
		loadNameDict(DATADIR + "gram_summ_all\\", summNameDict);
	}
	
	/**
	 * Reads every file in {@code dir} and stores, keyed by the file name
	 * minus extension, the list of first-column values of its lines.
	 */
	private static void loadNameDict(String dir, Map<String, List<String>> dict) {
		File f = new File(dir);
		if (!f.exists() || !f.isDirectory()) {
			return;
		}
		String[] targetList = f.list();
		for (int i = 0; i < targetList.length; i++) {
			MyFileReader mfr = new MyFileReader(dir + targetList[i]);
			String temp = null;
			List<String> list = new ArrayList<String>();
			while ((temp = mfr.getNextLine()) != null) {
				String[] seg = temp.split("\t");
				list.add(seg[0]);
			}
			mfr.close();	// reader was leaked before
			String name = targetList[i].substring(0, targetList[i].indexOf("."));
			dict.put(name, list);
		}
	}
	
	/**
	 * Builds {@link #extendNameDict}: for every entity in both query files,
	 * picks the best-matching file name under gram\ (by containment and
	 * Levenshtein distance) and loads its synonym list (column 0 of lines
	 * with exactly two columns and a value longer than one character).
	 */
	public static void initExtendNameDict() {
		extendNameDict = new HashMap<String, List<String>>();
		String extendDir = "gram\\";
		File f = new File(DATADIR + extendDir);
		if (!f.exists() || !f.isDirectory()) {
			return;
		}
		String[] targetList = f.list();
		// normalize file names: strip ".txt" and lower-case
		for (int i = 0; i < targetList.length; i++) {
			if (targetList[i].endsWith(".txt")) {
				targetList[i] = targetList[i].substring(0, targetList[i].length() - 4).toLowerCase();
			}
		}
		List<Query> ql = Query.loadFromFile(DATADIR + "query_0501.txt");
		List<Entity> el = Query.mergeQueryList(ql);
		List<Query> queryList = Query.loadFromFile(DATADIR + "query_0525_ALL.txt");
		List<Entity> entityList = Query.mergeQueryList(queryList);
		entityList.addAll(el);
		for (Entity entity : entityList) {
			String target = "";
			String name = entity.getName();
			int minDis = name.length();
			// NOTE(review): minDis is never updated inside the loop, so a
			// containment match can be overridden by any later entry whose
			// distance is below name.length() — preserved as-is; confirm
			// whether this selection order is intended.
			for (int i = 0; i < targetList.length; i++) {
				if (name.equals(targetList[i]) || targetList[i].contains(name)) {
					target = targetList[i];
				}
				int dis = MyStrUtil.LevenshteinDistance(name, targetList[i]);
				if (dis < minDis || (dis == minDis && target.length() > targetList[i].length())) {
					target = targetList[i];
				}
			}
			if (target.length() > 0) {
				List<String> synonyms = new ArrayList<String>();
				MyFileReader mfr = new MyFileReader(DATADIR + extendDir + target + ".txt");
				String temp = null;
				while ((temp = mfr.getNextLine()) != null) {
					String[] seg = temp.split("\t");
					if (seg.length == 2 && seg[0].length() > 1) {
						synonyms.add(seg[0]);
					}
				}
				mfr.close();	// reader was leaked before
				extendNameDict.put(name, synonyms);
			}
		}
		System.out.println("Extend name dict init done");
	}
	
	/**
	 * Applies a trained ListNet weight vector to every non-NIL entity of
	 * query_0525_ALL and writes the top-ranked infobox plus the source query
	 * content to result//&lt;order&gt;_&lt;name&gt;.txt.
	 *
	 * @param w weight vector produced by {@link #trainListNet()}
	 */
	public static void testListNet(double[] w) {
		List<Query> queryList = Query.loadFromFile(DATADIR + "query_0525_ALL.txt");
		Set<Infobox> infoboxSet = Infobox.loadFromFile(DATADIR + "KB_BD_nomerge_nolink.xml");
		Map<String, Set<Infobox>> nameToId = listKBNames(infoboxSet);
		List<Entity> entityList = Query.mergeQueryList(queryList);
		// (the original also built an unused id->infobox map here; removed)
		int order = 1;
		String dir = "result//";
		for (Entity entity : entityList) {
			if (entity.getKbLink().equals("NIL")) {
				continue;
			}
			Set<Infobox> cand = findAmbiguousEntity(entity, nameToId);
			// skip entities with no candidates before any feature extraction
			if (cand.size() == 0) continue;
			List<Infobox> li = new ArrayList<Infobox>(cand);
			List<double[]> listForEntity = new ArrayList<double[]>();
			for (Infobox box : li) {
				listForEntity.add(FeatureExtractor.evaluate(entity, box));
			}
			PredictLinearNeuralModel.NormalizeData(listForEntity);
			int pos = PredictLinearNeuralModel.predict(w, listForEntity);
			String resultFile = order + "_" + entity.getName() + ".txt";
			order++;
			MyFileWriter mfw = new MyFileWriter(DATADIR + dir + resultFile, true);
			
			mfw.write(li.get(pos).toString() + "\n\n");
			mfw.write(entity.getParentQuery().getContent() + "\n");
			
			mfw.close();
		}
	}
		
	
	/**
	 * Trains a ListNet ranking model on query_0501: for each non-NIL entity
	 * whose gold infobox appears among its ambiguous candidates, builds a
	 * feature-vector list with the gold candidate first, normalizes it, and
	 * feeds all lists to the trainer. Prints coverage statistics.
	 *
	 * @return the learned weight vector
	 */
	public static double[] trainListNet() {
		List<Query> queryList = Query.loadFromFile(DATADIR + "query_0501.txt");
		Set<Infobox> infoboxSet = Infobox.loadFromFile(DATADIR + "KB_BD_nomerge_nolink.xml");
		Map<String, Set<Infobox>> nameToId = listKBNames(infoboxSet);
		List<Entity> entityList = Query.mergeQueryList(queryList);
		// id -> infobox lookup table
		Map<String, Infobox> dict = new HashMap<String, Infobox>();
		
		for (Infobox box : infoboxSet) {
			dict.put(box.getId(), box);
		}
		
		int availableSample = 0;	// entities whose gold link is among the candidates
		int nonnil = 0;				// entities with a non-NIL gold link
		int canCount = 0;			// total candidate count over usable samples
		List<List<double[]>> data = new ArrayList<List<double[]>>();
		for (Entity entity : entityList) {
			if (entity.getKbLink().equals("NIL")) {
				continue;
			}
			nonnil++;
//			Set<Infobox> cand = infoboxSet;
			Set<Infobox> cand = findAmbiguousEntity(entity, nameToId);
			if (dict.containsKey(entity.getKbLink()) && 
					cand.contains(dict.get(entity.getKbLink()))) {
				availableSample++;
				List<double[]> listForEntity = new ArrayList<double[]>();
				Infobox target = dict.get(entity.getKbLink());
				// the gold (relevant) candidate's features go first in the list
				double[] f = FeatureExtractor.evaluate(entity, target);
//				System.out.println(Arrays.toString(f));
				listForEntity.add(f);
				for (Infobox box : cand) {
					if (box.equals(target)) {
						continue;
					}
					f = FeatureExtractor.evaluate(entity, box);
					listForEntity.add(f);
				}
//				System.out.println(listForEntity.size());
				canCount += listForEntity.size();
				PredictLinearNeuralModel.NormalizeData(listForEntity);
				data.add(listForEntity);
			} else {
				// gold link not retrievable by the candidate search: log the name
				if (!cand.contains(dict.get(entity.getKbLink()))) {
					System.out.println(entity.getName());
				}
			}
		}
		System.out.println(canCount + "/" + nonnil + " = " + ((double)canCount / nonnil));
		System.out.println(availableSample + "/" + nonnil + " = " + ((double)availableSample / nonnil));
		System.out.println(data.size());
		double[] w = TrainLinearNeuralModel.train(data);
		System.out.println(Arrays.toString(w));
		return w;
	}
	
	/**
	 * Rule-based baseline linker: for each entity, ranks the ambiguous
	 * candidates via {@link #sort}, prepends NIL when there is no candidate
	 * or the top one fails {@link #validation}, and compares the prediction
	 * against the gold link. Mispredicted cases are dumped into error\ for
	 * manual inspection.
	 *
	 * @param queryFile name of the query file under {@link #DATADIR}
	 */
	public static void ruleBasedLinking(String queryFile) {
		List<Query> queryList = Query.loadFromFile(DATADIR + queryFile);
		Set<Infobox> infoboxSet = Infobox.loadFromFile(DATADIR + "KB_BD_nomerge_nolink.xml");
		Map<String, Set<Infobox>> nameToId = listKBNames(infoboxSet);
		List<Entity> entityList = Query.mergeQueryList(queryList);
		Map<String, Infobox> dict = new HashMap<String, Infobox>();
		
		for (Infobox box : infoboxSet) {
			dict.put(box.getId(), box);
		}
		
		String subdir = "error\\";
		
		// (removed the never-incremented correct1/correct2/all1/all2 counters)
		int correct = 0, all = 0;
		for (Entity entity : entityList) {
			Set<Infobox> cand = findAmbiguousEntity(entity, nameToId);
			List<Infobox> list = sort(entity, cand);
			all++;
			
			// NIL wins when nothing was found or the best candidate is implausible
			if (list.size() == 0) {
				list.add(Infobox.getNil());
			} else if (!validation(entity, list.get(0))) {
				list.add(0, Infobox.getNil());
			}
			if (entity.getKbLink().equals(list.get(0).getId())) {
				correct++;
			} else {
				// dump the failure case for error analysis
				String candidateFile = cand.size() + "_" + entity.getName() + "_" + entity.getKbLink()
						+ ".txt";
				MyFileWriter mfw = new MyFileWriter(DATADIR + subdir + candidateFile, true);
				
				mfw.write("Weibo content:\n");
				mfw.write(entity.getParentQuery().getContent() + "\n\n");
				
				mfw.write("Correct link:\n");
				if (entity.getKbLink().equals("NIL")) {
					mfw.write("NIL\n\n");
				} else {
					if (dict.containsKey(entity.getKbLink())) {
						mfw.write(dict.get(entity.getKbLink()).toString() + "\n\n");
					} else {
						System.out.println("Cannot find " + entity.getKbLink());
						if (dict.containsKey(entity.getKbLink().toLowerCase())) {
							System.out.println("Capital on?");
						}
					}
				}
				mfw.write("My Prediction:\n");
				
				for (Infobox box : list) {
					mfw.write(box.toString() + "\n\n");
				}
				mfw.close();
			}
		}
		System.out.println(correct + "/" + all + " = " + ((double)correct / all));
	}
	
	
	
	/**
	 * Heuristic sanity check that a candidate infobox plausibly matches the
	 * entity name, based on exact equality, relative length, and
	 * suffix-stripped comparison for very short names.
	 *
	 * @return true when the candidate is considered a plausible match
	 */
	public static boolean validation(Entity entity, Infobox box) {
		String entityName = entity.getName();
		String boxName = box.getName();
		String anchor = box.getAnchorText();
		
		// exact match against the box name or its anchor text
		if (entityName.equals(boxName) || entityName.equals(anchor)) {
			return true;
		}
		
		int shorter = Math.min(anchor.length(), boxName.length());
		int entityLen = entityName.length();
		// reject when one side is more than three times longer than the other
		if (entityLen < shorter && entityLen * 3 < shorter) {
			return false;
		}
		if (entityLen > shorter && entityLen > shorter * 3) {
			return false;
		}
		
		// a short box name contained in the entity name must equal the
		// entity name after suffix stripping
		if (entityName.contains(boxName) && shorter < 4) {
			return MyStrUtil.getRidOfSuffix(entityName).equals(boxName);
		}
		if (entityName.contains(anchor) && anchor.length() < 4) {
			return MyStrUtil.getRidOfSuffix(entityName).equals(anchor);
		}
		// both names short: compare all three after suffix stripping
		if (shorter < 4 && entityLen < 4) {
			String strippedEntity = MyStrUtil.getRidOfSuffix(entityName);
			return MyStrUtil.getRidOfSuffix(anchor).equals(strippedEntity)
					|| MyStrUtil.getRidOfSuffix(boxName).equals(strippedEntity);
		}
		
		return true;
	}
	
	/**
	 * Ranks the candidate set by the first feature value in ascending
	 * TreeMap key order; candidates with equal scores keep the order in
	 * which they were encountered.
	 */
	public static List<Infobox> sort(Entity entity, Set<Infobox> set) {
		Map<Double, List<Infobox>> buckets = new TreeMap<Double, List<Infobox>>();
		for (Infobox candidate : set) {
			Double score = FeatureExtractor.evaluate(entity, candidate)[0];
			List<Infobox> bucket = buckets.get(score);
			if (bucket == null) {
				bucket = new ArrayList<Infobox>();
				buckets.put(score, bucket);
			}
			bucket.add(candidate);
		}
		
		List<Infobox> ranked = new ArrayList<Infobox>();
		for (List<Infobox> bucket : buckets.values()) {
			ranked.addAll(bucket);
		}
		return ranked;
	}
	
	/**
	 * Collects all infoboxes whose KB name loosely matches the entity name:
	 * either one string extends the other exactly (Levenshtein distance
	 * equals the length difference), or a synonym from
	 * {@link #extendNameDict} overlaps the KB name by more than three
	 * characters (length sum minus distance &gt; 3).
	 *
	 * @param entity the entity to disambiguate
	 * @param dict   KB-name -> infobox set index built by {@link #listKBNames}
	 * @return the candidate infobox set (possibly empty)
	 */
	public static Set<Infobox> findAmbiguousEntity(Entity entity, Map<String, Set<Infobox>> dict) {
		Set<Infobox> set = new HashSet<Infobox>();
		String target = MyStrUtil.getRidOfSymbol(entity.getName());
		if (extendNameDict == null) initExtendNameDict();
		List<String> synonym = new ArrayList<String>();
		if (extendNameDict.containsKey(target)) {
			synonym.addAll(extendNameDict.get(target));
		}
		for (String name : dict.keySet()) {
			// single-character KB names match far too many things; skip
			// before computing the (costly) edit distance
			if (name.length() == 1) continue;
			int dis = MyStrUtil.LevenshteinDistance(name, target);
			// one string is an exact extension of the other
			if (name.length() + dis == target.length() || name.length() == dis + target.length()) {
				set.addAll(dict.get(name));
				continue;
			}
			for (String syn : synonym) {
				dis = MyStrUtil.LevenshteinDistance(syn, name);
				if (syn.length() + name.length() - dis > 3) {
					set.addAll(dict.get(name));
					// one matching synonym is enough; the original's
					// `continue` only restarted the synonym loop and
					// re-added the same set repeatedly
					break;
				}
			}
		}
		return set;
	}
	
	/**
	 * Diagnostic pass over a query file: measures how often the gold KB link
	 * is reachable through exact name lookup ("contained"/"correct") or the
	 * relaxed {@link #findAmbiguousEntity} search ("relax"), for both NIL
	 * and non-NIL entities, and writes every candidate set to candidate\.
	 *
	 * @param queryFile name of the query file under {@link #DATADIR}
	 */
	public static void accurateEntityNames(String queryFile) {
		List<Query> queryList = Query.loadFromFile(DATADIR + queryFile);
		Set<Infobox> infoboxSet = Infobox.loadFromFile(DATADIR + "KB_BD_nomerge_nolink.xml");
		Map<String, Set<Infobox>> nameToId = listKBNames(infoboxSet);
		List<Entity> entityList = Query.mergeQueryList(queryList);
		// (removed an unused id->infobox map the original built here)
		
		String candidateDir = "candidate\\";
		
		int correct = 0, contained = 0, nonnil = 0;
		int relax = 0, relaxCorrect = 0;
		int nil = 0, nilcon = 0, nilrelax = 0;
		// try each entity
		int pos = 1;
		for (Entity entity : entityList) {
			Set<Infobox> cand = new HashSet<Infobox>();
			if (!entity.getKbLink().equals("NIL")) {
				nonnil++;
				if (nameToId.containsKey(entity.getName())) {
					contained++;
					Set<Infobox> idList = nameToId.get(entity.getName());
					cand.addAll(idList);
					// compare ids explicitly: the set holds Infobox objects,
					// so the original contains(String) could never be true
					if (containsId(idList, entity.getKbLink())) {
						correct++;
					}
				} else {
					cand = findAmbiguousEntity(entity, nameToId);
					System.out.println("ambiguous: " + cand.size() + "\t" + entity.getName());
					if (cand.size() > 0) {
						relax++;
						if (containsId(cand, entity.getKbLink())) {
							relaxCorrect++;
						}
					}
				}
			} else {
				nil++;
				if (nameToId.containsKey(entity.getName())) {
					nilcon++;
				} else {
					cand = findAmbiguousEntity(entity, nameToId);
					System.out.println("ambiguous: " + cand.size() + entity.getName());
					if (cand.size() > 0) {
						nilrelax++;
					}
				}
			}
			String candidateFile = pos + "_" + cand.size() + "_" + entity.getName() + "_" + entity.getKbLink()
					+ ".txt";
			MyFileWriter mfw = new MyFileWriter(DATADIR + candidateDir + candidateFile, true);
			for (Infobox box : cand) {
				mfw.write(box.toString() + "\n\n");
			}
			mfw.close();	// writer was leaked before
			pos++;
		}
		System.out.println(nonnil + " non NIL links with " + contained + 
				" contained in which " + correct + " are correct");
		System.out.println("After relaxing, " + relax + " more entities are found with " 
				+ relaxCorrect + " correct");
		System.out.println(nil + " NIL links with " + nilcon + " contained and " + nilrelax + " relaxed");
	}
	
	/** Returns true when any infobox in the set carries the given id. */
	private static boolean containsId(Set<Infobox> boxes, String id) {
		for (Infobox box : boxes) {
			if (box.getId().equals(id)) {
				return true;
			}
		}
		return false;
	}
	
	/**
	 * Writes a histogram of name ambiguity to NameStat.txt: for each
	 * ambiguity count (number of ids sharing a name, ascending), the names
	 * with that count, tab-separated on one line.
	 */
	public static void parseNameToId(Map<String, List<String>> nameToId) {
		Map<Integer, List<String>> counter = new TreeMap<Integer, List<String>>();
		for (String name : nameToId.keySet()) {
			Integer size = nameToId.get(name).size();
			List<String> bucket = counter.get(size);
			if (bucket == null) {
				bucket = new ArrayList<String>();
				counter.put(size, bucket);
			}
			bucket.add(name);
		}
		MyFileWriter mfw = new MyFileWriter(DATADIR + "NameStat.txt", true);
		for (Integer time : counter.keySet()) {
			mfw.write(time + "\n");
			for (String name : counter.get(time)) {
				mfw.write(name + "\t");
			}
			mfw.write("\n\n");
		}
		mfw.close();
	}
	
	/**
	 * Builds an index from every possible surface name of a knowledge item
	 * (its name, its anchor-text fragments, and name-like attribute values)
	 * to the set of infoboxes carrying that name. Also dumps all names per
	 * infobox to Knowledge_Names.txt (tab-separated, one infobox per line).
	 *
	 * @param set the knowledge-base infoboxes
	 * @return surface name -> infobox set
	 */
	public static Map<String, Set<Infobox>> listKBNames(Set<Infobox> set) {
		Map<String, Set<Infobox>> nameToId = new HashMap<String, Set<Infobox>>();
		// attribute keys whose values are alternative names
		String[] tags = new String[] {
			"中文名", "外文名", "别名", "中文名称", "中文学名", "外文名称", "别称", "原名",
			"作品名称", "公司名称", "二名法", "英文名称", "其它译名", "名称", "中文队名",
			"外文队名", "作品别名", "中文全名", "英文全名", "其他名称", "又名", "西医学名", 
			"谥号", "英文名", "书名", "拉丁学名", 
		};
		String splitter = "._-,\".!，。——·“”<>《》()（）[]【】";
		
		MyFileWriter mfw = new MyFileWriter(DATADIR + "Knowledge_Names.txt", true);
		for (Infobox box : set) {
			Set<String> names = new HashSet<String>();
			// the original name plus anchor-text fragments
			names.add(box.getName());
			names.addAll(MyStrUtil.multiCut(box.getAnchorText(), splitter));
			// name-like attribute values
			Map<String, String> facts = box.getFacts();
			for (String tag : tags) {
				if (facts.containsKey(tag)) {
					names.addAll(MyStrUtil.multiCut(facts.get(tag), splitter));
				}
			}
			
			StringBuilder sb = new StringBuilder();
			for (String name : names) {
				sb.append(name).append("\t");
				Set<Infobox> idList = nameToId.get(name);
				if (idList == null) {
					idList = new HashSet<Infobox>();
					nameToId.put(name, idList);
				}
				idList.add(box);
			}
			// drop the trailing tab before writing the line
			String line = sb.toString();
			mfw.write(line.substring(0, line.length() - 1) + "\n");
		}
		mfw.close();
		return nameToId;
	}
	
	/**
	 * Candidate-retrieval outcome used by findEntity: Perfect (single
	 * correct candidate, or correctly empty for a NIL entity), Find (gold
	 * link present among several candidates, or spurious candidates for a
	 * NIL entity), Miss (gold link absent from the candidate set).
	 */
	public enum State {
		Perfect, Find, Miss
	};
	
	/**
	 * For every entity of the given query file, retrieves and washes KB
	 * candidates, classifies the outcome (Perfect / Find / Miss relative to
	 * the gold link), dumps each candidate set to candidate\, and prints
	 * per-status average candidate-set sizes.
	 *
	 * @param queryFile name of the query file under {@link #DATADIR}
	 */
	public static void findEntity(String queryFile) {
		List<Query> queryList = Query.loadFromFile(DATADIR + queryFile);
		String candidateDir = "candidate\\";
		System.out.println(queryList.size());
		Set<Infobox> infoboxList = Infobox.loadFromFile(DATADIR + "KB_BD_nomerge_nolink.xml");
		List<Entity> entityList = Query.mergeQueryList(queryList);
		System.out.println(entityList.size());

		Map<String, Infobox> dict = new HashMap<String, Infobox>();
		for (Infobox box : infoboxList) {
			dict.put(box.getId(), box);
		}
		
		int pos = 1;
		int perfect = 0, find = 0, miss = 0;
		int pCount = 0, fCount = 0, mCount = 0;
		for (Entity entity : entityList) {
			Set<Infobox> candidate = Infobox.findByEntityName(infoboxList, entity.getName());
			System.out.print(pos + "\t" + entity.getName() + "\t" + candidate.size() + "\t");
			candidate = Infobox.washCandidate(candidate, entity.getName());
			System.out.print(candidate.size() + "\t");
			State status = null;
			if (entity.getKbLink().equals("NIL")) {
				// a NIL entity is handled perfectly only with zero candidates
				if (candidate.size() == 0) {
					status = State.Perfect;
					perfect++;
				} else {
					status = State.Find;
					find++;
				}
			} else {
				Infobox box = Infobox.findById(candidate, entity.getKbLink());
				if (box == null) {
					status = State.Miss;
					miss++;
				} else if (candidate.size() == 1) {
					status = State.Perfect;
					perfect++;
				} else {
					status = State.Find;
					find++;
				}
			}
			switch (status) {
			case Perfect: 	pCount += candidate.size(); break;
			case Find: 		fCount += candidate.size(); break;
			case Miss:		mCount += candidate.size(); break;
			}
			System.out.println(status);
			String candidateFile = pos + "_" + candidate.size() + "_" + entity.getName() + "_" + entity.getKbLink()
					+ "_" + status + ".txt";
			MyFileWriter mfw = new MyFileWriter(DATADIR + candidateDir + candidateFile, true);
			if (status != State.Miss && !entity.getKbLink().equals("NIL")) {
				mfw.write(dict.get(entity.getKbLink()).toString() + "\n");
			}
			for (Infobox box : candidate) {
				mfw.write(box.toString() + "\n\n");
			}
			mfw.close();
			pos++;
		}
		System.out.println("perfect = " + perfect + ", find = " + find + ", miss = " + miss);
		
		// BUG FIX: the original wrote (double)(pCount / perfect), which
		// truncates via integer division before the cast; divide as doubles
		// so the averages keep their fractional part
		if (perfect > 0)
			System.out.println("perfect average size = " + (double) pCount / perfect);
		
		if (find > 0)
			System.out.println("find average size = " + (double) fCount / find);
		
		if (miss > 0)
			System.out.println("miss average size = " + (double) mCount / miss);
		
		int all = pCount + fCount + mCount;
		if (entityList.size() > 0)
			System.out.println("overall average size = " + (double) all / entityList.size());
		
	}

	/**
	 * Prints knowledge-base statistics: the infobox count, the distinct tag
	 * count, and every tag grouped by how often it occurs.
	 */
	public static void parseInfobox() {
		Set<Infobox> infoboxList = Infobox.loadFromFile(DATADIR + "KB_BD_nomerge_nolink.xml");
		System.out.println("There are " + infoboxList.size() + " infoboxes");
		Map<String, Integer> tagCount = Infobox.countTags(infoboxList);
		System.out.println("There are " + tagCount.size() + " tags");
		Map<Integer, List<String>> tagSort = Infobox.sortTagByCount(tagCount);
		
		for (Map.Entry<Integer, List<String>> entry : tagSort.entrySet()) {
			System.out.println(entry.getKey());
			for (String tag : entry.getValue()) {
				System.out.println("\t" + tag);
			}
		}
	}
	
}
