package cn.edu.scut.suggestion;

/**
 * Query suggestion via relevance feedback on the initial query, using a
 * modularity-based community detection algorithm to diversify the results.
 * @author tian.yuchen
 */

import java.io.File;
import java.rmi.RemoteException;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Vector;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import net.sf.json.JSONArray;
import net.sf.json.JSONObject;

import org.apache.lucene.util.PriorityQueue;

import cn.edu.scut.hsrc.bean.CandidateWord;
import cn.edu.scut.hsrc.bean.WordSet;
import cn.edu.scut.suggestion.corpus.MysqlHelper;
import cn.edu.scut.suggestion.corpus.TermInfo;
import cn.edu.scut.suggestion.segmentation.Segment;
import cn.edu.scut.suggestion.tools.Configuration;

/**
 * Query-suggestion engine that clusters relevance-feedback terms around a
 * query. Two clustering strategies are present: a greedy modularity merge
 * ({@code getBestQ}) and a k-means-style pass ({@code getKmeans}).
 *
 * NOTE(review): instances are stateful ({@code root}, {@code w}, {@code N},
 * {@code fbTermInfos}) and the state is not reset between calls, so each
 * instance should serve a single clustering request — confirm with callers.
 */
public class DiverseSuggest {
	public static int NumTerms = 100; // max number of candidate terms considered
	public static int MAXWIDTH=9999; // sentinel "infinity" used when minimizing weights
	Configuration conf; // supplies the corpus DB path via key "corpus.db"
	File file; // corpus database file
	Vector<Integer> root;// 并查集 -- union-find parent table: root.get(i) is i's parent
	int N; // number of feedback terms currently loaded (dimension of w)
	double w[][]; // N x N term-term similarity matrix (see computeW / iniW)
	double sigmaAW[]; // per-row sums of w, used by the modularity objective
	Vector<TermInfo> fbTermInfos; // feedback terms most related to the query

	/**
	 * Builds a suggester bound to the corpus DB named by the
	 * {@code corpus.db} configuration key.
	 *
	 * @throws RemoteException declared for RMI compatibility
	 */
	public DiverseSuggest() throws RemoteException {
		super();
		conf = new Configuration();
		file = new File(conf.get("corpus.db").toString());
		root = new Vector<Integer>();
		fbTermInfos = new Vector<TermInfo>();
	}

	/**
	 * Produces diversified query suggestions: segments the feedback string,
	 * keeps the terms most related to the dominant query term, clusters them,
	 * and returns the clusters as JSON (see {@code dumpText}).
	 *
	 * @param query       the original user query
	 * @param feedbackStr relevance-feedback text to mine suggestion terms from
	 * @return a JSON object {"data": [[term,...], ...]} of term clusters, or
	 *         {@code null} if no query term is found in the corpus
	 */
	public JSONObject getDiverseSugg(String query, String feedbackStr) {
		MysqlHelper mHelper = new MysqlHelper();

		// Segment the feedback text and look up corpus statistics for each
		// legal (two consecutive CJK characters) term.
		Vector<String> fbterms = Segment.split(feedbackStr);
		Vector<TermInfo> tmptermiInfos = new Vector<TermInfo>();
		for (int i = 0; i < fbterms.size(); i++) {
			if (!isIllegaleWord(fbterms.get(i))) {
				TermInfo tmpInfo = mHelper.getTermWithDocsByText(
						fbterms.get(i), file);
				if (tmpInfo != null) {
					tmptermiInfos.add(tmpInfo);
				}
			}
		}

		String maxFreqQueryString = getMaxItemFromSrcQuery(query);
		if (maxFreqQueryString == null) {
			return null;
		}
		fbTermInfos = getQueryRelateTerm(NumTerms,
				mHelper.getTermWithDocsByText(maxFreqQueryString, file),
				tmptermiInfos);

		// BUGFIX: reset the union-find table; previously stale entries from an
		// earlier call on the same instance were kept, corrupting clustering.
		root.clear();
		for (int i = 0; i < fbTermInfos.size(); i++) {
			root.add(i); // every node starts as its own parent
		}

		N = fbTermInfos.size();
		iniW();
		// getBestQ(); // modularity variant, disabled for the k-means experiment
		getKmeans(5, N);

		return dumpText();
	}

	/*
	public List<CandidateWord> XXgetCanWordComunitied(List<CandidateWord> srcCandidateWords,String query){
		MysqlHelper mHelper = new MysqlHelper();
		Vector <TermInfo> tmptermiInfos=new Vector<TermInfo>();
		int nsize=srcCandidateWords.size()<NumTerms?srcCandidateWords.size():NumTerms;
		for (int i = 0; i < nsize; i++) {
				TermInfo tmpInfo = mHelper.getTermWithDocsByText(srcCandidateWords.get(i).getText(), file);			
				if (tmpInfo != null) {
					tmpInfo.setCandidateWord(srcCandidateWords.get(i));
					tmptermiInfos.add(tmpInfo);
				}		
		}
		String maxFreqQueryString=getMaxItemFromSrcQuery(query);
		if (maxFreqQueryString==null) return null;
		fbTermInfos=getQueryRelateTerm(NumTerms, mHelper.getTermWithDocsByText(maxFreqQueryString, file), tmptermiInfos);
		
		
		for(int i=0;i<fbTermInfos.size();i++){
			root.add(i);// 每一个节点的父结点都是自己
		}
		N = fbTermInfos.size();
		iniW();
		dumpW();
		getBestQ();
		System.out.println(dumpText().toString());
		List<CandidateWord> reslist=new ArrayList<CandidateWord>();
		for(int i=0;i<fbTermInfos.size();i++){
			CandidateWord tmpWord=new CandidateWord(fbTermInfos.get(i).getCandidateWord());
			tmpWord.setComunity(getRoot(i));
			reslist.add(tmpWord);
		}
		return reslist;
	}
	*/
	/**
	 * Clusters the words of {@code allwords} into communities relative to the
	 * dominant term of {@code query}, using the greedy modularity algorithm.
	 *
	 * @param query    the original user query
	 * @param allwords candidate word set to cluster (at most NumTerms used)
	 * @return candidate words tagged with their community id, or {@code null}
	 *         if no query term is found in the corpus
	 */
	public List<CandidateWord> getCanWordComunitied2(String query, WordSet allwords) {
		MysqlHelper mHelper = new MysqlHelper();
		Vector<TermInfo> tmptermiInfos = new Vector<TermInfo>();
		int nsize = allwords.count() < NumTerms ? allwords.count() : NumTerms;
		for (int i = 0; i < nsize; i++) {
			TermInfo tmpInfo = mHelper.getTermWithDocsByText(
					allwords.getWord(i).getText(), file);
			if (tmpInfo != null) {
				tmpInfo.setCandidateWord(new CandidateWord(allwords.getWord(i), i, i));
				tmptermiInfos.add(tmpInfo);
			}
		}

		String maxFreqQueryString = getMaxItemFromSrcQuery(query);
		if (maxFreqQueryString == null) {
			return null;
		}
		fbTermInfos = getQueryRelateTerm(NumTerms,
				mHelper.getTermWithDocsByText(maxFreqQueryString, file),
				tmptermiInfos);

		// BUGFIX: reset the union-find table; previously stale entries from an
		// earlier call on the same instance were kept, corrupting clustering.
		root.clear();
		for (int i = 0; i < fbTermInfos.size(); i++) {
			root.add(i); // every node starts as its own parent
		}
		N = fbTermInfos.size();
		iniW();
		getBestQ();

		// Tag each candidate word with the community (union-find root) it ended in.
		List<CandidateWord> reslist = new ArrayList<CandidateWord>();
		for (int i = 0; i < fbTermInfos.size(); i++) {
			CandidateWord tmpWord = new CandidateWord(fbTermInfos.get(i).getCandidateWord());
			tmpWord.setComunity(getRoot(i));
			reslist.add(tmpWord);
		}
		return reslist;
	}
	
	
	//Use ModuleDegree to solve the Cluster
	/**
	 * Greedy modularity maximization: on each pass, evaluates every cross-
	 * community pair with {@code targetQ} and merges the pair giving the best
	 * objective; stops when a full pass finds no merge at least as good as
	 * the best Q ever seen.
	 *
	 * NOTE(review): {@code maxQ} is never reset between passes, so later
	 * merges must match or beat the best value from earlier passes, and the
	 * {@code <=} comparison is what allows equal-Q merges to proceed —
	 * confirm this is the intended stopping rule.
	 */
	public void getBestQ() {
		double maxQ = 0;
		int ivroot = -1, jwroot = -1;
		System.out.println("begin to caculate");
		boolean changeflag;
		do {
			changeflag = false;
			// Scan all pairs in different communities for the best trial merge.
			for (int i = 0; i < N; i++) {
				for (int j = i + 1; j < N; j++) {
					if (getRoot(i) != getRoot(j)) {
						double tq = targetQ(i, j);
						if (maxQ <= tq) {
							ivroot = getRoot(i);
							jwroot = getRoot(j);
							maxQ = tq;
							changeflag = true;
						}
					}
				}
			}

			// Commit the single best merge found in this pass (if any).
			if (changeflag) {
				root.set(jwroot, ivroot);
			}
		} while (changeflag);
	}

	// Initialize the weight matrix w, row-normalize it, and precompute sigmaAW.
	public void iniW() {
		w = new double[N][N];
		sigmaAW = new double[N];
		// Fill the symmetric similarity matrix from pairwise term co-occurrence.
		for (int i = 0; i < N; i++) {
			for (int j = i; j < N; j++) {
				double ww = computeW(fbTermInfos.get(i), fbTermInfos.get(j));
				w[i][j] = ww;
				w[j][i] = ww;
			}
		}

		//normalize
		// NOTE(review): this normalizes in place, row by row, mirroring each
		// rescaled value into column i, so later rows are summed over already-
		// rescaled entries — the result is order-dependent — and a zero row
		// sum would produce NaN (0/0). Confirm both effects are intentional.
		for (int i = 0; i < N; i++) {
			double ww = 0;
			for (int j = 0; j < N; j++) {
				ww += w[i][j];
			}
			for (int j = i; j < N; j++) {
				double www = w[i][j] / ww;
				w[i][j] = www;
				w[j][i] = www;
			}
		}

		// caculate sigmaAW: per-row totals used by the modularity objective
		for (int i = 0; i < N; i++) {
			double sigma = 0;
			for (int j = 0; j < N; j++) {
				sigma += w[i][j];
			}
			sigmaAW[i] = sigma;
		}
	}

	/**
	 * Evaluates the modularity objective Q that would result from merging the
	 * community of {@code ww} under node {@code vv}. The union-find table is
	 * modified for the trial and restored before returning.
	 *
	 * @param vv node whose tree absorbs the other community
	 * @param ww node whose community root is re-parented for the trial
	 * @return the trial objective value, divided by N
	 */
	public double targetQ(int vv, int ww) {
		final int wwRoot = getRoot(ww);
		root.set(wwRoot, vv); // trial merge: attach ww's root under vv
		double q = 0;
		for (int i = 0; i < N; i++) {
			for (int j = 0; j < N; j++) {
				// delta is 1 only for pairs inside the same community.
				double term = w[i][j] - (double) 1 / (double) N * sigmaAW[j];
				q += term * delta(i, j);
			}
		}
		root.set(wwRoot, wwRoot); // undo the trial merge
		return q / (double) N;
	}

	/**
	 * Co-occurrence similarity of two terms: the number of documents they
	 * share, normalized by the geometric mean of their document counts
	 * (cosine similarity over binary document vectors).
	 *
	 * @param x first term, with its posting list in {@code docs}
	 * @param y second term
	 * @return similarity in [0, 1]; 1.0 for equal terms, 0.0 when either
	 *         posting list is empty
	 */
	public static double computeW(TermInfo x, TermInfo y) {
		if (x.equals(y)) {
			return 1.0;
		}
		// BUGFIX: an empty posting list used to yield 0/0 = NaN, which then
		// poisoned the weight matrix during normalization in iniW().
		if (x.docs.size() == 0 || y.docs.size() == 0) {
			return 0.0;
		}
		// Count the intersection of the two posting lists via a hash set.
		HashSet<Integer> docsOfX = new HashSet<Integer>();
		for (Integer doc : x.docs) {
			docsOfX.add(doc);
		}
		int shared = 0;
		for (Integer doc : y.docs) {
			if (docsOfX.contains(doc)) {
				shared++;
			}
		}
		return (double) shared / Math.sqrt(x.docs.size()) / Math.sqrt(y.docs.size());
	}

	/**
	 * Follows parent links in the union-find table until a self-parented node
	 * is reached; that node identifies {@code index}'s community.
	 *
	 * @param index node to look up
	 * @return the community root of {@code index}
	 */
	public int getRoot(int index) {
		int node = index;
		while (root.get(node) != node) {
			node = root.get(node); // walk upward; no path compression applied
		}
		return node;
	}

	/**
	 * Community-membership indicator used by the modularity objective.
	 *
	 * @return 1 if {@code v} and {@code w} share a community root, else 0
	 */
	public int delta(int v, int w) {
		return getRoot(v) == getRoot(w) ? 1 : 0;
	}

	/**
	 * Reports whether {@code word} is unusable as a suggestion term: a legal
	 * word must contain at least two consecutive CJK characters
	 * (U+4E00..U+9FA5).
	 *
	 * NOTE(review): the Pattern is recompiled on every call; caching it in a
	 * static final field would avoid the repeated compilation cost.
	 *
	 * @param word candidate word to check
	 * @return true if the word lacks two consecutive Chinese characters
	 */
	public static boolean isIllegaleWord(String word) {
		Pattern cjkPair = Pattern.compile("[\u4e00-\u9fa5]{2}+");
		return !cjkPair.matcher(word).find();
	}

	/** Debug helper: prints the N x N weight matrix, six decimals per cell. */
	public void dumpW() {
		DecimalFormat fmt = new DecimalFormat("0.000000");
		System.out.println("dump");
		for (int row = 0; row < N; row++) {
			StringBuilder line = new StringBuilder();
			for (int col = 0; col < N; col++) {
				line.append(fmt.format(w[row][col])).append(" ");
			}
			System.out.println(line);
		}
	}

	/** Debug helper: prints every node's community root on one line. */
	public void dumpRoot() {
		StringBuilder line = new StringBuilder();
		for (int i = 0; i < root.size(); i++) {
			line.append(getRoot(i)).append(" ");
		}
		System.out.println(line);
	}

	/** Debug helper: prints the per-row weight sums (sigmaAW) on one line. */
	public void dumpSigmaAW() {
		StringBuilder line = new StringBuilder();
		for (int i = 0; i < N; i++) {
			line.append(sigmaAW[i]).append(" ");
		}
		System.out.println(line);
	}

	/**
	 * Groups the feedback terms by community root and serializes the groups
	 * as JSON: {"data": [[term, ...], ...]}. Each cluster's terms are also
	 * echoed to stdout.
	 *
	 * @return the JSON clustering result
	 */
	public JSONObject dumpText() {
		// community root -> member indices
		HashMap<Integer, Vector<Integer>> clusters = new HashMap<Integer, Vector<Integer>>();
		for (int i = 0; i < N; i++) {
			int r = getRoot(i);
			Vector<Integer> members = clusters.get(r);
			if (members == null) {
				members = new Vector<Integer>();
				clusters.put(r, members);
			}
			members.add(i);
		}

		JSONArray resArray = new JSONArray();
		for (Vector<Integer> members : clusters.values()) {
			JSONArray clusterArray = new JSONArray();
			for (Integer idx : members) {
				System.out.print(fbTermInfos.get(idx).term.text());
				clusterArray.add(fbTermInfos.get(idx).term.text());
			}
			resArray.add(clusterArray);
			System.out.println();
		}

		JSONObject resJson = new JSONObject();
		resJson.put("data", resArray);
		return resJson;
	}

	/** Manual smoke test: clusters a hard-coded feedback string for a query. */
	public static void main(String[] args) throws Exception {
		DiverseSuggest suggester = new DiverseSuggest();
		suggester.getDiverseSugg("你好",
				"大学学生美女帅哥明星华南清华阅读图书馆北大南京江苏政治学校理论苏州手机苹果社会");
	}

	/**
	 * Selects the (up to) {@code size} terms from {@code src} most similar to
	 * {@code query}, using a bounded min-heap.
	 *
	 * @param size  maximum number of terms to keep
	 * @param query term whose similarity to each candidate drives the ranking
	 * @param src   candidate terms
	 * @return the retained terms, in ascending order of similarity
	 */
	public Vector<TermInfo> getQueryRelateTerm(int size, TermInfo query,
			Vector<TermInfo> src) {
		TermInfoQueue tiq = new TermInfoQueue(size);
		double minsim = 0;
		for (TermInfo termInfo : src) {
			double sim = DiverseSuggest.computeW(query, termInfo);
			if (sim > minsim) {
				// insertWithOverflow already evicts the lowest-sim entry once
				// the heap is at capacity.
				// BUGFIX: the old code additionally popped an element whenever
				// the heap reached capacity, so only size-1 terms survived.
				tiq.insertWithOverflow(new SimTermInfo(termInfo, sim));
				if (tiq.size() >= size) {
					minsim = tiq.top().sim; // raise the admission threshold
				}
			}
		}

		// Drain the min-heap: results come out lowest-similarity first.
		Vector<TermInfo> resInfos = new Vector<TermInfo>();
		while (tiq.size() > 0) {
			resInfos.add(tiq.pop().termInfo);
		}
		return resInfos;
	}

	/**
	 * Segments the source query and returns the segment with the highest
	 * document frequency in the corpus.
	 *
	 * @param srcQuery the raw user query
	 * @return the most frequent query segment's text, or {@code null} when no
	 *         segment is found in the corpus
	 */
	public String getMaxItemFromSrcQuery(String srcQuery) {
		MysqlHelper mHelper = new MysqlHelper();
		String bestText = null;
		int bestFreq = 0;
		for (String segment : Segment.split(srcQuery)) {
			TermInfo info = mHelper.getTermByText(segment);
			if (info == null) {
				continue; // segment unknown to the corpus
			}
			if (info.docFreq > bestFreq) {
				bestFreq = info.docFreq;
				bestText = info.term.text();
			}
		}
		return bestText;
	}
	
	//Use Kmeans to solve the Cluster
	/**
	 * K-means-style clustering of the NN nodes over the weight matrix w:
	 * k random seed centers are chosen, every other node is attached (via the
	 * union-find table) to a center, then each cluster's representative is
	 * recomputed; this repeats until the set of centers stops changing.
	 *
	 * NOTE(review): both the assignment step ({@code w[center][i] < minsim})
	 * and the representative step pick the MINIMUM weight. If w holds
	 * similarities (larger = closer, as computeW suggests), these comparisons
	 * look inverted — confirm whether w is treated as a distance here.
	 *
	 * @param k  number of clusters requested
	 * @param NN number of nodes to cluster; if NN < k the call is a no-op
	 */
	private void getKmeans(int k, int NN) {
		if (NN < k)
			return;
		// Pick k distinct random seed indices in [0, NN).
		HashSet<Integer> kSet = new HashSet<Integer>();
		Vector<Integer> kCenterVec = new Vector<Integer>();
		while (kSet.size() < k) {
			Integer num = (int) (Math.random() * NN);
			if (!kSet.contains(num)) {
				kSet.add(num);
			//	kCenterVec.add(num);
			}
		}

		int nChange = 0;
		int count=0;
		while (nChange == 0) { // iterate until no center moved in a full pass
			// Snapshot the current centers before reassignment.
			kCenterVec.clear();
			for (Integer integer : kSet) {
				kCenterVec.add(integer);
			}
			nChange = 1;
			dumpRoot();
			System.out.println(count++);
			// Assignment step: attach every non-center node to some center.
			for (int i = 0; i < NN; i++) {
				if (kSet.contains(i)) {
					continue;
				}
				double minsim = MAXWIDTH;
				for (Integer center : kSet) {
					if (w[center][i]<minsim) {
						root.set(i, center);// cluster
						minsim = w[center][i];
					}
				}
			}

			// Update step: within each cluster, pick the member whose summed
			// weight to the later cluster members is smallest as the new center.
			kSet.clear();
			for (Integer kcenter : kCenterVec) {
				int tmpCenter = kcenter;
				double minWidthSum = MAXWIDTH;
				for (int i = 0; i < NN - 1; i++) {
					double iWidthsum = 0;
					if (getRoot(i) != kcenter)
						continue;
					for (int j = i + 1; j < NN; j++) {
						if (getRoot(j) != kcenter)
							continue;
						iWidthsum += w[i][j];
					}
					if (iWidthsum < minWidthSum) {
						tmpCenter = i;
						minWidthSum=iWidthsum;
					}
				}

				if (kcenter != tmpCenter) {
					nChange = 0; // a center moved, so run another pass
				} 
				root.set(tmpCenter, tmpCenter); // make the new center its own root
				kSet.add(tmpCenter);
			}

		}

	}
}

final class TermInfoQueue extends PriorityQueue<SimTermInfo> {
	TermInfoQueue(int size) {
		initialize(size);
	}

	protected final boolean lessThan(SimTermInfo a, SimTermInfo b) {
		return a.sim < b.sim;
	}
}

/**
 * Pairs a {@link TermInfo} with its similarity score to the query term, for
 * ranking inside {@link TermInfoQueue}.
 */
final class SimTermInfo {
	TermInfo termInfo; // the candidate term
	double sim; // similarity of termInfo to the query term

	SimTermInfo(TermInfo info, double similarity) {
		this.termInfo = info;
		this.sim = similarity;
	}
}
