package org.clockwise.srw;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;

import org.clockwise.feature.Account;
import org.clockwise.feature.AccountPool;
import org.clockwise.feature.Query;
import org.clockwise.feature.QueryContext;
import org.clockwise.util.MyMath;

import Listwise.PredictLinearNeuralModel;

/**
 * A PageRank-style random-walk ranking over the candidate accounts of a batch
 * of queries. One row/column of {@code matrix} is allocated per candidate,
 * concatenated across every query in the {@link QueryContext}; {@link #predict()}
 * builds the mixed transition matrix P = (1-alpha)*P' + alpha*E, computes its
 * dominant eigenvector via {@code MyMath.calEigen}, and counts how many queries
 * rank their first candidate highest within their own segment.
 */
public class RefGraph {
	
	double[][] matrix = null;					// the relation graph (square; one row/column per candidate over ALL queries)
	
	//	when set alpha = 0.15, it seems that listwise algorithm plays too weak a role.
	//	when set alpha = 0.5, the precision seems to rise from 86% to 88%
	// 	when set alpha = 0.75, the precision remains the same with that of alpha = 0.5
	
	public static double alpha = 0.5;		// the jump possibility (mixing weight between the relation graph P' and the jump vector E)
	
	HashMap<Long, Account> dict;			// the reference to the user dictionary (shared with the AccountPool; not read in this class's visible code)
	QueryContext context;					// the queries whose candidate lists make up the graph
	/*
	 * The start of each query. For example, start[1] = 4, start[2] = 9 means
	 * candidates of query_1 are on dimension 4~8 (5 candidates).
	 * start[0] should always be 0.
	 */
	int[] start = null;
	List<Long> idlist = null;				// the user list of all queries (candidate account ids, flattened in query order)
//	List<double[]> feature = null;			// the user feature list
	
	/*
	 * Linear-model weights dotted with each candidate's feature vector in
	 * predict() Step 3 (so weight.length must equal the feature dimension).
	 * The data is trained from 996 labels, but with a better set
	 * of rules to find candidate.
	 */
	public static double[] weight = {
		0.6089669368723591,
		3.3647889855899615,
		1.5016575436327886,
		2.8426752121171672,
		0.6940842844863682,
		0.43354839193344596,
		-0.584240254111944,
		0.20145046293055371,
		-3.079647227496348,
		2.141369211879447,
		1.3279510584160976,
		-1.1563760293876109,
		-0.6323295694490417,
		3.0141977057755773,
		0.5797298751630131,
		0.09146602803381661,
		1.4803476457814204,
		0.9999999999999252,
	};
	
	// Upper bound on the matrix dimension; predict() refuses to run on
	// anything larger (the dense matrix and Step 1 grow as O(dim^2)).
	public static int LARGE_INDICATOR = 1414;
	
	/**
	 * Indexes the candidates of every query in {@code qc}: fills {@code start}
	 * with per-query segment offsets (cumulative featureList sizes), flattens
	 * every candidate id into {@code idlist}, and allocates the square relation
	 * matrix (Java zero-initializes it to all 0.0).
	 *
	 * NOTE(review): the two consistency checks below only print "Fatal Error!"
	 * to stdout and keep going, so a mismatched query silently desynchronizes
	 * start[] (built from featureList sizes) and idlist (built from potential)
	 * for the rest of the run — consider throwing instead.
	 *
	 * @param qc the queries (with candidate and feature lists) to rank
	 * @param ap the account pool supplying the shared user dictionary
	 */
	public RefGraph(QueryContext qc, AccountPool ap) {
		this.context = qc;
		dict = ap.userDict;
		
		start = new int[qc.queryList.size() + 1];
		start[0] = 0;
		idlist = new ArrayList<Long>();
		Iterator<Query> iter = qc.queryList.iterator();
		int pos = 1;
		while (iter.hasNext()) {
			Query query = iter.next();
//			if (query.potential.size() == 0 || !query.findCorrect || !query.isLinked) {
//				continue;
//			}
			// Each candidate must carry exactly one feature vector; otherwise
			// the offsets in start[] and the ids in idlist drift apart.
			if (query.potential.size() != query.featureList.size()) {
				System.out.println("Fatal Error! Potential and FeatureList are not match!");
			}
			start[pos] = start[pos - 1] + query.featureList.size();
			Iterator<Account> aIter = query.potential.iterator();
			while (aIter.hasNext()) {
				idlist.add(aIter.next().id);
			}
			pos++;
		}
		// Sanity check: total segment length must equal the flattened id count.
		if (start[start.length - 1] != idlist.size()) {
			System.out.println("Fatal Error! Idlist and matrix are not match!");
		}
		matrix = new double[idlist.size()][];
		for (int i = 0; i < matrix.length; i++) {
			matrix[i] = new double[matrix.length];
		}
	}
	
	/**
	 * Runs the random-walk ranking and returns the number of queries whose
	 * FIRST candidate (index start[i]) receives the highest eigenvector score
	 * within its own segment [start[i], start[i+1]). Ties favor the first
	 * candidate because the comparison in the final loop is strict.
	 *
	 * Returns 0 immediately — without ranking anything — when the matrix
	 * dimension exceeds {@link #LARGE_INDICATOR}, so a 0 result is ambiguous
	 * between "nothing correct" and "too large to evaluate".
	 *
	 * @return the count of correctly-ranked queries (or 0 if skipped)
	 */
	public int predict() {
		if (matrix.length > LARGE_INDICATOR) {
			System.out.println("The matrix is too large!");
			return 0;
		}
		else {
			System.out.println("The scale is " + matrix.length);
		}
		int dim = matrix.length;
		
		// P = (1-alpha)P' + alpha*E
		// P' = normalize(P'')
		
		// Step 1: get ready for P'' — symmetric pairwise relation scores
		// between all candidates (across query boundaries). The diagonal is
		// left at its zero-initialized value.
		for (int i = 0; i < dim; i++) {
			for (int j = i + 1; j < dim; j++) {
				Long id1 = idlist.get(i);
				Long id2 = idlist.get(j);
				double r = AccountRelationPool.evaluate(id1, id2);
				matrix[i][j] = r;
				matrix[j][i] = r;
			}
		}
		
		// Step 2: normalize P'' and get P' — make each row sum to 1.
		// A candidate with no relations at all (all-zero row) falls back to
		// the uniform distribution 1/dim, keeping P' row-stochastic.
		for (int i = 0; i < dim; i++) {
			double sum = 0.0;
			for (int j = 0; j < dim; j++) {
				sum += matrix[i][j];
			}
			if (sum != 0.0) {
				for (int j = 0; j < dim; j++) {
					matrix[i][j] /= sum;
				}
			}
			else {
				double avg = 1.0 / (double)dim;
				for (int j = 0; j < dim; j++) {
					matrix[i][j] = avg;
				}
			}
		}
		
		// Step 3: get jump vector and then E could be calculated
		
		// 		substep 1: get original jump vector — per-candidate score
		// 		exp(weight . feature) from the listwise linear model (always
		// 		positive, so a non-empty segment can never sum to exactly 0).
		double[] jump = new double[dim];
		List<Query> ql = context.queryList;
		for (int i = 0; i < ql.size(); i++) {
			List<double[]> feature = ql.get(i).featureList;
			for (int j = 0; j < feature.size(); j++) {
				jump[start[i] + j] = Math.exp(MyMath.product(feature.get(j), weight));
			}
		}
		
		// 		substep 2: normalize jump vector — each query's segment is
		// 		scaled to sum to 1/|queries| (note the extra *= ql.size()), so
		// 		the whole jump vector sums to 1 and every query contributes
		// 		equal total jump mass regardless of its candidate count.
		for (int i = 0; i < ql.size(); i++) {
			double sum = 0.0;
			for (int j = start[i]; j < start[i + 1]; j++) {
				sum += jump[j];
			}
			if (sum == 0.0) {
				// Only reachable for an empty segment (exp() is positive);
				// a zero sum on a non-empty segment indicates corrupt input.
				if (start[i] != start[i + 1]) {
					System.out.println("Fatal Error! Unexpected zero in query jump segment");
					System.out.println("The temporary query is " + (start[i]) + " - " + start[i + 1]);
				}
			}
			else {
				sum *= ql.size();
				for (int j = start[i]; j < start[i + 1]; j++) {
					jump[j] /= sum;
				}
			}
		}
		
		// Step 4: add E and P' with weights and get P, i.e.
		// P = (1-alpha)*P' + alpha*(jump * 1^T): every entry of row i gets the
		// constant offset alpha*jump[i], so E's columns all equal jump.
		// NOTE(review): that is the personalized-PageRank teleport term only
		// if calEigen iterates x <- P*x with x summing to 1; combined with the
		// ROW-normalized P' from Step 2 the row/column conventions look mixed
		// (one might expect += jump[j] here) — confirm against MyMath.calEigen.
		double[] jumpCopy = Arrays.copyOf(jump, jump.length);	// unscaled copy, taken before the in-place *= alpha below
		double factor = 1.0 - alpha;
		for (int i = 0; i < dim; i++) {
			jump[i] *= alpha;
			for (int j = 0; j < dim; j++) {
				matrix[i][j] *= factor;
				matrix[i][j] += jump[i];
			}
		}
		
		// Step 5: finally, calculate the eigen vector of P
		// (jumpCopy is presumably the power iteration's start vector — TODO
		// confirm against MyMath.calEigen's signature.)
		double[] eigen = MyMath.calEigen(matrix, jumpCopy, MyMath.EPSILON);
		
		// count the result. If a link is right, eigen[start[i]] should be
		// the maximum among eigen[start[i] : start[i + 1]]
		int acc = 0;
		for (int i = 0; i < ql.size(); i++) {
			double expected = eigen[start[i]];
			boolean isCorrect = true;	// vacuously true for an empty segment, which then counts as correct
			for (int j = start[i] + 1; j < start[i + 1]; j++) {
				if (expected < eigen[j]) {
					isCorrect = false;
					break;
				}
			}
			if (isCorrect) {
				acc++;
			}
		}
		
		return acc;
	}
	
}
