package algorithms;

import java.util.ArrayList;

import org.apache.commons.collections15.Transformer;

import puppy.graph.AbstractIterativeScorerWithPriorsCustom;
import puppy.graph.DeliciousEdge;
import puppy.graph.DeliciousEdgeTransformer;
import puppy.graph.DeliciousNode;
import puppy.graph.DeliciousNodeTransformer;
import util.math.LogProb;
import edu.uci.ics.jung.algorithms.scoring.util.UniformDegreeWeight;
import edu.uci.ics.jung.graph.Hypergraph;

public class RandomWalkBackward<V, E> extends
		AbstractIterativeScorerWithPriorsCustom<V, E, Double> {

	/** Supplies per-edge frequency weights; injected via {@link #initModifiers}. */
	DeliciousEdgeTransformer edge_transformer = null;
	/** Supplies per-node scores (KL, background probability); injected via {@link #initModifiers}. */
	DeliciousNodeTransformer node_transformer = null;
	
	/** Selects the update rule used by {@link #update}: forward walk when true, backward walk otherwise. */
	boolean doForward=true;
	/** Mixing weight applied to topic nodes (log(gamma)) vs. all other nodes (log(1 - gamma)). */
	double gamma=0.0;

	/**
	 * Creates an instance with the specified graph, edge weights, vertex
	 * priors, 'random jump' probability (alpha) and topic-mixing weight
	 * (gamma).
	 * 
	 * @param g
	 *            the input graph
	 * @param edge_weights
	 *            the edge weights, denoting transition probabilities from
	 *            source to destination
	 * @param vertex_priors
	 *            the prior probabilities for each vertex
	 * @param alpha
	 *            the probability of executing a 'random jump' at each step
	 * @param gamma
	 *            mixing weight applied to topic nodes vs. other nodes
	 */
	public RandomWalkBackward(Hypergraph<V, E> g,
			Transformer<E, ? extends Number> edge_weights,
			Transformer<V, ? extends Double> vertex_priors, double alpha, double gamma) {

		super(g, edge_weights, vertex_priors, alpha);
			this.gamma=gamma;
	}

	/**
	 * Injects the node and edge transformers that the walk uses to derive
	 * edge frequencies and node probabilities. Must be called before any
	 * update that touches them.
	 *
	 * @param node_transformer supplies per-node scores (KL, background prob)
	 * @param edge_transformer supplies per-edge frequency weights
	 */
	public void initModifiers(DeliciousNodeTransformer node_transformer,
			DeliciousEdgeTransformer edge_transformer) {
		this.edge_transformer = edge_transformer;
		this.node_transformer = node_transformer;
	}

	/**
	 * Accumulates, during a step, the potential of vertices that have no
	 * out-edges; it is redistributed over all vertices in afterStep() to keep
	 * the scores normalized.
	 */
	protected double disappearing_potential = 0.0;

	/**
	 * Creates an instance with the specified graph, vertex priors, 'random
	 * jump' probability (alpha) and topic-mixing weight (gamma). The outgoing
	 * edge weights for each vertex will be equal and sum to 1.
	 * 
	 * @param g
	 *            the input graph
	 * @param vertex_priors
	 *            the prior probabilities for each vertex
	 * @param alpha
	 *            the probability of executing a 'random jump' at each step
	 * @param gamma
	 *            mixing weight applied to topic nodes vs. other nodes
	 */
	public RandomWalkBackward(Hypergraph<V, E> g,Transformer<V, ? extends Double> vertex_priors, double alpha, double gamma) {
		super(g, vertex_priors, alpha );
		this.gamma=gamma;
		this.edge_weights = new UniformDegreeWeight<V, E>(graph);
	}

	

	/**
	 * Dispatches one score update for v to either the forward or the backward
	 * rule, depending on the {@code doForward} flag.
	 *
	 * @param v the vertex to update
	 * @return the absolute score change of v, used to test convergence
	 */
	public double update(V v) {
		return doForward ? updateForward(v) : updateBackward(v);
	}

	/**
	 * Computes the (log-space) backward transition weight for i &lt;-- j
	 * (in the graph, j --&gt; i): wf(j,i) / sum_k wf(j,k).
	 *
	 * Fix: removed a dead statement whose result was discarded
	 * ({@code graph.getIncidentEdges(w)}) and an unused local edge cast.
	 *
	 * @param i the destination vertex of the backward step
	 * @param j the vertex whose forward weights are aggregated
	 * @return log of w(j,i) divided by the log-space sum of all forward
	 *         weights out of j
	 */
	public double getBackwardAggregate(V i, V j) {

		// POSITIVE_INFINITY marks "no term accumulated yet" in log space.
		double total = Double.POSITIVE_INFINITY;
		double w_ji = 0.0;

		// Aggregate the forward weight from j to every incident vertex w,
		// remembering the single term that targets i.
		for (E e : graph.getInEdges(j)) {
			for (V w : graph.getIncidentVertices(e)) {
				if (!w.equals(j) || hyperedges_are_self_loops) {

					double temp = getForwardWeightSimple(j, w, e);

					if (w.equals(i)) {
						w_ji = temp;
					}

					if (total == Double.POSITIVE_INFINITY) {
						total = temp;
					} else {
						total = LogProb.sumProbLog(total, temp, true);
					}
				}
			}
		}

		// NOTE(review): if i is never visited, w_ji stays 0.0 rather than an
		// explicit "missing" sentinel — confirm divisionProbLog handles that
		// as intended.
		return LogProb.divisionProbLog(w_ji, total, true);
	}
		
	/**
	 * Returns the normalized log-space transition weight p(j|i) computed from
	 * edge weights: w(i,j) / sum_k w(i,k), where each w(i,k) is the log-space
	 * product of the edge frequency and the current score of incident vertex
	 * k.
	 *
	 * Side effect: sets the type flag of every traversed edge according to
	 * whether the target j is a tag node.
	 *
	 * @param i the source vertex whose outgoing mass is normalized over
	 * @param j the destination vertex
	 * @return log of w(i,j) / sum_k w(i,k)
	 */
	public double getForwardWeight(V i , V j){
		
		/*
		 * normalized through all outgoing edges of i
		 * 
		 */
		DeliciousNode target = (DeliciousNode)j;
		
		double w_ij=0.0;
		
		// POSITIVE_INFINITY is the "nothing accumulated yet" sentinel for the
		// log-space running sum.
		double total=Double.POSITIVE_INFINITY;
		for (E e : graph.getInEdges(i)) {
		
			
			DeliciousEdge edge = (DeliciousEdge)e;
			// NOTE(review): this mutates shared edge state on every call —
			// confirm no concurrent walk interleaves here.
			if (target.isTag()) {
				edge.setType(true);
			} else {

				edge.setType(false);
			}
			
			for (V w : graph.getIncidentVertices(e)) {
			
			if ((!w.equals(i) || hyperedges_are_self_loops)) {
				
				double edgeWeight = 0.0d;
				
				double currentValue = 0.0d;
				
				// log(edge frequency) combined with the current score of w,
				// all in log space.
				edgeWeight = edge_transformer.getEdgeFreq(edge);
				edgeWeight= Math.log(edgeWeight);
				currentValue = getCurrentValue(w);
				
				double temp= LogProb.productProbLog(edgeWeight, currentValue,
						true);
				
				
				if(w.equals(j)){
					//store current w(i,j)
					
					w_ij=  temp;
					
				}
				
				if(total==Double.POSITIVE_INFINITY) {
					total= temp;
				}else{
				total = LogProb.sumProbLog(temp, total,
						true);
				
				}
				
			
			} //end if w different from i
		} //end of iteration through vertices of edge
		} //end of iteration through edges of i
	
		
		/*
		 * calculate normalized weight value
		 */
		// NOTE(review): if j is never seen among the incident vertices, w_ij
		// stays 0.0 — verify divisionProbLog treats that value as intended.
		return LogProb.divisionProbLog(w_ij, total, true);
	}
	
	
	/**
	 * Forward transition weight for the step i --&gt; j, in log space: the
	 * current score of i divided by i's incident-edge count, plus the topic
	 * mixing weight (log(gamma) for topic nodes, log(1 - gamma) otherwise).
	 *
	 * The edge frequency is deliberately ignored here: every edge gets a
	 * uniform weight of 1 (log 0), unlike {@link #getForwardWeightSimplePR}.
	 * Fix: the original fetched the edge frequency and a KL score and then
	 * discarded both; those dead calls were removed.
	 *
	 * @param i the source vertex
	 * @param j the destination vertex (unused by this variant)
	 * @param e the connecting edge (unused by this variant)
	 * @return the log-space forward weight of the step i --&gt; j
	 */
	public double getForwardWeightSimple(V i , V j, E e){

		DeliciousNode node = (DeliciousNode) i;

		// log of i's degree: spreads i's mass uniformly over its edges.
		double incident_count = Math.log(graph.getIncidentEdges(i).size());

		// Uniform edge weight: log(1.0) == 0.
		double edgeWeight = Math.log(1.0);
		double currentValue = getCurrentValue(i);

		double temp = LogProb.productProbLog(edgeWeight, currentValue, true);
		double ret = LogProb.divisionProbLog(temp, incident_count, true);

		// Topic nodes carry log(gamma), all others log(1 - gamma).
		double weight = node.isTopic() ? Math.log(gamma) : Math.log(1 - gamma);

		return ret + weight;
	}
	
	
	/**
	 * Forward transition weight for i --&gt; j used by the PR baseline and the
	 * contextual-model baseline, in log space: (edge frequency * current
	 * score of i) / i's incident-edge count, discounted by i's background
	 * probability, plus the topic mixing weight.
	 *
	 * Fix: removed an unused local ({@code kl} from
	 * {@code node_transformer.calculateKL}) whose value was never read.
	 *
	 * @param i the source vertex
	 * @param j the destination vertex (unused by this variant)
	 * @param e the connecting edge supplying the frequency weight
	 * @return the log-space forward weight of the step i --&gt; j
	 */
	public double getForwardWeightSimplePR(V i , V j, E e){

		DeliciousEdge edge = (DeliciousEdge) e;
		DeliciousNode node = (DeliciousNode) i;

		// log of i's degree: spreads i's mass uniformly over its edges.
		double incident_count = Math.log(graph.getIncidentEdges(i).size());

		double edgeWeight = Math.log(edge_transformer.getEdgeFreq(edge));
		double currentValue = getCurrentValue(i);

		double temp = LogProb.productProbLog(edgeWeight, currentValue, true);

		// Discount by the node's background probability (subtraction of logs).
		double child_prob = Math.log(node_transformer.calculateBackgroundProb(node));
		double ret = LogProb.divisionProbLog(temp, incident_count, true) - child_prob;

		// Topic nodes carry log(gamma), all others log(1 - gamma).
		double weight = node.isTopic() ? Math.log(gamma) : Math.log(1 - gamma);

		return ret + weight;
	}
	
	
	/**
	 * Plain random-walk forward weight for the step i --&gt; j, in log space:
	 * (edge frequency * current score of i) / i's incident-edge count, plus
	 * the topic mixing weight (log(gamma) for topic nodes, log(1 - gamma)
	 * otherwise).
	 *
	 * @param i the source vertex
	 * @param j the destination vertex (unused by this variant)
	 * @param e the connecting edge supplying the frequency weight
	 * @return the log-space forward weight of the step i --&gt; j
	 */
	public double getForwardWeightSimpleRW(V i , V j, E e){

		// log of i's degree: spreads i's mass uniformly over its edges.
		double logDegree = Math.log(graph.getIncidentEdges(i).size());

		// log(edge frequency) combined with i's current score, in log space.
		double logEdgeWeight = Math.log(edge_transformer.getEdgeFreq((DeliciousEdge) e));
		double contribution = LogProb.productProbLog(logEdgeWeight, getCurrentValue(i), true);

		double ret = LogProb.divisionProbLog(contribution, logDegree, true);

		// Topic nodes carry log(gamma), all others log(1 - gamma).
		if (((DeliciousNode) i).isTopic()) {
			ret += Math.log(gamma);
		} else {
			ret += Math.log(1 - gamma);
		}

		return ret;
	}
	
	
	/**
	 * Topic-model variant of the forward weight for i --&gt; j, in log space:
	 * (edge weight * current score of i) / i's incident-edge count. The edge
	 * frequency is only used when i is a topic node; every other node gets a
	 * uniform edge weight of 1 (log 0).
	 *
	 * NOTE(review): the original also computed a gamma-weighted value and a
	 * KL score but discarded both before returning; that dead code was
	 * removed. If the gamma weighting was meant to be returned (as in the
	 * sibling variants), this method needs a follow-up fix — confirm with
	 * the author.
	 *
	 * @param i the source vertex
	 * @param j the destination vertex (unused by this variant)
	 * @param e the connecting edge supplying the frequency weight
	 * @return the log-space forward weight of the step i --&gt; j
	 */
	public double getForwardWeightSimpleTopic(V i , V j, E e){

		DeliciousEdge edge = (DeliciousEdge) e;
		DeliciousNode node = (DeliciousNode) i;

		// log of i's degree: spreads i's mass uniformly over its edges.
		double incident_count = Math.log(graph.getIncidentEdges(i).size());

		double edgeWeight = edge_transformer.getEdgeFreq(edge);
		// Non-topic nodes get a uniform edge weight.
		if (!node.isTopic()) edgeWeight = 1;
		edgeWeight = Math.log(edgeWeight);

		double currentValue = getCurrentValue(i);
		double temp = LogProb.productProbLog(edgeWeight, currentValue, true);

		return LogProb.divisionProbLog(temp, incident_count, true);
	}

	
	/**
	 * One forward-walk update of vertex v: sums the forward weights of all
	 * qualifying incident neighbours, averages over the number of in-edges,
	 * blends in the vertex prior with probability alpha, writes the new score
	 * and returns the absolute change.
	 *
	 * All arithmetic is in log space (LogProb helpers).
	 *
	 * @param v the vertex to update
	 * @return |old score - new score|, used to test convergence
	 */
	public double updateForward(V v) {

		collectDisappearingPotential(v);

		// null marks "no contribution accumulated yet".
		Double v_input =null;
	
		for (E e : graph.getInEdges(v)) {
			
			for (V w : graph.getIncidentVertices(e)) {
			
				if (!w.equals(v) || hyperedges_are_self_loops) {
					
					
					double temp= getForwardWeightSimple(w,v,e);
					
					// Diagnostic only: a NaN contribution is still summed in.
					if(Double.isNaN(temp)){
											
						System.out.println("Not a number");
					}
					if(v_input==null){
						v_input = temp;
					}else{
						
						
						
						v_input = LogProb.sumProbLog(v_input, temp, true);
						
					}
					
				}
			}

		}
	
		// Normalize: average over the incoming links (division in log space).
		// NOTE(review): if v has no qualifying in-neighbours, v_input is still
		// null here and the unboxing below throws NPE — confirm callers only
		// pass vertices with in-edges.
		v_input = LogProb.divisionProbLog(v_input, Math.log(graph.getInEdges(v).size()), true);

		// Blend walk contribution (weight 1 - alpha) with the prior (weight alpha).
		double first = v_input + Math.log(1 - alpha);
		double second = getVertexPrior(v) + Math.log(alpha);

		double sum_fs = 	LogProb.sumProbLog(first, second, true);
		double new_value = alpha > 0 ? sum_fs : v_input;
		setOutputValue(v, new_value);

		return Math.abs(getCurrentValue(v) - new_value);
	}

	
	/**
	 * One backward-walk update of vertex v: sums the backward aggregate
	 * weights over all qualifying incident neighbours, averages over the
	 * number of in-edges, blends in the vertex prior with probability alpha,
	 * writes the new score and returns the absolute change.
	 *
	 * All arithmetic is in log space (LogProb helpers).
	 * Fix: removed an unused local ({@code DeliciousNode node}) whose cast
	 * result was never read.
	 *
	 * @param v the vertex to update
	 * @return |old score - new score|, used to test convergence
	 */
	public double updateBackward(V v) {

		collectDisappearingPotential(v);

		// POSITIVE_INFINITY marks "no contribution accumulated yet".
		double v_input = Double.POSITIVE_INFINITY;

		for (E e : graph.getInEdges(v)) {

			for (V w : graph.getIncidentVertices(e)) {

				if (!w.equals(v) || hyperedges_are_self_loops) {

					double temp = getBackwardAggregate(v, w);

					if (v_input == Double.POSITIVE_INFINITY) {
						v_input = temp;
					} else {
						v_input = LogProb.sumProbLog(v_input, temp, true);
					}
				}
			}

		}

		// Normalize: average over the incoming links (division in log space).
		v_input = LogProb.divisionProbLog(v_input, Math.log(graph.getInEdges(v).size()), true);

		// Blend walk contribution (weight 1 - alpha) with the prior (weight alpha).
		double first = v_input + Math.log(1 - alpha);
		double second = getVertexPrior(v) + Math.log(alpha);

		double sum_fs = LogProb.sumProbLog(first, second, true);
		double new_value = alpha > 0 ? sum_fs : v_input;
		setOutputValue(v, new_value);

		return Math.abs(getCurrentValue(v) - new_value);
	}

	

	/**
	 * Sums every value in the given list.
	 *
	 * @param list the values to add up
	 * @return the total of all entries
	 */
	public double getArraySum(ArrayList<Double> list) {

		double sum = 0.0;
		for (Double value : list) {
			sum = value + sum;
		}
		return sum;
	}

	/**
	 * One classic PageRank update of vertex v (linear space, not log space):
	 * each qualifying incident neighbour contributes its current score divided
	 * by the adjusted incident count of the connecting edge; the result is
	 * blended with the vertex prior using alpha.
	 *
	 * Fix: removed an unused local ({@code DeliciousNode node}) that was cast
	 * on every inner-loop iteration and never read.
	 *
	 * @param v the vertex to update
	 * @return |old score - new score|, used to test convergence
	 */
	public double updatePageRank(V v) {
		collectDisappearingPotential(v);

		double v_input = 0;
		for (E e : graph.getInEdges(v)) {
			// For plain graphs this is equivalent to
			//   v_input += getCurrentValue(graph.getOpposite(v, e)) / count;
			// for hypergraphs it divides the potential coming from w by the
			// number of vertices in the connecting edge e.
			int incident_count = getAdjustedIncidentCount(e);

			for (V w : graph.getIncidentVertices(e)) {
				if (!w.equals(v) || hyperedges_are_self_loops)
					v_input += (getCurrentValue(w) * 1.0 / incident_count);
			}
		}

		// Blend walk contribution (weight 1 - alpha) with the prior (weight alpha).
		double new_value = alpha > 0 ? v_input * (1 - alpha)
				+ getVertexPrior(v) * alpha : v_input;
		setOutputValue(v, new_value);

		return Math.abs(getCurrentValue(v) - new_value);
	}

	/**
	 * Cleans up after each step: redistributes any disappearing potential
	 * (thus maintaining normalization of the scores) across all vertices in
	 * proportion to their priors, then calls <code>super.afterStep</code>.
	 */
	@Override
	protected void afterStep() {
		if (disappearing_potential > 0) {
			for (V vertex : graph.getVertices()) {
				double redistributed = disappearing_potential * getVertexPrior(vertex);
				setOutputValue(vertex, getOutputValue(vertex) + (1 - alpha) * redistributed);
			}
			disappearing_potential = 0;
		}

		super.afterStep();
	}

	/**
	 * PageRank variant of the per-step cleanup. Its body was a byte-for-byte
	 * copy of {@link #afterStep()}, so it now simply delegates to it.
	 *
	 * NOTE(review): delegation dispatches virtually — if a subclass overrides
	 * afterStep(), this method now follows that override; confirm no subclass
	 * relies on the old duplicated body.
	 */
	protected void afterStepPageRank() {
		afterStep();
	}

	/**
	 * Collects the "disappearing potential" of vertices that have no outgoing
	 * edges. Such vertices cannot pass their score along to others, so their
	 * current value is accumulated here each step and redistributed across
	 * all vertices during normalization.
	 */
	@Override
	protected void collectDisappearingPotential(V v) {
		if (graph.outDegree(v) != 0) {
			return;
		}
		if (!isDisconnectedGraphOK()) {
			throw new IllegalArgumentException("Outdegree of " + v
					+ " must be > 0");
		}
		disappearing_potential += getCurrentValue(v);
	}
}