package algorithms;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;

import org.apache.commons.collections15.Transformer;

import puppy.graph.AbstractIterativeScorerWithPriorsCustom;
import puppy.graph.DeliciousEdge;
import puppy.graph.DeliciousEdgeTransformer;
import puppy.graph.DeliciousNode;
import puppy.graph.DeliciousNodeTransformer;
import util.math.LogProb;

import edu.uci.ics.jung.algorithms.scoring.AbstractIterativeScorerWithPriors;
import edu.uci.ics.jung.algorithms.scoring.util.UniformDegreeWeight;
import edu.uci.ics.jung.graph.Hypergraph;
import edu.uci.ics.jung.graph.event.GraphEvent.Edge;

public class RandomWalkWIthPriors<V, E> extends
		AbstractIterativeScorerWithPriorsCustom<V, E, Double> {

	// Transformers supplying edge frequencies and node/query probabilities.
	// Must be set via initModifiers(...) before any update*() method runs;
	// the update methods dereference them without a null check.
	DeliciousEdgeTransformer edge_transformer = null;
	DeliciousNodeTransformer node_transformer = null;

	/**
	 * Creates an instance with the specified graph, edge weights, vertex
	 * priors, and 'random jump' probability (alpha).
	 * 
	 * @param g
	 *            the input graph
	 * @param edge_weights
	 *            the edge weights, denoting transition probabilities from
	 *            source to destination
	 * @param vertex_priors
	 *            the prior probabilities for each vertex
	 * @param alpha
	 *            the probability of executing a 'random jump' at each step
	 */
	public RandomWalkWIthPriors(Hypergraph<V, E> g,
			Transformer<E, ? extends Number> edge_weights,
			Transformer<V, ? extends Double> vertex_priors, double alpha) {
		super(g, edge_weights, vertex_priors, alpha);
	}

	/**
	 * Injects the node and edge transformers used by the update methods.
	 * Must be called before the scorer is stepped: the update*() methods use
	 * these fields without a null check.
	 * 
	 * @param node_transformer
	 *            supplies per-node prior and query probabilities
	 * @param edge_transformer
	 *            supplies per-edge frequencies
	 */
	public void initModifiers(DeliciousNodeTransformer node_transformer,
			DeliciousEdgeTransformer edge_transformer) {
		this.node_transformer = node_transformer;
		this.edge_transformer = edge_transformer;
	}

	/**
	 * Maintains the amount of potential associated with vertices with no
	 * out-edges.
	 */
	protected double disappearing_potential = 0.0;

	/**
	 * Creates an instance with the specified graph, vertex priors, and 'random
	 * jump' probability (alpha). The outgoing edge weights for each vertex will
	 * be equal and sum to 1.
	 * 
	 * @param g
	 *            the input graph
	 * @param vertex_priors
	 *            the prior probabilities for each vertex
	 * @param alpha
	 *            the probability of executing a 'random jump' at each step
	 */
	public RandomWalkWIthPriors(Hypergraph<V, E> g,
			Transformer<V, ? extends Double> vertex_priors, double alpha) {
		super(g, vertex_priors, alpha);
		this.edge_weights = new UniformDegreeWeight<V, E>(graph);
	}

	/**
	 * Updates the (log-space) value for this vertex. Called by
	 * <code>step()</code>.
	 * 
	 * For each incoming edge: a url vertex accumulates
	 * p(url|tag) * tag(i-1); a tag vertex accumulates
	 * p(tag|url) * p(q|tag) * url(i-1). All products and sums are carried
	 * out on log probabilities via {@link LogProb}.
	 * 
	 * NOTE(review): requires initModifiers(...) to have been called first;
	 * node_transformer is dereferenced without a null check.
	 * 
	 * @param v
	 *            the vertex being updated; assumed castable to DeliciousNode
	 * @return the absolute change between the previous and the new value,
	 *         used by the caller as a convergence delta
	 */
	// @Override
	public double updateCurrent(V v) {
		collectDisappearingPotential(v);

		// Log-space accumulator of neighbour input. NOTE(review): 0.0 is
		// log(1), not log(0); confirm the first sumProbLog call is meant to
		// treat the empty accumulator as probability 1.
		double v_input = 0;

		DeliciousNode node = (DeliciousNode) v;

		// p(q | node): probability of the query given this node.
		double node_query_prob = this.node_transformer.calculateQueryProb(node);
		for (E e : graph.getInEdges(v)) {
			// For graphs, the code below is equivalent to
			// V w = graph.getOpposite(v, e);
			// total_input += (getCurrentValue(w) *
			// getEdgeWeight(w,e).doubleValue());
			// For hypergraphs, this divides the potential coming from w
			// by the number of vertices in the connecting edge e.

			DeliciousEdge edge = (DeliciousEdge) e;
			int incident_count = getAdjustedIncidentCount(e);
			for (V w : graph.getIncidentVertices(e)) {

				DeliciousNode incident_node = (DeliciousNode) w;
				
				// NOTE(review): unused in this method; despite the name it
				// counts IN-edges of w, not outgoing ones.
				int outgoing_edges = graph.getInEdges(w).size();
				double edgeWeight = 0.0d;
				double currentValue = 0.0d;
				double nodePrior = 0.0d; // NOTE(review): never used
				
				if (!w.equals(v) || hyperedges_are_self_loops) {
					// Orient the edge according to the type of the vertex
					// being updated (true = tag side, false = url side).
					if (node.isTag()) {
						edge.setType(true);
					} else {

						edge.setType(false);
					}

					// Edge weight is fixed at 1.0 here (log 1 = 0 below), so
					// edges are effectively unweighted in this variant.
					// edgeWeight = getEdgeWeight(w, e).doubleValue();
					edgeWeight = 1.0;

					// Debugging hook for one specific edge; body intentionally
					// left empty.
					if (edge.url.getName().equals(
							"http://www.dailygrammar.com/")
							&& edge.tag.getName().equals("homeschooling")) {

					}

					currentValue = getCurrentValue(w);

					// p(q | incident node); only used for tag vertices below.
					double nodeQueryProb = this.node_transformer
							.calculateQueryProb(incident_node);

					// Debugging hook for two specific nodes; body
					// intentionally left empty.
					if (node.getName().equals("groundhog")
							|| node.getName().equals("re_sources")) {

					}
					
					double current_input_value = 0.0;
					edgeWeight = Math.log(edgeWeight);
					if (!node.isTag()) { // url vertex: edgeWeight * value

						current_input_value = LogProb.productProbLog(
								edgeWeight, currentValue, true);
					} else { // tag vertex: edgeWeight * value * p(q|neighbour)
						current_input_value = LogProb.productProbLog(
								edgeWeight, currentValue, true);
						current_input_value = LogProb.productProbLog(
								current_input_value, nodeQueryProb, true);
					}

					// NOTE(review): this divides the LOG value by
					// incident_count, which is not the same as dividing the
					// probability by it — confirm this normalization is
					// intended.
					v_input = LogProb.sumProbLog(v_input, current_input_value,
							true)
							/ incident_count;

				}
			}

		}

		// Blend neighbour input with the vertex prior using alpha (all in log
		// space), then multiply by p(q | node).
		double first = v_input + Math.log(1 - alpha);
		double second = getVertexPrior(v) + Math.log(alpha);

		double sum_fs = LogProb.productProbLog(node_query_prob, LogProb
				.sumProbLog(first, second, true), true);

		// With alpha == 0 the prior term would be log(0); fall back to the
		// raw input in that case.
		double new_value = alpha > 0 ? sum_fs : v_input;
		setOutputValue(v, new_value);

		return Math.abs(getCurrentValue(v) - new_value);
	}

	/**
	 * Two-pass update rule. Pass 1 walks the in-edges and collects, per
	 * contributing neighbour, the (log-space) contribution and the raw edge
	 * frequency. Pass 2 combines the contributions, weighting each one by its
	 * edge frequency normalized over the total frequency mass, then blends
	 * with the vertex prior via alpha and multiplies by p(q | node).
	 * 
	 * NOTE(review): requires initModifiers(...); both transformers are used
	 * without null checks.
	 * 
	 * @param v
	 *            the vertex being updated; assumed castable to DeliciousNode
	 * @return the absolute change between the previous and the new value
	 */
	public double update1(V v) {

		collectDisappearingPotential(v);

		double v_input = 0;

		DeliciousNode node = (DeliciousNode) v;

		double node_query_prob = this.node_transformer.calculateQueryProb(node);

		// Pass-1 results: per-neighbour log contributions ...
		ArrayList<Double> current_values = new ArrayList<Double>();

		// ... and the parallel list of raw (linear) edge frequencies.
		ArrayList<Double> edge_values = new ArrayList<Double>();

		for (E e : graph.getInEdges(v)) {
			// For graphs, the code below is equivalent to
			// V w = graph.getOpposite(v, e);
			// For hypergraphs, the loop visits every vertex incident to e.

			DeliciousEdge edge = (DeliciousEdge) e;

			for (V w : graph.getIncidentVertices(e)) {
				// NOTE(review): result discarded — looks like a dead call.
				graph.getIncidentEdges(w);
				DeliciousNode incident_node = (DeliciousNode) w;

				double edgeWeight = 0.0d;
				edgeWeight = edge_transformer.getEdgeFreq(edge);
				double currentValue = 0.0d;
				double nodePrior = 0.0d; // NOTE(review): never used

				if (!w.equals(v) || hyperedges_are_self_loops) {
					// Orient the edge according to the type of the vertex
					// being updated (true = tag side, false = url side).
					if (node.isTag()) {
						edge.setType(true);
					} else {

						edge.setType(false);
					}
					// NOTE(review): named "outgoing" but counts IN-edges of w.
					int outgoing_edges = graph.getInEdges(w).size();
				
				
					// Debugging hook for one specific edge; body intentionally
					// left empty.
					if (edge.url.getName().equals(
							"http://www.dailygrammar.com/")
							&& edge.tag.getName().equals("homeschooling")) {

					}

					currentValue = getCurrentValue(w);

					// p(q | incident node); only used for tag vertices below.
					double nodeQueryProb = this.node_transformer
							.calculateQueryProb(incident_node);

					double current_input_value = 0.0;

					if (!node.isTag()) { // url vertex: plain neighbour value

						current_input_value = currentValue;
					} else { // tag vertex: neighbour value * p(q|neighbour)

						current_input_value = currentValue;
						current_input_value = LogProb.productProbLog(
								current_input_value, nodeQueryProb, true);

					}
					
					
					// Normalize by the neighbour's degree (log-space
					// division); "this normalization step is needed" per the
					// original author.
					
					current_input_value = LogProb.divisionProbLog(current_input_value, Math.log(outgoing_edges),true);

					current_values.add(current_input_value);
					edge_values.add(edgeWeight);
				}
			}

		}

		// Pass 2: aggregate contributions with edge weights normalized to sum
		// to 1 over all collected edges.
		double edge_total = getArraySum(edge_values);
		for (int i = 0; i < current_values.size(); i++) {
	
			double edge_weight = Math.log(edge_values.get(i) / edge_total);
			double currentValue = current_values.get(i);

			double temp = LogProb.productProbLog(edge_weight, currentValue,
					true);
			
			// 0 doubles as the "unset" marker (log 1): the first contribution
			// replaces the accumulator instead of being summed into it.
			if(v_input==0){
				v_input=temp;
			}else{
				v_input = LogProb.sumProbLog(v_input, temp, true);	
			}
			
			

		}


		// Blend with the vertex prior using alpha (log space), then multiply
		// by p(q | node).
		double first = v_input + Math.log(1 - alpha);
		double second = getVertexPrior(v) + Math.log(alpha);

		double sum_fs = LogProb.productProbLog(node_query_prob, LogProb
				.sumProbLog(first, second, true), true);

		double new_value = alpha > 0 ? sum_fs : v_input;
		setOutputValue(v, new_value);

		return Math.abs(getCurrentValue(v) - new_value);
	}
	
	
	
	
	
	/**
	 * Two-pass update rule; see {@link #update1(Object)}.
	 * 
	 * NOTE(review): this method is byte-for-byte identical to update1(V) —
	 * despite the name, nothing here walks edges "backward". Candidate for
	 * consolidation (have one delegate to the other) once callers are
	 * confirmed.
	 * 
	 * @param v
	 *            the vertex being updated; assumed castable to DeliciousNode
	 * @return the absolute change between the previous and the new value
	 */
	public double updateBackward(V v) {

		collectDisappearingPotential(v);

		double v_input = 0;

		DeliciousNode node = (DeliciousNode) v;

		double node_query_prob = this.node_transformer.calculateQueryProb(node);

		// Pass-1 results: per-neighbour log contributions ...
		ArrayList<Double> current_values = new ArrayList<Double>();

		// ... and the parallel list of raw (linear) edge frequencies.
		ArrayList<Double> edge_values = new ArrayList<Double>();

		for (E e : graph.getInEdges(v)) {
			// For graphs, the code below is equivalent to
			// V w = graph.getOpposite(v, e);
			// For hypergraphs, the loop visits every vertex incident to e.

			DeliciousEdge edge = (DeliciousEdge) e;

			for (V w : graph.getIncidentVertices(e)) {
				// NOTE(review): result discarded — looks like a dead call.
				graph.getIncidentEdges(w);
				
				DeliciousNode incident_node = (DeliciousNode) w;

				double edgeWeight = 0.0d;
				edgeWeight = edge_transformer.getEdgeFreq(edge);
				double currentValue = 0.0d;
				double nodePrior = 0.0d; // NOTE(review): never used

				if (!w.equals(v) || hyperedges_are_self_loops) {
					// Orient the edge according to the type of the vertex
					// being updated (true = tag side, false = url side).
					if (node.isTag()) {
						edge.setType(true);
					} else {

						edge.setType(false);
					}
					// NOTE(review): named "outgoing" but counts IN-edges of w.
					int outgoing_edges = graph.getInEdges(w).size();
				
				
					// Debugging hook for one specific edge; body intentionally
					// left empty.
					if (edge.url.getName().equals(
							"http://www.dailygrammar.com/")
							&& edge.tag.getName().equals("homeschooling")) {

					}

					currentValue = getCurrentValue(w);

					// p(q | incident node); only used for tag vertices below.
					double nodeQueryProb = this.node_transformer
							.calculateQueryProb(incident_node);

					double current_input_value = 0.0;

					if (!node.isTag()) { // url vertex: plain neighbour value

						current_input_value = currentValue;
					} else { // tag vertex: neighbour value * p(q|neighbour)

						current_input_value = currentValue;
						current_input_value = LogProb.productProbLog(
								current_input_value, nodeQueryProb, true);

					}
					
					
					// Normalize by the neighbour's degree (log-space
					// division); "this normalization step is needed" per the
					// original author.
					
					current_input_value = LogProb.divisionProbLog(current_input_value, Math.log(outgoing_edges),true);

					current_values.add(current_input_value);
					edge_values.add(edgeWeight);
				}
			}

		}

		// Pass 2: aggregate contributions with edge weights normalized to sum
		// to 1 over all collected edges.
		double edge_total = getArraySum(edge_values);
		for (int i = 0; i < current_values.size(); i++) {
	
			double edge_weight = Math.log(edge_values.get(i) / edge_total);
			double currentValue = current_values.get(i);

			

			double temp = LogProb.productProbLog(edge_weight, currentValue,
					true);
			
			// 0 doubles as the "unset" marker (log 1): the first contribution
			// replaces the accumulator instead of being summed into it.
			if(v_input==0){
				v_input=temp;
			}else{
				v_input = LogProb.sumProbLog(v_input, temp, true);	
			}
			
			

		}


		// Blend with the vertex prior using alpha (log space), then multiply
		// by p(q | node).
		double first = v_input + Math.log(1 - alpha);
		double second = getVertexPrior(v) + Math.log(alpha);

		double sum_fs = LogProb.productProbLog(node_query_prob, LogProb
				.sumProbLog(first, second, true), true);

		double new_value = alpha > 0 ? sum_fs : v_input;
		setOutputValue(v, new_value);

		return Math.abs(getCurrentValue(v) - new_value);
	}

	/**
	 * Earlier two-pass update variant, kept for reference — the name suggests
	 * it is known to be defective. Differences from {@link #update1(Object)}:
	 * no per-neighbour degree normalization, no "unset" guard on the
	 * accumulator (the first sumProbLog call therefore folds in 0 = log 1),
	 * and live debug printing to stdout for the node named "julie".
	 * 
	 * NOTE(review): consider removing or at least silencing the
	 * System.out.println calls before production use.
	 * 
	 * @param v
	 *            the vertex being updated; assumed castable to DeliciousNode
	 * @return the absolute change between the previous and the new value
	 */
	public double updateBroken(V v) {

		collectDisappearingPotential(v);

		double v_input = 0;

		DeliciousNode node = (DeliciousNode) v;

		double node_query_prob = this.node_transformer.calculateQueryProb(node);

		// Pass-1 results: per-neighbour log contributions ...
		ArrayList<Double> current_values = new ArrayList<Double>();

		// ... and the parallel list of raw (linear) edge frequencies.
		ArrayList<Double> edge_values = new ArrayList<Double>();

		for (E e : graph.getInEdges(v)) {
			// For graphs, the code below is equivalent to
			// V w = graph.getOpposite(v, e);
			// For hypergraphs, the loop visits every vertex incident to e.

			DeliciousEdge edge = (DeliciousEdge) e;
			// NOTE(review): unused in this variant.
			int incident_count = getAdjustedIncidentCount(e);

			for (V w : graph.getIncidentVertices(e)) {

				DeliciousNode incident_node = (DeliciousNode) w;

				double edgeWeight = 0.0d;
				double currentValue = 0.0d;
				double nodePrior = 0.0d; // NOTE(review): never used

				if (!w.equals(v) || hyperedges_are_self_loops) {
					// Orient the edge according to the type of the vertex
					// being updated (true = tag side, false = url side).
					if (node.isTag()) {
						edge.setType(true);
					} else {

						edge.setType(false);
					}
					edgeWeight = edge_transformer.getEdgeFreq(edge);
					// NOTE(review): unconditional debug output.
					System.out.println(edgeWeight);
					edge_values.add(edgeWeight);

					// Debugging hook for one specific edge; body intentionally
					// left empty.
					if (edge.url.getName().equals(
							"http://www.dailygrammar.com/")
							&& edge.tag.getName().equals("homeschooling")) {

					}

					currentValue = getCurrentValue(w);

					// p(q | incident node); only used for tag vertices below.
					double nodeQueryProb = this.node_transformer
							.calculateQueryProb(incident_node);

					double current_input_value = 0.0;

					if (!node.isTag()) { // url vertex: plain neighbour value

						current_input_value = currentValue;
					} else { // tag vertex: neighbour value * p(q|neighbour)

						current_input_value = currentValue;
						current_input_value = LogProb.productProbLog(
								current_input_value, nodeQueryProb, true);

					}

					current_values.add(current_input_value);

				}
			}

		}

		// Pass 2: aggregate contributions with edge weights normalized to sum
		// to 1 over all collected edges.
		double edge_total = getArraySum(edge_values);

		for (int i = 0; i < current_values.size(); i++) {

			double edge_weight = Math.log(edge_values.get(i) / edge_total);
			double currentValue = current_values.get(i);

			// Debug trace for one specific node.
			if (node.getName().equals("julie")) {
				System.out.println(edge_weight + "\t" + currentValue);
			}

			double temp = LogProb.productProbLog(edge_weight, currentValue,
					true);
			if (node.getName().equals("julie")) {
				System.out.println("edge_weight * currentValue" + temp);
			}
			// NOTE(review): no unset-guard here (unlike update1): the first
			// fold sums against 0 = log(1).
			v_input = LogProb.sumProbLog(v_input, temp, true);

		}

		// Debug trace for one specific node.
		if (node.getName().equals("julie")) {

			System.out.println("Total edges " + edge_values.size() + "\t"
					+ edge_total);
			System.out.println(current_values.size() + "\t v_input:" + v_input);
			System.out.println("------------------------");

		}

		// Blend with the vertex prior using alpha (log space), then multiply
		// by p(q | node).
		double first = v_input + Math.log(1 - alpha);
		double second = getVertexPrior(v) + Math.log(alpha);

		double sum_fs = LogProb.productProbLog(node_query_prob, LogProb
				.sumProbLog(first, second, true), true);

		double new_value = alpha > 0 ? sum_fs : v_input;
		setOutputValue(v, new_value);

		return Math.abs(getCurrentValue(v) - new_value);
	}

	/**
	 * Sums all entries of the given list.
	 * 
	 * @param list
	 *            the values to add up
	 * @return the sum of the entries, or 0.0 for an empty list
	 */
	public double getArraySum(ArrayList<Double> list) {
		double total = 0.0;
		for (double value : list) {
			total += value;
		}
		return total;
	}

	/**
	 * Plain random-walk-with-restart update. Neighbour (log) values are
	 * divided by log(incident count) and summed in log space; the alpha blend
	 * with the prior is then done in ordinary probability space (via exp),
	 * and the result is stored back as a log value.
	 * 
	 * NOTE(review): the prior comes from
	 * node_transformer.calculateNodePrior(...), not getVertexPrior(v) — the
	 * commented line below shows the earlier behaviour.
	 * 
	 * @param v
	 *            the vertex being updated; assumed castable to DeliciousNode
	 * @return the absolute change between the previous and the new log value
	 */
	public double update(V v) {
		collectDisappearingPotential(v);

		// 1000 is a sentinel for "no contribution yet": log probabilities are
		// <= 0, so a genuine accumulated value can never equal 1000.
		double v_input = 1000;
		for (E e : graph.getInEdges(v)) {
		
			int incident_count = getAdjustedIncidentCount(e);

			for (V w : graph.getIncidentVertices(e)) {
				DeliciousNode node = (DeliciousNode) v; // NOTE(review): unused

				if (!w.equals(v) || hyperedges_are_self_loops){
					// NOTE(review): divides the LOG value by
					// log(incident_count); confirm this matches the intended
					// normalization (it is not probability / incident_count).
					double tt  = LogProb.divisionProbLog(getCurrentValue(w) , Math.log(incident_count), true);
					if(v_input==1000){
						v_input = tt;
					}else{
					 v_input = LogProb.sumProbLog(v_input,tt,true);	
					}
					
				}
			
			}
		}
		
		double vertex_prior= node_transformer.calculateNodePrior(((DeliciousNode)v));
		//double prior= Math.exp(getVertexPrior(v));  //before!!!!
		double prior= Math.exp(vertex_prior);
		double v_input_prob=Math.exp(v_input);
		// Blend input and prior according to alpha in probability space.
		double new_value = alpha > 0 ? v_input_prob * (1 - alpha)
				+ prior * alpha : v_input_prob;
		setOutputValue(v, Math.log(new_value));
		
		
		return Math.abs(getCurrentValue(v) - Math.log(new_value));
	}

	/**
	 * Cleans up after each step: redistributes the potential collected from
	 * sink vertices across all vertices, weighted by their priors (keeping
	 * the scores normalized), then delegates to <code>super.afterStep</code>.
	 */
	@Override
	protected void afterStep() {
		// Spread any disappearing potential according to the vertex priors.
		if (disappearing_potential > 0) {
			for (V vertex : graph.getVertices()) {
				double share = (1 - alpha)
						* (disappearing_potential * getVertexPrior(vertex));
				setOutputValue(vertex, getOutputValue(vertex) + share);
			}
			disappearing_potential = 0;
		}

		super.afterStep();
	}

	/**
	 * PageRank-flavoured per-step cleanup. The body was a verbatim copy of
	 * {@link #afterStep()}, so it now delegates to it instead of duplicating
	 * the redistribution logic.
	 */
	protected void afterStepPageRank() {
		afterStep();
	}

	/**
	 * Collects the "disappearing potential" of a vertex with no outgoing
	 * edges. Such vertices cannot pass their score on to neighbours, so their
	 * current value is accumulated here and later redistributed across all
	 * vertices during the normalization in <code>afterStep()</code>.
	 * 
	 * @throws IllegalArgumentException
	 *             if the vertex has no out-edges and disconnected graphs are
	 *             not permitted
	 */
	@Override
	protected void collectDisappearingPotential(V v) {
		if (graph.outDegree(v) != 0) {
			return; // vertex can propagate normally; nothing to collect
		}
		if (!isDisconnectedGraphOK()) {
			throw new IllegalArgumentException("Outdegree of " + v
					+ " must be > 0");
		}
		disappearing_potential += getCurrentValue(v);
	}
}
