package algorithms;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Hashtable;
import java.util.Iterator;

import org.apache.commons.collections15.Transformer;

import puppy.graph.AbstractIterativeScorerWithPriorsCustom;
//import puppy.graph.DeliciousEdge;
import puppy.graph.DeliciousEdgeSimple;
import puppy.graph.DeliciousEdgeSimpleTransformer;


import puppy.graph.DeliciousNodeSimple;
import puppy.graph.DeliciousNodeSimpleTransformer;

import edu.uci.ics.jung.algorithms.scoring.util.UniformDegreeWeight;
import edu.uci.ics.jung.graph.Hypergraph;

/**
 * Random-walk-with-priors scorer over a Delicious tag/url hypergraph.
 * <p>
 * Two update rules are provided: {@link #update(Object)} is the standard
 * PageRank-with-priors step, while {@link #updateMix(Object)} weights each
 * incoming contribution by the query probability of the vertex being updated
 * (see {@link #initModifiers}).
 * <p>
 * NOTE(review): "Absorving" in the class name is a typo for "Absorbing" but is
 * kept — renaming would break callers.
 */
public class RandomWalkAbsorvingNN<V, E>
		extends AbstractIterativeScorerWithPriorsCustom<V, E, Double> {

	/** Supplies per-edge weights/probabilities; set via {@link #initModifiers}. */
	DeliciousEdgeSimpleTransformer edge_transformer = null;
	/** Supplies per-node priors and query probabilities; set via {@link #initModifiers}. */
	DeliciousNodeSimpleTransformer node_transformer = null;
	/** Query terms mapped to weights; set via {@link #initModifiers}. */
	private Hashtable<String, Float> query;

	/**
	 * Maintains the amount of potential associated with vertices with no
	 * out-edges; redistributed across all vertices in {@link #afterStep()}.
	 */
	protected double disappearing_potential = 0.0;

	/**
	 * Creates an instance with the specified graph, edge weights, vertex priors,
	 * and 'random jump' probability (alpha).
	 *
	 * @param g the input graph
	 * @param edge_weights the edge weights, denoting transition probabilities
	 *        from source to destination
	 * @param vertex_priors the prior probabilities for each vertex
	 * @param alpha the probability of executing a 'random jump' at each step
	 */
	public RandomWalkAbsorvingNN(Hypergraph<V, E> g,
			Transformer<E, ? extends Number> edge_weights,
			Transformer<V, ? extends Double> vertex_priors, double alpha) {
		super(g, edge_weights, vertex_priors, alpha);
	}

	/**
	 * Creates an instance with the specified graph, vertex priors, and
	 * 'random jump' probability (alpha). The outgoing edge weights for each
	 * vertex will be equal and sum to 1.
	 *
	 * @param g the input graph
	 * @param vertex_priors the prior probabilities for each vertex
	 * @param alpha the probability of executing a 'random jump' at each step
	 */
	public RandomWalkAbsorvingNN(Hypergraph<V, E> g,
			Transformer<V, ? extends Double> vertex_priors, double alpha) {
		super(g, vertex_priors, alpha);
		this.edge_weights = new UniformDegreeWeight<V, E>(graph);
	}

	/**
	 * Injects the transformers and query used by {@link #updateMix(Object)}.
	 * Must be called before stepping if {@code updateMix} is used.
	 *
	 * @param node_transformer supplies node priors and query probabilities
	 * @param edge_transformer supplies edge weights
	 * @param query query terms mapped to weights
	 */
	public void initModifiers(DeliciousNodeSimpleTransformer node_transformer,
			DeliciousEdgeSimpleTransformer edge_transformer,
			Hashtable<String, Float> query) {
		this.node_transformer = node_transformer;
		this.edge_transformer = edge_transformer;
		this.query = query;
	}

	/**
	 * Updates the value for this vertex, weighting each incoming contribution
	 * by the query probability of the vertex being updated. Called by
	 * <code>step()</code>.
	 * <p>
	 * For a url vertex, each incoming edge contributes
	 * {@code p(q|url) * value(tag, i-1)}; for a tag vertex, each incoming edge
	 * contributes {@code p(q|tag) * value(url, i-1)} (the edge weight is fixed
	 * to 1.0 — transition strength is carried entirely by the query
	 * probability).
	 * <p>
	 * Fixes over the previous revision: the unused node-prior computation, the
	 * unused adjacent-vertex list, and commented-out debug code were removed;
	 * the query probability — which depends only on {@code v} — is computed
	 * once instead of once per incident vertex.
	 *
	 * @param v the vertex whose score is being updated
	 * @return the absolute change in this vertex's score
	 */
	public double updateMix(V v) {
		collectDisappearingPotential(v);

		DeliciousNodeSimple node = (DeliciousNodeSimple) v;
		// p(q|node): loop-invariant — depends only on the vertex being updated.
		double nodeQueryProb = this.node_transformer.calculateQueryProb(node);

		double v_input = 0;
		for (E e : graph.getInEdges(v)) {
			// For hypergraphs, divide the potential coming from each incident
			// vertex by the number of vertices in the connecting edge e.
			int incident_count = getAdjustedIncidentCount(e);
			for (V w : graph.getIncidentVertices(e)) {
				if (!w.equals(v) || hyperedges_are_self_loops) {
					// Edge weight deliberately fixed to 1.0 (getEdgeWeight is
					// intentionally bypassed; see class doc).
					double edgeWeight = 1.0;
					double currentValue = getCurrentValue(w);
					v_input += edgeWeight * currentValue * nodeQueryProb
							/ incident_count;
				}
			}
		}

		// Blend walk input with the vertex prior according to alpha.
		double new_value = alpha > 0
				? v_input * (1 - alpha) + getVertexPrior(v) * alpha
				: v_input;
		setOutputValue(v, new_value);

		return Math.abs(getCurrentValue(v) - new_value);
	}

	/**
	 * Copies a collection of vertices into a list, casting each element to
	 * {@link DeliciousNodeSimple}.
	 *
	 * @param incidentVertices the vertices to copy
	 * @return a new list containing the vertices, in iteration order
	 */
	private ArrayList<DeliciousNodeSimple> tolist(Collection<V> incidentVertices) {
		ArrayList<DeliciousNodeSimple> list =
				new ArrayList<DeliciousNodeSimple>(incidentVertices.size());
		for (V v : incidentVertices) {
			list.add((DeliciousNodeSimple) v);
		}
		return list;
	}

	/**
	 * Updates the value for this vertex using the standard
	 * PageRank-with-priors rule. Called by <code>step()</code>.
	 * <p>
	 * For graphs this is equivalent to summing
	 * {@code value(w, i-1) * edgeWeight(w, e)} over the opposite endpoints of
	 * the in-edges; for hypergraphs each contribution is divided by the number
	 * of vertices in the connecting edge.
	 * <p>
	 * Fixes over the previous revision: the query-membership branch was dead
	 * logic — both branches of the if/else were identical (and the unused
	 * log-space value {@code second} was discarded) — so the branch was
	 * removed, which also avoids an NPE when {@link #initModifiers} was never
	 * called. Unused locals and commented-out debug code were removed.
	 *
	 * @param v the vertex whose score is being updated
	 * @return the absolute change in this vertex's score
	 */
	public double update(V v) {
		collectDisappearingPotential(v);

		double v_input = 0;
		for (E e : graph.getInEdges(v)) {
			// For hypergraphs, divide the potential coming from w by the
			// number of vertices in the connecting edge e.
			int incident_count = getAdjustedIncidentCount(e);
			for (V w : graph.getIncidentVertices(e)) {
				if (!w.equals(v) || hyperedges_are_self_loops) {
					v_input += getCurrentValue(w)
							* getEdgeWeight(w, e).doubleValue() / incident_count;
				}
			}
		}

		// Blend walk input with the vertex prior according to alpha.
		double new_value = alpha > 0
				? v_input * (1 - alpha) + getVertexPrior(v) * alpha
				: v_input;
		setOutputValue(v, new_value);

		return Math.abs(getCurrentValue(v) - new_value);
	}

	/**
	 * Cleans up after each step. In this case that involves allocating the
	 * disappearing potential (thus maintaining normalization of the scores)
	 * according to the vertex probability priors, and then calling
	 * <code>super.afterStep</code>.
	 */
	@Override
	protected void afterStep() {
		// Distribute disappearing potential according to priors.
		if (disappearing_potential > 0) {
			for (V v : graph.getVertices()) {
				setOutputValue(v, getOutputValue(v)
						+ (1 - alpha) * (disappearing_potential * getVertexPrior(v)));
			}
			disappearing_potential = 0;
		}

		super.afterStep();
	}

	/**
	 * PageRank variant of the post-step cleanup. The previous revision
	 * duplicated {@link #afterStep()} byte for byte, so this now simply
	 * delegates to it.
	 */
	protected void afterStepPageRank() {
		afterStep();
	}

	/**
	 * Collects the "disappearing potential" associated with vertices that have
	 * no outgoing edges. Vertices that have no outgoing edges do not directly
	 * contribute to the scores of other vertices. These values are collected
	 * at each step and then distributed across all vertices as a part of the
	 * normalization process.
	 *
	 * @param v the vertex to check
	 * @throws IllegalArgumentException if {@code v} has out-degree 0 and
	 *         disconnected graphs are not allowed
	 */
	@Override
	protected void collectDisappearingPotential(V v) {
		if (graph.outDegree(v) == 0) {
			if (isDisconnectedGraphOK()) {
				disappearing_potential += getCurrentValue(v);
			} else {
				throw new IllegalArgumentException(
						"Outdegree of " + v + " must be > 0");
			}
		}
	}
}