package de.pass.ch.preprocessing;

import java.util.Arrays;
import java.util.Collections;
import java.util.Vector;

import de.pass.ch.datastr.BinaryHeapNodeOrder;
import de.pass.ch.datastr.EdgeList;
import de.pass.ch.datastr.PQueueNodeElimination;
import de.pass.ch.dijkstra.DijkstraUpdateVoronoi;
import de.pass.ch.dijkstra.LocalDijkstraContract;
import de.pass.ch.graph.UpdateableGraph;
import de.pass.ch.graph.edge.CompleteEdge;
import de.pass.ch.graph.edge.Edge;
import de.pass.ch.util.Constants;

/**
 * Computes a node order (importance ranking) for contraction-hierarchy
 * preprocessing: nodes are eliminated one by one in order of a heuristic
 * priority that is maintained in a priority queue and recalculated as the
 * contraction proceeds.
 */
public class NodeOrder {

	// graph being contracted; shortcut edges are added during elimination
	private UpdateableGraph graph;
	// coefficients / configuration of the priority terms
	private WeightCalculation weightCalc;
	// settled-node limits used outside the node-order phases (see processNode)
	private ContractParameters contractParams;
	// current hop-limit of the local witness searches (staged, see initHopLimit)
	private int maxHops;
	// read position in weightCalc.maxHops (alternating hop/degree-limit entries)
	private int maxHopsIndex;
	// average-degree threshold that triggers the next hop-limit stage
	private float maxHopsDegreeLimit;
	// largest average degree observed so far (statistics)
	private double maxAvgDegree;
	// node counter used for the average-degree computation in updateHopLimit
	private int noOfNodes;
	// edge-entry counter used for the average-degree computation
	private int noOfEdges;
	// level assigned to the next eliminated node (incremented per elimination)
	private int currentLevel;
	// lazily recompute priorities only for the topmost queue entry
	private boolean lazyUpdate;
	// remove edges that lie on no shortest path during contraction
	private boolean localReduceEdges;

	private boolean saveShortcutsWitnesses = false;
	// private boolean saveShortcutsText = false;
	private boolean oneHopBackwardSearch = true;
	// private boolean onTheFlyWitnessCheck = false;

	// optional per-node centrality values used as relative priority terms
	private Vector<Double> betweenness;
	private Vector<Double> reach;

	// witness distances of the 2-hop many-to-many search, indexed by node id
	private int[] possibleWitnesses;
	// bucket entries of the 2-hop many-to-many search (see initBucketsMTM)
	private Vector<LinkedNodeMTM> linkedListMTM;

	// priority queue of remaining nodes, keyed by elimination weight
	private BinaryHeapNodeOrder pqElimination;
	private EdgeList newEdges;

	// local Dijkstra witness search used during contraction
	private LocalDijkstraContract localDijkstra;
	private DijkstraUpdateVoronoi dVoronoi;

	/**
	 * Creates a node ordering for the given graph.
	 * 
	 * @param weightCalc
	 *            coefficients and configuration of the priority terms
	 * @param graph
	 *            the graph that will be contracted
	 * @param betweenness
	 *            per-node betweenness values (optional priority term)
	 * @param reach
	 *            per-node reach values (optional priority term)
	 */
	public NodeOrder(WeightCalculation weightCalc, UpdateableGraph graph,
			Vector<Double> betweenness, Vector<Double> reach) {

		this.graph = graph;
		this.weightCalc = weightCalc;
		this.betweenness = betweenness;
		this.reach = reach;

		// statistics / hop-limit state, properly set up later in initPQ()
		maxHops = 0;
		maxHopsIndex = 0;
		maxHopsDegreeLimit = 0;
		maxAvgDegree = 0;
		noOfNodes = 0;
		noOfEdges = 0;
		currentLevel = 0;
		lazyUpdate = false;
		localReduceEdges = weightCalc.localReduceEdges;

		// working data structures for the local searches
		possibleWitnesses = new int[graph.noOfNodes()];
		linkedListMTM = new Vector<LinkedNodeMTM>();
		newEdges = new EdgeList();

		localDijkstra = new LocalDijkstraContract(graph);
		dVoronoi = new DijkstraUpdateVoronoi(graph);
	}

	/**
	 * Builds the contraction hierarchy: first fills the priority queue with
	 * the initial elimination weights of all nodes, then eliminates the nodes
	 * one by one in priority order.
	 */
	public void createHierarchy() {
		initPQ();
		eliminateByPQueue();
	}

	/**
	 * Computes the initial elimination weight of every node and inserts all
	 * nodes into the priority queue. Also resets the statistics counters,
	 * initializes the staged hop-limit and prints a textual progress bar.
	 */
	private void initPQ() {
		System.out.println("Initialize elimination weights...");

		// counters feeding the average-degree check of the hop-limit stages
		noOfEdges = 0;
		noOfNodes = 0;

		initHopLimit();

		// level 0 is the most unimportant level
		currentLevel = 0;

		// lazy updates are enabled iff a check interval is configured
		lazyUpdate = weightCalc.lazyUpdateRecalcLimit > 0;

		pqElimination = new BinaryHeapNodeOrder(graph.noOfNodes());

		// the witness array for the fast many-to-many 2-hop search is only
		// needed if a 2-hop stage appears in the list of hop-limits
		initPossibleWitnessesMTM(weightCalc.maxHops);

		System.out
				.print("%        10        20        30        40        50        60        70        80        90      100\n");
		int progress = 0;

		for (int node = 0; node < graph.noOfNodes(); node++) {

			CalculationResult result = calculateEliminationWeight(node,
					Phase.NODEORDER_INIT);

			PQueueNodeElimination element = new PQueueNodeElimination();
			element.searchSpace = result.searchSpace;
			element.voronoiOwner = node;
			pqElimination.insert(result.weight, element);

			noOfNodes++;
			noOfEdges += graph.lastEdge(node);

			// advance the progress bar proportionally to the handled nodes
			double done = ((node + 1) / (double) graph.noOfNodes()) * 100;
			while (progress < done) {
				System.out.print("-");
				progress++;
			}
		}

		System.out.println();
		System.out.println("#nodes: " + noOfNodes + " / #edges: " + noOfEdges);
	}

	/**
	 * Main elimination loop: while the priority queue is not empty, the node
	 * with the lowest priority (the most unimportant node) is removed and
	 * contracted. With lazy updates enabled, the priority of the topmost node
	 * is first recalculated and the node is only eliminated if it remains
	 * topmost.
	 */
	private void eliminateByPQueue() {
		System.out.println("Eliminatate by priority-queue...");

		int i = 0;

		// lazy updates counter
		int lazyUpdateCounter = 0;
		// lazy updates counter at the beginning of the last check interval;
		// if there are too many lazy updates during a check interval
		// the whole priority queue is updated
		int lastLazyUpdateCounter = 0;

		long timeStart = System.currentTimeMillis();

		System.out
				.print("%        10        20        30        40        50        60        70        80        90      100\n");
		int percent = 0;

		// Main loop: in each loop run, exactly one node is eliminated
		// so this loop is executed noOfNodes() times.
		while (pqElimination.min() != Double.MAX_VALUE) {

			// lazy update: update min element, only remove it if it is still
			// the min element
			if (lazyUpdate) {

				// if there was more than one lazy update for each node in the
				// last check interval (on average), then update the whole
				// priority queue
				if (i % weightCalc.lazyUpdateRecalcLimit == 0) {
					if (lazyUpdateCounter - lastLazyUpdateCounter > weightCalc.lazyUpdateRecalcLimit) {
						System.out
								.println("There were too many lazy updates, updating whole priority queue.");
						updatePQueue();
					}
					lastLazyUpdateCounter = lazyUpdateCounter;
				}

				// queue elements are stored as node id + 1
				int node = pqElimination.minElement() - 1;

				// recalculate the elimination weight (priority) of the
				// currently topmost (most unimportant) node; if it does not
				// equal the stored priority, repeat this step with the now
				// topmost node
				double oldWeight = pqElimination.min();
				CalculationResult newResult = updateEliminationWeight(node);
				while (oldWeight != newResult.weight) {

					lazyUpdateCounter++;

					// If the topmost node remains the same but had a different
					// priority than stored, another update is not necessary.
					// However the lazyUpdateCounter should be increased, so
					// this condition is not checked at the entrance of the
					// loop.
					if ((node + 1) == pqElimination.minElement())
						break;
					node = pqElimination.minElement() - 1;
					oldWeight = pqElimination.min();
					newResult = updateEliminationWeight(node);
				}
			}

			// remove topmost node with lowest priority from priority queue
			int index = pqElimination.deleteMin();
			int node = index - 1;

			// progress bar
			for (double percentTmp = ((i + 1) / (double) graph.noOfNodes()) * 100; percent < percentTmp; percent++) {
				System.out.print("-");
			}

			// process node, meaning contraction (adds shortcuts),
			// update of level of node and update of priority of neighbors
			processNode(node, Phase.NODEORDER_ELIMINATE, false);

			// staged hop-limits: if the average degree exceeds
			// a limit, a new hop-limit is specified and the whole
			// priority queue gets updated.
			if (updateHopLimit()) {
				// BUGFIX: the elapsed time is measured in milliseconds but was
				// printed with a "seconds" label; convert before printing
				System.out
						.println("Switch to max hops " + maxHops + " after "
								+ (i + 1) + " nodes, "
								+ ((System.currentTimeMillis() - timeStart) / 1000)
								+ " seconds");
				updatePQueue();
			}

			currentLevel++;

			i++;
		}

		assert (graph.checkReverseGraphExists());

		System.out.println("#edges: " + graph.noOfEdges());
	}

	/**
	 * Initializes the possible-witnesses array used by the 2-hop many-to-many
	 * search. The array is only needed -- and therefore only filled with the
	 * "no witness found" marker Integer.MAX_VALUE -- if one of the configured
	 * hop-limit stages is a 2-hop search. The stage array alternates hop
	 * limits (even indices) and degree limits (odd indices).
	 * 
	 * @param maxHops
	 *            staged hop-limit configuration (weightCalc.maxHops)
	 */
	private void initPossibleWitnessesMTM(float[] maxHops) {
		for (int i = 0; i < maxHops.length; i += 2) {
			if (maxHops[i] == 2) {
				// use the standard library instead of a hand-rolled fill loop
				Arrays.fill(possibleWitnesses, Integer.MAX_VALUE);
				break;
			}
		}
	}

	/**
	 * First step of the 2-hop many-to-many search. The incoming edges (x,v)
	 * from each node v that is incident to a outgoing edge (node,v) of the
	 * currently processed node are scanned and the distance d(x,v) is stored in
	 * the bucket b(x) along with v. The currently processed node (node) is
	 * ignored. A bucket is a singly linked list threaded through
	 * linkedListMTM; the head of x's list is kept as a 1-based index in the
	 * pqElement field of node x (0 = empty, see clearMTM).
	 * 
	 * @param node
	 *            currently processed node
	 * @param firstEdge
	 *            start index into edge array for node
	 * @param lastEdge
	 *            last index+1 into edge array for node
	 */
	private void initBucketsMTM(int node, int firstEdge, int lastEdge) {
		// assert( _linkedListMTM.empty() );
		// one step backward search from all targets of the outgoing edges
		for (int eOut = firstEdge; eOut < lastEdge; eOut++) {

			Edge edgeOut = graph.edge(node, eOut);

			// only outgoing edges of the currently processed node
			if (!edgeOut.isDirected((byte) 0))
				continue;
			int v = edgeOut.getTarget();

			// scan all incoming edges (x,v) of node v and store
			// the distance to v in the bucket b(x) of x.
			int vLastEdge = graph.lastEdge(v);
			for (int e = graph.firstLevelEdge(v); e < vLastEdge; e++) {
				Edge edge = graph.edge(v, e);

				// skip non-incoming edges and the node being contracted
				if (!edge.isDirected((byte) 1) || edge.getTarget() == node)
					continue;
				linkedListMTM.add(new LinkedNodeMTM());
				LinkedNodeMTM link = linkedListMTM.lastElement();
				link.nodeID = v;
				link.weight = edge.getWeight();
				// prepend to x's bucket list: new entry points at the old
				// head, node x now points at the new entry (1-based index)
				link.next = graph.node(edge.getTarget()).getPQElement();
				graph.node(edge.getTarget()).setPQElement(linkedListMTM.size());
			}
		}
	}

	/**
	 * Cleanup step of the 2-hop many-to-many search: clears all bucket
	 * entries and removes the bucket start pointers stored at the nodes,
	 * exactly mirroring the traversal of initBucketsMTM. Similar to a
	 * clear() in the DijkstraCH class.
	 * 
	 * @param node
	 *            currently processed node
	 * @param firstEdge
	 *            start index into edge array for node
	 * @param lastEdge
	 *            last index+1 into edge array for node
	 */
	private void clearMTM(int node, int firstEdge, int lastEdge) {
		// revisit every node v incident to an outgoing edge (node,v) ...
		for (int outIndex = firstEdge; outIndex < lastEdge; outIndex++) {
			Edge outgoing = graph.edge(node, outIndex);
			if (!outgoing.isDirected((byte) 0)) {
				continue;
			}

			// ... and reset the bucket pointer of every node x that has an
			// incoming edge (x,v), skipping the node being contracted
			int v = outgoing.getTarget();
			int end = graph.lastEdge(v);
			for (int e = graph.firstLevelEdge(v); e < end; e++) {
				Edge incoming = graph.edge(v, e);
				if (!incoming.isDirected((byte) 1)
						|| incoming.getTarget() == node) {
					continue;
				}
				graph.node(incoming.getTarget()).setPQElement(0);
			}
		}
		linkedListMTM.clear();
	}

	/**
	 * Recalculates the elimination weights (priorities) of all nodes that
	 * have not been eliminated yet, printing a textual progress bar. Called
	 * when a new hop-limit stage starts or when too many lazy updates
	 * occurred.
	 */
	private void updatePQueue() {
		System.out.println("Recalculate all remaining weights...");

		System.out
				.print("%        10        20        30        40        50        60        70        80        90      100\n");
		int progress = 0;
		for (int node = 0; node < graph.noOfNodes(); node++) {
			// already eliminated nodes no longer have a queue entry
			if (pqData(node).isEliminated()) {
				continue;
			}

			updateEliminationWeight(node);

			// advance the progress bar proportionally to the handled nodes
			double done = ((node + 1) / (double) graph.noOfNodes()) * 100;
			while (progress < done) {
				System.out.print("-");
				progress++;
			}
		}
	}

	/**
	 * Resets the staged hop-limit state and reads the first stage from the
	 * configuration. weightCalc.maxHops alternates hop limits with
	 * average-degree limits: [hops, degreeLimit, hops, degreeLimit, ...].
	 */
	private void initHopLimit() {
		maxAvgDegree = 0;
		maxHops = 0;
		maxHopsIndex = 0;
		maxHopsDegreeLimit = 0;

		float[] stages = weightCalc.maxHops;
		if (maxHopsIndex < stages.length) {
			// first entry: initial hop limit
			maxHops = (int) stages[maxHopsIndex++];
			if (maxHopsIndex < stages.length) {
				// second entry: degree limit that triggers the next stage
				maxHopsDegreeLimit = stages[maxHopsIndex++];
			}
		}
	}

	/**
	 * Staged hop-limits: if the average degree of the remaining graph exceeds
	 * the configured limit, advance to the next hop-limit stage.
	 * 
	 * @return true if the hop-limit changed (the caller should then update
	 *         the whole priority queue)
	 */
	private boolean updateHopLimit() {
		if (noOfNodes <= 0) {
			return false;
		}

		double avgDegree = (double) noOfEdges / (double) noOfNodes;
		// track the largest average degree seen so far (statistics)
		if (avgDegree > maxAvgDegree) {
			maxAvgDegree = avgDegree;
		}

		float[] stages = weightCalc.maxHops;
		if (maxHopsIndex < stages.length && avgDegree >= maxHopsDegreeLimit) {
			// next stage: new hop limit, then its degree limit (if any)
			maxHops = (int) stages[maxHopsIndex++];
			if (maxHopsIndex < stages.length) {
				maxHopsDegreeLimit = stages[maxHopsIndex++];
			}
			return true;
		}
		return false;
	}

	/**
	 * Calculates the elimination weight (priority) of a remaining node in the
	 * current state of contraction. The priority is a linear combination of
	 * several priority terms obtained from a simulated contraction and from
	 * the bookkeeping data attached to the priority queue.
	 * 
	 * @param node
	 *            node whose priority is calculated
	 * @param phase
	 *            phase of hierarchy creation; during NODEORDER_INIT no access
	 *            to the priority queue is allowed since not all nodes have
	 *            been inserted yet
	 * @return the priority together with some of its terms
	 */
	private CalculationResult calculateEliminationWeight(int node, Phase phase) {
		// number of already eliminated neighbors, only available once the
		// priority queue has been filled
		int deletedNeighbors = 0;
		if (phase != Phase.NODEORDER_INIT) {
			deletedNeighbors = pqData(node).deletedNeighbors;
		}

		// Perform a simulated contraction of node (simulateOnly = true) to
		// calculate several priority terms, e.g. the edge difference.
		// (cleanup: the unused inDegree/outDegree locals and the pointless
		// "processResult = null" of the old version were removed)
		ProcessResult processResult = processNode(node, phase, true);

		int searchSpace = processResult.searchSpace;
		int edgeDiff = processResult.edgeDiff;
		int newEdges = processResult.newEdges;
		int shortcutOriginalEdgeSum = processResult.shortcutOriginalEdgeSum;

		// Priority terms: edge difference, number of new edges, deleted
		// neighbors, search space of local searches
		double weight = ((double) edgeDiff * weightCalc.edgeDiffMult
				+ (double) newEdges * weightCalc.newEdgesMult
				+ (double) deletedNeighbors * weightCalc.delNeighbMult + ((double) searchSpace * weightCalc.searchSpaceMult));

		// size of Voronoi region, extract square root
		if (weightCalc.voronoiMult != 0) {
			int voronoiNumber = 0;
			if (phase != Phase.NODEORDER_INIT) {
				voronoiNumber = pqData(node).voronoiNumber;
			}
			weight += Math.sqrt(((double) voronoiNumber)
					* weightCalc.voronoiMult);
		}

		// Upper bound on search paths found by the query algorithm. There is
		// a command-line argument (-T) that changes the behaviour to
		// time-dependend queries. In this case, the log10(+1) is used since
		// the priority term eventually exceeds even a long for the PTV
		// Western Europe road network.
		if (weightCalc.searchPathHopBorderMult != 0) {
			if (phase != Phase.NODEORDER_INIT) {
				if (!weightCalc.searchPathHopBorderOriginalEdges) {
					weight += (pqData(node).searchPathHopBorder * weightCalc.searchPathHopBorderMult);
				} else {
					weight += (weightCalc.searchPathHopBorderMult * Math
							.log10(1 + pqData(node).searchPathHopBorder));
				}
			}
		}

		// Relative betweenness: the fraction of neighbors with smaller
		// betweenness is a priority term in [0,1] and thus needs an
		// appropriate weight.
		if (weightCalc.betweennessAdd != 0) {
			weight += calculateSmallerNeighbors(node, betweenness)
					* weightCalc.betweennessAdd;
		}
		// Relative reach: the fraction of neighbors with smaller reach
		// is a priority term in [0,1] and thus needs an appropriate weight.
		if (weightCalc.reachAdd != 0) {
			weight += calculateSmallerNeighbors(node, reach)
					* weightCalc.reachAdd;
		}

		// count the original edges that the shortcuts added during the
		// simulated contraction represent
		if (weightCalc.shortcutOriginalEdgeSumMult != 0) {
			weight += weightCalc.shortcutOriginalEdgeSumMult
					* Math.sqrt((double) shortcutOriginalEdgeSum);
		}

		return new CalculationResult(weight, searchSpace, edgeDiff, newEdges);
	}

	/**
	 * Recomputes the elimination weight (priority) of a node during the
	 * elimination phase and writes the new key and search-space value back
	 * into the priority queue.
	 * 
	 * @param node
	 *            node whose queue entry is refreshed
	 * @return the freshly calculated priority terms
	 */
	private CalculationResult updateEliminationWeight(int node) {
		// the initialization phase never reaches this method, so the phase is
		// always NODEORDER_ELIMINATE here
		CalculationResult result = calculateEliminationWeight(node,
				Phase.NODEORDER_ELIMINATE);

		pqElimination.updateKey(node, result.weight);
		pqElimination.setSearchSpace(node, result.searchSpace);

		return result;
	}

	/**
	 * Calculates the fraction of distinct neighbors of node whose value
	 * (e.g. betweenness or reach) is smaller than the value of node. The
	 * result lies in [0,1] and serves as a relative priority term.
	 * 
	 * @param node
	 *            node whose neighborhood is inspected
	 * @param values
	 *            one value per node, indexed by node id
	 * @return fraction of distinct neighbors with a smaller value, or 0 if
	 *         the node has no neighbors
	 */
	private double calculateSmallerNeighbors(int node, Vector<Double> values) {
		int lastEdge = graph.lastEdge(node);
		Vector<Integer> neighbors = new Vector<Integer>();

		for (int e = graph.firstLevelEdge(node); e < lastEdge; e++) {
			neighbors.add(graph.edge(node, e).getTarget());
		}

		// sort so that duplicate neighbors (parallel edges) become adjacent
		// and each distinct neighbor is counted exactly once
		Collections.sort(neighbors);
		int previous = Constants.SPECIAL_NODEID;
		int noOfNeighbors = 0;
		int noOfSmallNeighbors = 0;

		double myValue = values.get(node);

		for (Integer neighbor : neighbors) {

			if (neighbor == previous)
				continue;

			previous = neighbor;
			noOfNeighbors++;

			// BUGFIX: look up the value of the neighbor itself; the old code
			// read values.get(i) with a running counter i, i.e. it compared
			// against the values of the unrelated nodes 0, 1, 2, ...
			if (values.get(neighbor) < myValue)
				noOfSmallNeighbors++;
		}
		return noOfNeighbors == 0 ? 0 : (double) noOfSmallNeighbors
				/ noOfNeighbors;
	}

	private ProcessResult processNode(int node, Phase phase,
			boolean simulateOnly) {

		// System.out.println("Process node " + node + " ......");

		// range in the edge array, for each node the edges are
		// partitioned into edges where the second incident edge,
		// called target independent of the direction of the edge,
		// is already contracted/eliminated or not.
		// firstEdge
		// ..
		// (edges to contracted nodes)
		// ..
		// firstLevelEdge
		// ..
		// (edges to remaining nodes)
		// ..
		// lastEdge
		int firstEdge = graph.firstLevelEdge(node);
		int lastEdge = graph.lastEdge(node);

		// We sort the edges so we can process them more efficiently. This
		// is e.g. used to add bidirectional flags to shortcuts, if
		// they exists in both directions with the same weight.
		if (firstEdge < lastEdge) {
			graph.sortEdges(node);
		}

		int inDegree = 0;
		int outDegree = 0;

		if (phase == Phase.NODEORDER_INIT || phase == Phase.NODEORDER_ELIMINATE) {
			// calculate the incoming degree and outgoing degree
			// of the node (always wrt to the remainig graph)
			// used for weight (priority) calculation
			for (int edgeID = firstEdge; edgeID < lastEdge; edgeID++) {
				Edge edge = graph.edge(node, edgeID);

				if (edge.isDirected((byte) 0))
					outDegree++;
				if (edge.isDirected((byte) 1))
					inDegree++;
			}

		}

		// the edge difference is implemented as the difference
		// in the number of entries in the edge array.
		// Since each remaining edge is still stored twice,
		// at both incident edges, multiply by 2.
		// Initially, the edges incident to the currently
		// processed nodes count negative. Each required shortcut
		// increases the difference by 2.
		int edgeDiff = -2 * (lastEdge - firstEdge);
		// New edge counter, similar to edgeDiff.
		int newEdgesCounter = 0;
		// Search space, a priority term. Depends on the
		// implementation of the local searches.
		int searchSpace = 0;

		// Only act if the current node has remainig edges.
		if (firstEdge < lastEdge) {

			// debug: Check for parallel edges, these must not exist.
			// Usually, this is ensured by UpdateableGraph::addShortcutEdge().
			// #ifndef NDEBUG
			// int previousIn = SPECIAL_NODEID;
			// int previousOut = SPECIAL_NODEID;
			// for ( int e = firstEdge; e < lastEdge; e++ )
			// {
			// Edge edge = graph.edge(e);
			// if ( edge.isDirected((byte)1) )
			// {
			// if ( previousIn == edge.target() )
			// {
			// cout << "node " << node << endl;
			// for (int edgeID = firstEdge; edgeID < lastEdge; edgeID++)
			// {
			// cout << graph.edge(edgeID) << endl;
			// }
			// }
			// assert( previousIn != edge.target() );
			// previousIn = edge.target();
			// }
			// if ( edge.isDirected((byte)0) )
			// {
			// if ( previousOut == edge.target() )
			// {
			// cout << "node " << node << endl;
			// for (int edgeID = firstEdge; edgeID < lastEdge; edgeID++)
			// {
			// cout << graph.edge(edgeID) << endl;
			// }
			// }
			// assert( previousOut != edge.target() );
			// previousOut = edge.target();
			// }
			// }
			// #endif

			// ***
			// The next if blocks specify the local searches because
			// there are four significantly different implementations:
			// - 1-hop search
			// - 2-hop search using many-to-many
			// - local Dijkstra search with 1-hop backward search
			// - local Dijkstra search
			// ***

			// 1-hop search
			// ------------
			// This is a simple scan of the adjacency array.
			if (maxHops == 1) {
				// The search is implemented as a triply nested loop.
				// First loop: Scan edge array of node for nodes incident to
				// incoming edges (v,node)
				// Second loop: Scan edge array of node for nodes incident to
				// outgoing edges (node,w)
				// Third loop: Scan edge array of v to find an edge to w.
				// A witness path is found, if its length is not longer
				// than the path <v,node,w>.
				for (int eIn = firstEdge; eIn < lastEdge; eIn++) {
					Edge edgeIn = graph.edge(node, eIn);

					if (!edgeIn.isDirected((byte) 1))
						continue;

					int target = edgeIn.getTarget();
					int inFirstEdge = graph.firstLevelEdge(target);
					int inLastEdge = graph.lastEdge(target);

					if (phase == Phase.NODEORDER_INIT
							|| phase == Phase.NODEORDER_ELIMINATE) {
						searchSpace += inLastEdge - inFirstEdge;
					}

					for (int eOut = firstEdge; eOut < lastEdge; eOut++) {
						Edge edgeOut = graph.edge(node, eOut);

						if (!edgeOut.isDirected((byte) 0))
							continue;

						// A path to the start node is not necessary.
						if (edgeIn.getTarget() == edgeOut.getTarget())
							continue;

						boolean foundWitness = false;
						for (int e = inFirstEdge; e < inLastEdge; e++) {
							Edge edge = graph.edge(edgeIn.getTarget(), e);

							if (edge.isDirected((byte) 0)
									&& edge.getTarget() == edgeOut.getTarget()) {
								if (edge.getWeight() <= (edgeIn.getWeight() + edgeOut
										.getWeight())) {
									foundWitness = true;
								}
								break;
							}
						}

						// If no witness path is found, after the contraction of
						// node,
						// shortest paths distance would possibly increase. The
						// countermeasure
						// is to add a shortcut edge with the length of the
						// shortest path.
						// Since the search is limted, sometimes shortcuts are
						// added,
						// that are not necessary. But they do not invalidate
						// the correctness.
						if (!foundWitness) {
							prepareNewShortcutEdge(node, firstEdge, lastEdge,
									eIn, edgeIn, eOut, edgeOut);
						}

						if (phase == Phase.NODEORDER_INIT
								|| phase == Phase.NODEORDER_ELIMINATE) {
							// Count each scan of the adjacency array as 1
							// search space unit.
							// This works fine. The search space is not so
							// relevant for
							// the 1-hop search since at least in roat networks
							// the degree
							// of nodes is small.
							searchSpace++;
						}
					}
				}
			}

			// 2-hop search
			// ------------
			// The 2-hop search is implemented as a simplified many-to-many
			// search
			// from all nodes incident to incoming edges to all nodes incident
			// to outgoing
			// edges. It uses additional procedures.
			else if (maxHops == 2) {
				// 1-hop backward search from nodes incident to outgoing edges
				// to fill buckets.
				initBucketsMTM(node, firstEdge, lastEdge);

				// For each node incident to an incoming edge, perform a
				// 1-hop forward search using the previously initalized buckets
				// to find distances (ommiting current node (node)) to
				// nodes 1 or 2 hops away. The distances are stored
				// in the array _possibleWitnesses, index is node id.
				for (int eIn = firstEdge; eIn < lastEdge; eIn++) {
					Edge edgeIn = graph.edge(node, eIn);

					if (!edgeIn.isDirected((byte) 1))
						continue;

					// Fill _possibleWitnesses vector.
					findPossibleWitnessesMTM(node, edgeIn.getTarget());

					if (phase == Phase.NODEORDER_INIT
							|| phase == Phase.NODEORDER_ELIMINATE) {
						// ########## CHANGE: lastEdge - lastEdge to lastEdge -
						// firstEdge
						searchSpace += graph.lastEdge(edgeIn.getTarget())
								- graph.lastEdge(edgeIn.getTarget());
						// ########## END CHANGE
					}

					// Check the distance to each node incident to an outgoing
					// edge.
					for (int eOut = firstEdge; eOut < lastEdge; eOut++) {
						Edge edgeOut = graph.edge(node, eOut);

						if (!edgeOut.isDirected((byte) 0))
							continue;

						// A path to the start node is not necessary.
						if (edgeIn.getTarget() == edgeOut.getTarget())
							continue;

						// Get length of shortest witness.
						// witnessWeight == Weight::MAX_VALUE if no witness
						// exists
						int witnessWeight = possibleWitnesses[edgeOut
								.getTarget()];
						possibleWitnesses[edgeOut.getTarget()] = Integer.MAX_VALUE;

						// If shortest path would have increased length, a
						// shortcut edge may be necessary.
						// Since the search is limted, sometimes shortcuts are
						// added,
						// that are not necessary. But they do not invalidate
						// the correctness.
						if (witnessWeight > (edgeIn.getWeight() + edgeOut
								.getWeight())) {
							prepareNewShortcutEdge(node, firstEdge, lastEdge,
									eIn, edgeIn, eOut, edgeOut);
						}
					}
				}

				if (phase == Phase.NODEORDER_INIT
						|| phase == Phase.NODEORDER_ELIMINATE) {
					searchSpace += linkedListMTM.size();
				}

				// cleanup the used mtm data structures
				clearMTM(node, firstEdge, lastEdge);
			}

			// local dijkstra search, with 1-hop backward search
			// -------------------------------------------------
			// The 1-hop backward search is only used if hop-limits are
			// specified.
			// The current hop-limit is stored in _maxHops.
			else if (oneHopBackwardSearch && maxHops > 0) {

				// Prepare target-flags for the local search. The search is
				// stopped if all targets are settled. These target flags need
				// to be removed after the local searches.
				int noOfTargets = 0;
				for (int eOut = firstEdge; eOut < lastEdge; eOut++) {
					Edge edgeOut = graph.edge(node, eOut);
					if (!edgeOut.isDirected((byte) 0))
						continue;

					// Since we perform an additional 1-hop-backward search,
					// we do use the nodes adjacent to the outgoing node
					// as targets.
					int target = edgeOut.getTarget();
					int outFirstEdge = graph.firstLevelEdge(target);
					int outLastEdge = graph.lastEdge(target);
					for (int e = outFirstEdge; e < outLastEdge; e++) {
						Edge edge = graph.edge(target, e);

						if (!edge.isDirected((byte) 1)
								|| edge.getTarget() == node)
							continue;
						if (!graph.node(edge.getTarget()).isTarget()) {
							graph.node(edge.getTarget()).setTarget(true);
							noOfTargets++;
						}
					}
				}

				// A local search starting at each node incident to an incoming
				// edge is performed. The distances to the nodes incident to the
				// outgoing edges are used to decide the necessity of shortcut
				// edges.
				for (int eIn = firstEdge; eIn < lastEdge; eIn++) {
					Edge edgeIn = graph.edge(node, eIn);

					// need a incoming edge
					if (!edgeIn.isDirected((byte) 1))
						continue;

					// This is the second stop criterion for the local search.
					// We only want to find witness paths that are at most as
					// long as the path via the currently processed node. If we
					// exceed this distance, we can stop the search. The maximum
					// distance is the length of the incoming edge plus the
					// maximum length of an outgoing edge minus the minimum
					// incoming edge of the node incident to this outgoing edge.
					// maxOutDist is this length without the length of the
					// incoming edge.
					int maxOutDist = 0;
					for (int eOut = firstEdge; eOut < lastEdge; eOut++) {
						Edge edgeOut = graph.edge(node, eOut);

						if (!edgeOut.isDirected((byte) 0))
							continue;

						// A path to the start node is not necessary.
						if (edgeIn.getTarget() == edgeOut.getTarget())
							continue;

						// one hop backward search
						int minBackDist = Integer.MAX_VALUE;
						int target = edgeOut.getTarget();
						int outFirstEdge = graph.firstLevelEdge(target);
						int outLastEdge = graph.lastEdge(target);
						for (int e = outFirstEdge; e < outLastEdge; e++) {
							Edge edge = graph.edge(target, e);

							if (!edge.isDirected((byte) 1)
									|| edge.getTarget() == node)
								continue;
							if (minBackDist > edge.getWeight())
								minBackDist = edge.getWeight();
						}

						// Special case: the node incident to the outgoing edge
						// has no ingoing edges
						// except for the one from "node". Look out for problems
						// converting unsigned int to int.
						if (minBackDist < Integer.MAX_VALUE) {
							if (maxOutDist < ((int) edgeOut.getWeight() - (int) minBackDist))
								maxOutDist = ((int) edgeOut.getWeight() - (int) minBackDist);
						}
					}

					// maximum number of settled nodes during local search, 0 =
					// infinite.
					// This limit can differ between weight calculation
					// (simulateOnly=true)
					// and actual contraction.
					int maxSettled;
					if (phase == Phase.NODEORDER_INIT
							|| phase == Phase.NODEORDER_ELIMINATE) {
						maxSettled = weightCalc.maxSettledElim;
						if (simulateOnly) {
							maxSettled = weightCalc.maxSettledApprox;
						}
					} else {
						maxSettled = contractParams.maxSettledElim;
					}

					// Hop-limit, decrease the hop-limit by 1 since an
					// additional 1-hop backward search is performed.
					int maxHops = this.maxHops;
					if (maxHops > 0)
						maxHops--;

					// *** Perform local search. ***
					// Only perform local search if there is hope to find a
					// witness. This check is necessary since maxOutDist may be
					// negative.
					if ((maxOutDist + edgeIn.getWeight()) > 0) {
						localDijkstra.searchWithoutTarget(edgeIn.getTarget(),
								node /*
									 * ignore node that will be contracted
									 */, maxOutDist + edgeIn.getWeight() /*
																		 * maximum
																		 * distance
																		 * of
																		 * the
																		 * search
																		 */,
								noOfTargets, maxSettled /*
														 * max settled nodes 0
														 * == inf
														 */, maxHops);

						if (phase == Phase.NODEORDER_INIT
								|| phase == Phase.NODEORDER_ELIMINATE) {
							// add up search space sizes of the local searches
							// for weight calculation
							searchSpace += localDijkstra.noOfSettledNodes();
						}
					}

					// Local edge reduction uses the results of the local search
					// to remove edges that are not on any shortest path.
					if (localReduceEdges
							&& (phase == Phase.NODEORDER_ELIMINATE || phase == Phase.CONSTRUCT)
							&& !simulateOnly) {
						reduceEdgesLocal(node, edgeIn);
					}

					// We finished the local search from start node v =
					// edgeIn.target(). Now we want to find witness paths to
					// each node w incident to an outgoing edge (node,w) Either
					// the local search settled w or we perform a 1-hop backward
					// search from w.
					for (int eOut = firstEdge; eOut < lastEdge; eOut++) {
						Edge edgeOut = graph.edge(node, eOut);

						if (!edgeOut.isDirected((byte) 0))
							continue;

						// A path to the start node is not necessary.
						if (edgeIn.getTarget() == edgeOut.getTarget())
							continue;

						boolean foundWitness = false;

						// First check if the local search reached
						// edgeOut.target() with a
						// _maxHops - 1 witness path. If there is no such
						// witness, do the
						// 1-hop backward search.
						if (localDijkstra.isSettled(edgeOut.getTarget()) == 0
								|| localDijkstra
										.distanceTo(edgeOut.getTarget()) > (edgeIn
										.getWeight() + edgeOut.getWeight())
								|| localDijkstra.parentOf(edgeOut.getTarget()) == node)

						{
							// Scan through the edges (x,w) incoming to the node
							// w incident to the outgoing edge
							// (node,w). (w = edgeOut.target()) to perform a
							// 1-hop backward search.
							// Then we check if x is settled by the local search
							// with to find additional
							// witness paths.
							int target = edgeOut.getTarget();
							int outFirstEdge = graph.firstLevelEdge(target);
							int outLastEdge = graph.lastEdge(target);
							for (int e = outFirstEdge; e < outLastEdge; e++) {
								Edge edge = graph.edge(target, e);

								if (!edge.isDirected((byte) 1)
										|| edge.getTarget() == node)
									continue;

								if (localDijkstra.isSettled(edge.getTarget()) != 0
										&& localDijkstra.distanceTo(edge
												.getTarget())
												+ edge.getWeight() <= (edgeIn
												.getWeight() + edgeOut
												.getWeight())
										&& localDijkstra.parentOf(edge
												.getTarget()) != node) {
									foundWitness = true;
									break;
								}
							}
						} else {
							foundWitness = true;
						}

						// If shortest path would have increased length, a
						// shortcut edge may be necessary.
						// Since the search is limited, sometimes shortcuts are
						// added,
						// that are not necessary. But they do not invalidate
						// the correctness.
						if (!foundWitness) {
							prepareNewShortcutEdge(node, firstEdge, lastEdge,
									eIn, edgeIn, eOut, edgeOut);
						}
					}
					localDijkstra.clear();
				}

				// Remove target-flags that are previously set since these are
				// stored in the global graph datastructure.
				for (int eOut = firstEdge; eOut < lastEdge; eOut++) {
					Edge edgeOut = graph.edge(node, eOut);

					if (!edgeOut.isDirected((byte) 0))
						continue;

					// one hop backward search
					int target = edgeOut.getTarget();
					int outFirstEdge = graph.firstLevelEdge(target);
					int outLastEdge = graph.lastEdge(target);
					for (int e = outFirstEdge; e < outLastEdge; e++) {
						Edge edge = graph.edge(target, e);

						if (!edge.isDirected((byte) 1)
								|| edge.getTarget() == node)
							continue;
						if (graph.node(edge.getTarget()).isTarget()) {
							graph.node(edge.getTarget()).setTarget(false);
							noOfTargets--;
						}
					}
				}

				// In case of edge reduction, it is possible that an edge (v,w)
				// has been removed
				// with (v,node) an incoming edge and (node,w) an outgoing edge.
				// Because of the
				// 1-hop backward search from node w, the node v becomes a
				// target. So it is
				// necessary to check nodes incident to ingoing edges for target
				// flags, too.
				if (localReduceEdges
						&& (phase == Phase.NODEORDER_ELIMINATE || phase == Phase.CONSTRUCT)
						&& !simulateOnly) {

					for (int eIn = firstEdge; eIn < lastEdge; eIn++) {
						Edge edgeIn = graph.edge(node, eIn);

						if (!edgeIn.isDirected((byte) 1))
							continue;
						if (graph.node(edgeIn.getTarget()).isTarget()) {
							graph.node(edgeIn.getTarget()).setTarget(false);
							noOfTargets--;
						}
					}
				}
				assert (noOfTargets == 0) : "noOfTargets sollte 0 sein, ist aber "
						+ noOfTargets;
			}

			// simple local search with dijkstra
			// ---------------------------------
			// This is the basic local search without any speedups.
			else {

				// Prepare target-flags for the local search. The search is
				// stopped
				// if all targets are settled. These target flags need to be
				// removed
				// after the local searches.
				int noOfTargets = 0;
				for (int eOut = firstEdge; eOut < lastEdge; eOut++) {
					Edge edgeOut = graph.edge(node, eOut);

					if (!edgeOut.isDirected((byte) 0))
						continue;
					if (!graph.node(edgeOut.getTarget()).isTarget()) {
						graph.node(edgeOut.getTarget()).setTarget(true);
						noOfTargets++;
					}
				}

				// A local search starting at each node incident to an incoming
				// edge
				// is performed. The distances to the nodes incident to the
				// outgoing edges
				// are used to decide the necessity of shortcut edges.
				for (int eIn = firstEdge; eIn < lastEdge; eIn++) {
					Edge edgeIn = graph.edge(node, eIn);

					if (!edgeIn.isDirected((byte) 1))
						continue;

					// This is the second stop criterion for the local search.
					// We only want to find
					// witness paths that are at most as long as the path via
					// the currently processed node.
					// If we exceed this distance, we can stop the search.
					// The maximum distance is the length of the incoming edge
					// plus
					// the maximum length of an outgoing edge.
					// maxOutDist is this length without the length of the
					// incoming edge.
					int maxOutDist = 0;
					for (int eOut = firstEdge; eOut < lastEdge; eOut++) {
						Edge edgeOut = graph.edge(node, eOut);

						if (!edgeOut.isDirected((byte) 0))
							continue;

						// A path to the start node is not necessary.
						if (edgeIn.getTarget() == edgeOut.getTarget())
							continue;

						if (maxOutDist < edgeOut.getWeight())
							maxOutDist = edgeOut.getWeight();
					}

					// We assume that each edge has positive weight. If
					// maxOutDist == 0,
					// then there exists no outgoing edge (or only one leading
					// to the
					// start node), we do not need a local search and also no
					// shortcuts.
					if (maxOutDist == 0)
						continue;

					// maximum number of settled nodes during local search, 0 =
					// infinite.
					// This limit can differ between weight calculation
					// (simulateOnly=true)
					// and actual contraction.
					int maxSettled;
					if (phase == Phase.NODEORDER_INIT
							|| phase == Phase.NODEORDER_ELIMINATE) {
						maxSettled = weightCalc.maxSettledElim;
						if (simulateOnly) {
							maxSettled = weightCalc.maxSettledApprox;
						}
					} else {
						maxSettled = contractParams.maxSettledElim;
					}

					// *** Perform local search. ***
					localDijkstra.searchWithoutTarget(edgeIn.getTarget(),
							node /*
								 * ignore node that will be eliminated
								 */, maxOutDist + edgeIn.getWeight() /*
																	 * maximum
																	 * distance
																	 * of the
																	 * search
																	 */,
							noOfTargets, maxSettled /*
													 * max settled nodes 0 ==
													 * inf
													 */, maxHops);

					if (phase == Phase.NODEORDER_INIT
							|| phase == Phase.NODEORDER_ELIMINATE) {
						searchSpace += localDijkstra.noOfSettledNodes();
					}

					// Local edge reduction uses the results of the local search
					// to remove
					// edges that are not on any shortest path.
					if (localReduceEdges
							&& (phase == Phase.NODEORDER_ELIMINATE || phase == Phase.CONSTRUCT)
							&& !simulateOnly) {
						reduceEdgesLocal(node, edgeIn);
					}

					// The local search starting at v ( = edgeIn.target() ) is
					// finished.
					// Now we check for each node w incident to an outgoing edge
					// (node,w) ( = edgeOut )
					// if the path <v,node,w> is shorter than the shortest path
					// distance v -> w ignoring
					// node. If so, a shortcut edge is necessary to save
					// shortest paths distances.
					// Since the local search may be limited, sometimes
					// shortcuts are added,
					// that are not necessary. But they do not invalidate the
					// correctness.
					for (int eOut = firstEdge; eOut < lastEdge; eOut++) {
						Edge edgeOut = graph.edge(node, eOut);

						if (!edgeOut.isDirected((byte) 0))
							continue;

						// no self loops
						if (edgeIn.getTarget() == edgeOut.getTarget())
							continue;

						// Compare length of path <v,node,w> to distance between
						// v -> w ignoring node.
						if (localDijkstra.isSettled(edgeOut.getTarget()) == 0
								|| localDijkstra
										.distanceTo(edgeOut.getTarget()) > (edgeIn
										.getWeight() + edgeOut.getWeight())
								|| localDijkstra.parentOf(edgeOut.getTarget()) == node) {

							prepareNewShortcutEdge(node, firstEdge, lastEdge,
									eIn, edgeIn, eOut, edgeOut);
						}

						// If a witness exists, we may want to store the witness
						// to use this
						// information in a later hierarchy construction.
						else {
							if (phase == Phase.NODEORDER_ELIMINATE
									&& !simulateOnly) {
								// storeWitness(node, edgeIn, edgeOut);
							}
						}
					}
					localDijkstra.clear();
				}

				// Remove previously added target flags from the global graph
				// datastructure.
				// If we do not, they manipulate subsequent local searches.
				for (int eOut = firstEdge; eOut < lastEdge; eOut++) {
					Edge edgeOut = graph.edge(node, eOut);

					if (!edgeOut.isDirected((byte) 0))
						continue;
					graph.node(edgeOut.getTarget()).setTarget(false);
				}
			}

		}

		// VERBOSE_CONTRACT( cout << " search space " << searchSpace << flush; )
		// VERBOSE_CONTRACT( cout << " old " << 2*(lastEdge-firstEdge) << flush;
		// )
		// VERBOSE_CONTRACT( cout << " new " << 2*_newEdges.size() << flush; )

		// Save shortcuts to text file, starting with node, then the triple
		// (v,w,weight) for each shortcut (v,w).
		// if ( saveShortcutsText && phase == PHASE_NODEORDER_ELIMINATE &&
		// !simulateOnly && _shortcutsText.is_open() )
		// {
		// _shortcutsText << node;
		// }

		// Now we add necessary shortcuts that are stored in a buffer
		// (_newEdges).
		// Also some statistics and outputs are performed.
		int shortcutOriginalEdgeSumTemp = 0;
		NewEdge newEdge = null;
		for (CompleteEdge edge : newEdges) {
			if (edge instanceof NewEdge) {
				newEdge = (NewEdge) edge;
			}
			// *** Add shortcuts ***
			// The addShorcutEdge...() function returns the edge difference in
			// terms of used entries in the edge array. It returns -2, 0 or 2.
			int diff;
			if (simulateOnly) {
				diff = graph.addShortcutEdgeSimulate(newEdge.getSource(),
						newEdge);
			} else {
				diff = graph.addShortcutEdge(newEdge.getSource(), newEdge);

				// Output of shortcut edges that can be used in a later
				// hierarchy construction.
				if (phase == Phase.NODEORDER_ELIMINATE
						&& saveShortcutsWitnesses) {
					// storeShortcuts.addShortcut(node, newEdge.getSource(),
					// newEdge.getTarget(), newEdge.isBidirected());
				}
			}

			edgeDiff += diff;

			if (phase == Phase.NODEORDER_INIT
					|| phase == Phase.NODEORDER_ELIMINATE) {
				// Used for weight calculation.
				newEdgesCounter += diff;

				// Priority term: Count sum of original edges the new shortcuts
				// represent.
				// Count bidirectional edges twice.
				shortcutOriginalEdgeSumTemp += newEdge
						.getShortcutOriginalEdgeCount();
				if (newEdge.isBidirected())
					shortcutOriginalEdgeSumTemp += newEdge
							.getShortcutOriginalEdgeCount();
			}

			// output shorcuts, one line per contracted node
			// if ( saveShortcutsText && phase == PHASE_NODEORDER_ELIMINATE &&
			// !simulateOnly && _shortcutsText.is_open() )
			// {
			// _shortcutsText << " " << iter->source << " " <<
			// iter->edge.getTarget() << " " << iter->edge.getWeight();
			// if (iter->edge.isBidirected()) _shortcutsText << " " <<
			// iter->edge.getTarget() << " " << iter->source << " " <<
			// iter->edge.getWeight();
			// }

		}

		// Clear buffer of new edges.
		newEdges = new EdgeList();

		// Update new level of contracted node. In case of node ordering, also
		// update attributes and neighbors.
		if (!simulateOnly
				&& (phase == Phase.NODEORDER_ELIMINATE || phase == Phase.CONSTRUCT)) {
			// before updating neighbors set current node into right level
			graph.node(node).setLevel(currentLevel);
			graph.changeNodeLevelOnlyReverseEdges(node);

			noOfEdges += edgeDiff;
			noOfNodes--;

			if (phase == Phase.NODEORDER_ELIMINATE) {
				pqElimination.setLevel(node, currentLevel);
				// VERBOSE_CONTRACT( cout << " edges " << _noOfEdges << flush; )
				updateAfterContraction(node, firstEdge, lastEdge);
				// VERBOSE_CONTRACT( cout << " )" << flush; )
			}
		}

		if (phase == Phase.NODEORDER_INIT || phase == Phase.NODEORDER_ELIMINATE) {
			// if ( saveShortcutsText && phase == PHASE_NODEORDER_ELIMINATE &&
			// !simulateOnly && _shortcutsText.is_open() )
			// {
			// _shortcutsText << endl;
			// }
			// Return attributes/priority terms, if requested.
			// if ( shortcutOriginalEdgeSum != NULL ) *shortcutOriginalEdgeSum =
			// shortcutOriginalEdgeSumTemp;
			// if ( searchSpaceResult != NULL ) *searchSpaceResult =
			// searchSpace;
			// if ( edgeDiffResult != NULL ) *edgeDiffResult = edgeDiff;
			// if ( newEdgesResult != NULL ) *newEdgesResult = newEdgesCounter;
		}
		return new ProcessResult(searchSpace, edgeDiff, newEdgesCounter,
				inDegree, outDegree, shortcutOriginalEdgeSumTemp);
	}

	/**
	 * Collects, for a node {@code target}, the lengths of all 1-hop and 2-hop
	 * witness paths found via the many-to-many bucket structure, ignoring the
	 * node that is about to be contracted. Minimum lengths are written into
	 * {@code possibleWitnesses}, indexed by the witness start node.
	 *
	 * @param node   the node being contracted (excluded from witness paths)
	 * @param target the end node of the candidate witness paths
	 */
	private void findPossibleWitnessesMTM(int node, int target) {
		assert (possibleWitnesses.length >= graph.noOfNodes());

		// 1-hop witnesses: scan the bucket b(target). Buckets are stored as a
		// 1-based singly linked list in linkedListMTM, terminated by 0.
		int cursor = graph.node(target).getPQElement();
		while (cursor != 0) {
			assert (cursor <= linkedListMTM.size());
			LinkedNodeMTM entry = linkedListMTM.get(cursor - 1);
			possibleWitnesses[entry.nodeID] = Math.min(
					possibleWitnesses[entry.nodeID], entry.weight);
			cursor = entry.next;
		}

		// 2-hop witnesses: do a 1-hop forward scan over the outgoing edges
		// (target,x) of target and walk the bucket b(x) of each reached node;
		// a candidate witness has length bucket-weight + edge-weight.
		int edgeEnd = graph.lastEdge(target);
		for (int e = graph.firstLevelEdge(target); e < edgeEnd; e++) {
			Edge hop = graph.edge(target, e);

			// Only outgoing edges; never route through the contracted node.
			if (!hop.isDirected((byte) 0) || hop.getTarget() == node)
				continue;

			cursor = graph.node(hop.getTarget()).getPQElement();
			while (cursor != 0) {
				assert (cursor <= linkedListMTM.size());
				LinkedNodeMTM entry = linkedListMTM.get(cursor - 1);
				// A bucket entry for target itself is already a 1-hop witness.
				if (entry.nodeID != target) {
					int candidate = entry.weight + hop.getWeight();
					if (candidate < possibleWitnesses[entry.nodeID]) {
						possibleWitnesses[entry.nodeID] = candidate;
					}
				}
				cursor = entry.next;
			}
		}
	}

	/**
	 * Local edge reduction: after the local search started at
	 * {@code v = edgeIn.getTarget()}, removes every outgoing edge (v,w) whose
	 * weight is strictly larger than the distance v -&gt; w found by the
	 * search, since such an edge is not on any shortest path. Bidirectional
	 * edges are not removed but downgraded to one-way.
	 *
	 * @param node   the node being contracted (its edges are left untouched)
	 * @param edgeIn the incoming edge whose target was the search start node
	 */
	private void reduceEdgesLocal(int node, Edge edgeIn) {
		int target = edgeIn.getTarget();
		int rE = graph.lastEdge(target);
		if (rE > 0) {
			// The edges are processed descending: removal rearranges the
			// adjacency array, and going backwards keeps the remaining
			// indices valid. (The former "if (rE == 0) break;" guard was a
			// leftover from unsigned C++ indices and is redundant for int.)
			int rFirstEdge = graph.firstLevelEdge(target);
			for (rE--; rE >= rFirstEdge; rE--) {
				Edge rEdge = graph.edge(target, rE);

				if (rEdge.isDirected((byte) 0) && rEdge.getTarget() != node) {
					if (localDijkstra.isSettled(rEdge.getTarget()) != 0
							&& localDijkstra.distanceTo(rEdge.getTarget()) < rEdge
									.getWeight()) {
						// Each edge is stored in the adjacency arrays of both
						// incident nodes; locate the reverse copy once (the
						// original computed it a second time in the removal
						// branch).
						int rReverseE = graph.reverseLevelEdge(target, rE);
						// Special case: need to consider bidirectional flags.
						// We can only remove unidirectional edges, but a
						// bidirectional edge becomes one-way. The reverse
						// copy in the graph data structure must be flagged
						// consistently.
						if (rEdge.isBidirected()) {
							rEdge.makeOneWay((byte) 1);
							graph.edge(target, rReverseE).makeOneWay((byte) 0);
						} else {
							int reverseNodeID = rEdge.getTarget();
							graph.removeEdge(target, rE);
							graph.removeEdge(reverseNodeID, rReverseE);
							noOfEdges -= 2;
						}
					}
				}
			}
		}
	}

	/**
	 * Buffers a shortcut edge (edgeIn.target, edgeOut.target) with weight
	 * edgeIn.weight + edgeOut.weight replacing the path through the contracted
	 * node. For a bidirectional pair only one bidirectional shortcut is kept.
	 * The buffered edges are added to the graph later, all at once.
	 *
	 * @param node      the node being contracted (the shortcut's middle node)
	 * @param firstEdge index of the first edge of node in the edge array
	 * @param lastEdge  index one past the last edge of node
	 * @param eIn       edge-array index of the incoming edge
	 * @param edgeIn    the incoming edge (edgeIn.target -&gt; node)
	 * @param eOut      edge-array index of the outgoing edge
	 * @param edgeOut   the outgoing edge (node -&gt; edgeOut.target)
	 */
	private void prepareNewShortcutEdge(int node, int firstEdge, int lastEdge,
			int eIn, Edge edgeIn, int eOut, Edge edgeOut) {

		// On bidirectional shortcuts, only insert one bidirectional
		// shortcut instead of two unidirectional ones. The check
		// uses the fact that the adjacency array of node is sorted, so the
		// partner with the smaller target id is always buffered first.
		boolean bidir = edgeIn.isBidirected() && edgeOut.isBidirected();
		boolean addShortcut = !bidir
				|| edgeIn.getTarget() < edgeOut.getTarget();

		if (!addShortcut) {
			// Second half of a bidirectional pair: find the matching shortcut
			// already in the buffer and upgrade it to a two-way edge.
			addShortcut = true;
			for (Edge edge : newEdges) {
				// Bug fix: skip entries that are not NewEdge instances. The
				// previous code kept the NewEdge from an earlier iteration
				// (null on the first iteration, causing an NPE) and
				// re-checked that stale edge.
				if (!(edge instanceof NewEdge))
					continue;
				NewEdge newEdge = (NewEdge) edge;

				if (newEdge.getSource() == edgeOut.getTarget()
						&& newEdge.getTarget() == edgeIn.getTarget()
						&& newEdge.getWeight() == (edgeIn.getWeight() + edgeOut
								.getWeight())) {
					newEdge.makeTwoWay();
					addShortcut = false; // second shortcut not necessary
					// At most one buffered shortcut can match; stop scanning
					// so makeTwoWay() is not applied repeatedly.
					break;
				}
			}
		}
		// Add shortcut edge. All shortcut edges required for the
		// contraction of node are saved in an array and later added
		// at once.
		if (addShortcut) {
			// Prepare shortcut edge; information for path expansion is stored
			// if supported by the edge datastructure, as well as the number
			// of original edges the shortcut represents
			// (shortcutOriginalEdgeCount).
			newEdges.add(new NewEdge(edgeIn.getTarget(), edgeOut.getTarget(),
					edgeIn.getWeight() + edgeOut.getWeight(),
					false /* unidirectional */, node, eIn - firstEdge, eOut
							- firstEdge, edgeIn.getShortcutOriginalEdgeCount()
							+ edgeOut.getShortcutOriginalEdgeCount()));
		}
	}

	/**
	 * Intended to store a witness path found during contraction so that a
	 * later hierarchy construction can reuse it. Not implemented; the only
	 * call site in this class is currently commented out.
	 *
	 * @throws UnsupportedOperationException always — fail fast instead of
	 *                                       silently printing to stdout if
	 *                                       this is ever wired up again
	 */
	private void storeWitness(int node, Edge edgeIn, Edge edgeOut) {
		throw new UnsupportedOperationException(
				"storeWitness is not implemented!");
	}

	/**
	 * Bookkeeping performed after a node has been contracted during node
	 * ordering: updates the deleted-neighbor counters and the upper bounds on
	 * search-path hops of all adjacent nodes, redistributes the contracted
	 * node's Voronoi region, and finally recomputes the elimination weights
	 * of the neighbors.
	 *
	 * @param node      the node that was just contracted
	 * @param firstEdge index of the first edge of node in the edge array
	 * @param lastEdge  index one past the last edge of node
	 */
	private void updateAfterContraction(int node, int firstEdge, int lastEdge) {
		// Update the deleted neighbour counter and the upper bound on search
		// path hops. This must happen before the elimination weights are
		// updated, since those weights depend on these terms.
		int previous = Constants.SPECIAL_NODEID;
		if (!weightCalc.searchPathHopBorderOriginalEdges) {

			PQueueNodeElimination pqDataNode = pqData(node);

			// possibly new search path hops = upper bound of current node + 1
			double newsearchPathHopBorder = pqDataNode.searchPathHopBorder + 1;
			assert (newsearchPathHopBorder >= pqDataNode.searchPathHopBorder);
			for (int e = firstEdge; e < lastEdge; e++) {
				Edge edge = graph.edge(node, e);

				// The adjacency array is sorted by target id; skip a neighbor
				// already processed via a parallel incoming/outgoing edge.
				if (edge.getTarget() == previous)
					continue;
				previous = edge.getTarget();

				PQueueNodeElimination pqDataTarget = pqData(previous);

				// NOTE(review): the queue is handed the pre-increment counter
				// value; confirm setDeletedNeighbors() expects the old count.
				pqElimination.setDeletedNeighbors(edge.getTarget(),
						pqDataTarget.deletedNeighbors);
				pqDataTarget.deletedNeighbors++;

				// increase upper search path hop bound, if necessary
				if (pqDataTarget.searchPathHopBorder < newsearchPathHopBorder) {
					pqElimination.setSearchPathHopBorder(edge.getTarget(),
							newsearchPathHopBorder);
					pqDataTarget.searchPathHopBorder = newsearchPathHopBorder;
				}
			}
		} else {

			// This is an upper bound for the costs of the search paths by a
			// time-dependent search. Requested by Veit Batz.

			PQueueNodeElimination pqDataNode = pqData(node);

			for (int e = firstEdge; e < lastEdge; e++) {
				Edge edge = graph.edge(node, e);

				if (edge.getTarget() == previous)
					continue;
				previous = edge.getTarget();

				PQueueNodeElimination pqDataTarget = pqData(previous);

				// we ignore the direction of the edge and use just the first
				// edge to each neighbor
				double newsearchPathHopBorder = 2
						* pqDataNode.searchPathHopBorder
						+ edge.getShortcutOriginalEdgeCount();
				assert (newsearchPathHopBorder >= pqDataNode.searchPathHopBorder);

				pqElimination.setDeletedNeighbors(edge.getTarget(),
						pqDataTarget.deletedNeighbors);
				pqDataTarget.deletedNeighbors++;

				if (pqDataTarget.searchPathHopBorder < newsearchPathHopBorder) {
					pqElimination.setSearchPathHopBorder(edge.getTarget(),
							newsearchPathHopBorder);
					pqDataTarget.searchPathHopBorder = newsearchPathHopBorder;
				}
			}
		}

		// Update the Voronoi regions, i.e. distribute the nodes in the
		// Voronoi region of the current node among the neighboring Voronoi
		// regions.
		if (weightCalc.voronoiMult != 0 && noOfNodes > 1) {
			distributeVoronoiRegion(node);
		}

		// VERBOSE_CONTRACT( cout << " (" << flush; )

		// Update weights of all neighbors.
		if (!weightCalc.updateHops) {
			// do not update a neighbor twice if there is a separate
			// incoming and outgoing edge to this neighbor.
			// The edges in the edge array are sorted by the node id
			// of the incident node.
			previous = Constants.SPECIAL_NODEID;
			for (int e = firstEdge; e < lastEdge; e++) {
				Edge edge = graph.edge(node, e);

				if (edge.getTarget() == previous)
					continue;
				previous = edge.getTarget();

				// VERBOSE_CONTRACT( cout << " node " << edge.getTarget() <<
				// flush; )
				updateEliminationWeight(edge.getTarget());

			}
		}
		// Test case: update all nodes within the hop-limit.
		// This is quite time consuming and shows only little
		// impact on the resulting hierarchy.
		// A BFS is used to find all nodes, the direction of the
		// edges is ignored. (Ported C++ implementation kept below for
		// reference; this branch is currently a no-op in Java.)
		else {
			// assert( maxHops > 0 );
			// stack< pair<NodeID, NodeID> > bfs;
			// vector<NodeID> updatedNodes;
			// bfs.push( make_pair( node, 0 ) );
			// int maxHops = this.maxHops;
			// // Special case: 1-hop search can affect nodes 2 hops away
			// if ( maxHops == 1 ) maxHops = 2;
			// while ( !bfs.empty() )
			// {
			// int v = bfs.top().first;
			// int hops = bfs.top().second + 1;
			// bfs.pop();
			//
			// int vLastEdge = graph.lastEdge(v);
			// for ( int e = graph.firstLevelEdge(v); e < vLastEdge; e++ )
			// {
			// Edge edge = graph.edge(e);
			// if ( !_updateBfs[edge.target()] )
			// {
			// // VERBOSE_CONTRACT( cout << " node " << edge.target() << flush;
			// )
			// int singleSearchSpace = 0;
			// updateEliminationWeight(edge.target(), &singleSearchSpace);
			// //searchSpace += singleSearchSpace;
			// updatedNodes.push_back(edge.target());
			// _updateBfs[edge.target()] = true;
			//
			// if ( hops < maxHops )
			// {
			// bfs.push( make_pair( edge.target(), hops ) );
			// }
			// }
			// }
			// }
			// for ( vector<NodeID>::const_iterator iter = updatedNodes.begin();
			// iter != updatedNodes.end(); iter++ )
			// {
			// _updateBfs[*iter] = false;
			// }
		}
	}

	/**
	 * Dissolves the Voronoi region R(node) of a contracted node by
	 * distributing its member nodes among the neighboring Voronoi regions,
	 * via a Dijkstra search seeded with the cheapest entry points from the
	 * adjacent regions.
	 *
	 * @param node the contracted node whose Voronoi region is dissolved
	 */
	private void distributeVoronoiRegion(int node) {

		// The nodes of the Voronoi region are stored in a single linked list
		// that is terminated by SPECIAL_NODEID. The Dijkstra class provides
		// only low level methods to update/insert nodes and extract the
		// node with the lowest distance. The main logic is in this subroutine.

		int current = node;
		while (current != Constants.SPECIAL_NODEID) {

			// Among all nodes that are incident to an incoming edge of the
			// currently regarded node in R(node), and that are not themselves
			// in R(node), take the node that is on the shortest path from its
			// owner to the currently regarded node.

			int minDist = Integer.MAX_VALUE;
			int minDistNode = Constants.SPECIAL_NODEID;
			int firstEdge = graph.firstLevelEdge(current);
			int lastEdge = graph.lastEdge(current);
			for (int e = firstEdge; e < lastEdge; e++) {
				Edge edge = graph.edge(current, e);

				// only incoming edges
				if (!edge.isDirected((byte) 1))
					continue;
				PQueueNodeElimination data = pqData(edge.getTarget());

				// Ignore nodes in Voronoi region R(node).

				if (data.voronoiOwner == node)
					continue;

				// Special case: because the graph is directed, not all nodes
				// can be distributed. Such nodes that cannot be distributed are
				// identified by the fact that their Voronoi region owner is
				// already eliminated (contracted). These nodes must not be
				// regarded for the Voronoi region distribution.

				if (pqData(data.voronoiOwner).isEliminated())
					continue;

				// NOTE(review): voronoiNumber appears to double as "distance
				// to the region owner" here and as a region-size counter in
				// the loop below — confirm against PQueueNodeElimination.
				int dist = data.voronoiNumber + edge.getWeight();

				if (dist < minDist) {
					minDist = dist;
					minDistNode = edge.getTarget();
				}
			}

			// Special case: It is possible that no neighboring Voronoi region
			// exists that can reach the currently regarded node. In this case,
			// the node is ignored.

			if (minDistNode != Constants.SPECIAL_NODEID) {

				dVoronoi.insertNode(current, minDist, minDistNode);
			}
			current = pqData(current).voronoiNextBorderNode;
		}

		int v = 0;
		int dist = 0;
		int parent = 0;
		// *** Process nodes in the priority queue. Main work loop ***
		// searchNext() is documented (in the C++ original) to return the node
		// with the lowest distance, its distance and its parent by
		// call-by-reference; the boolean return value indicates whether the
		// priority queue is non-empty.
		// NOTE(review): Java passes int by value, so v, dist and parent
		// cannot be updated through this call as written — verify how
		// DijkstraUpdateVoronoi.searchNext() actually hands back its results.
		while (dVoronoi.searchNext(v, dist, parent)) {
			// Node v is assigned to the Voronoi region of its parent.
			PQueueNodeElimination data = pqData(v);
			data.voronoiOwner = pqData(parent).voronoiOwner;
			data.voronoiNumber = dist;
			PQueueNodeElimination dataOwner = pqData(data.voronoiOwner);

			assert (!dataOwner.isEliminated());

			// Update stats of voronoi owner
			dataOwner.voronoiNumber++;

			// Add v to linked list of voronoi Region of new owner.
			data.voronoiNextBorderNode = dataOwner.voronoiNextBorderNode;
			dataOwner.voronoiNextBorderNode = v;

			// Relax edges, but only into the Voronoi region of "node".
			// NOTE(review): this loop starts at edge index 0 instead of
			// graph.firstLevelEdge(v) as in the loops above — confirm whether
			// including lower-level edges here is intentional.
			int lastEdge = graph.lastEdge(v);
			for (int e = 0; e < lastEdge; e++) {
				Edge edge = graph.edge(v, e);

				// only outgoing edges that lead into R(node)
				if (!edge.isDirected((byte) 0)
						|| pqData(edge.getTarget()).voronoiOwner != node)
					continue;
				int newDist = dist + edge.getWeight();
				dVoronoi.updateNode(edge.getTarget(), newDist, v);
			}
		}
		dVoronoi.clear();
	}

	/**
	 * Returns the data stored with each node (element) index in the priority
	 * queue. Note that this data is still available even after a node has
	 * been deleted from the pqueue.
	 *
	 * @param node the node id used as element index
	 * @return the per-node elimination data
	 */
	private PQueueNodeElimination pqData(int node) {
		// assert ((node + 1) < pqElimination.size());
		return pqElimination.getPQData(node);
	}

	/** Phases of the contraction-hierarchy preprocessing. */
	private enum Phase {

		/** initialize the priority queue with all nodes */
		NODEORDER_INIT,

		/** eliminate (contract) nodes in priority-queue order */
		NODEORDER_ELIMINATE,

		/** eliminate nodes following a previously known node order */
		CONSTRUCT

	}

	private class CalculationResult {

		private double weight;
		private int searchSpace;
		private int edgeDiff;
		private int newEdges;

		private CalculationResult(double weight, int searchSpace, int edgeDiff,
				int newEdges) {
			this.weight = weight;
			this.searchSpace = searchSpace;
			this.edgeDiff = edgeDiff;
			this.newEdges = newEdges;
		}

	}

	private class ProcessResult {

		private int searchSpace;
		private int edgeDiff;
		private int newEdges;
		private int inDegree;
		private int outDegree;
		private int shortcutOriginalEdgeSum;

		private ProcessResult(int searchSpace, int edgeDiff, int newEdges,
				int inDegree, int outDegree, int shortcutOriginalEdgeSum) {
			this.searchSpace = searchSpace;
			this.edgeDiff = edgeDiff;
			this.newEdges = newEdges;
			this.inDegree = inDegree;
			this.outDegree = outDegree;
			this.shortcutOriginalEdgeSum = shortcutOriginalEdgeSum;
		}

	}

	private class NewEdge extends CompleteEdge {

		private static final long serialVersionUID = -5593060645205723848L;

		private NewEdge(int source, int target, int weight,
				boolean isBidirected, int shortcutMiddle,
				int shortcutParentEdge, int shortcutChildEdge,
				int shortcutOriginalEdgeCount) {
			super(source, target, weight, Constants.EDGE_TYPE_SHORTCUT, true,
					true, isBidirected, shortcutMiddle, shortcutParentEdge,
					shortcutChildEdge, shortcutOriginalEdgeCount);
		}

	}

}
