package com.jcroke.DACGraph;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;

import org.jgrapht.alg.CycleDetector;
import org.jgrapht.graph.DefaultDirectedGraph;
import org.jgrapht.graph.DefaultEdge;
import org.jgrapht.traverse.TopologicalOrderIterator;

import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.query.ResultSet;
import com.jcroke.DACGraph.DAC.DACClient;

/**
 * @author jcroke
 * 
 */

/**
 * Builds a directed processing graph of {@link Pnode} vertices from RDF data
 * fetched through a {@link DACClient} SPARQL endpoint.
 *
 * <p>{@link #buildGraph()} seeds the graph with the data set's boundary nodes,
 * then walks the consumption edges breadth-first via an internal work queue,
 * creating vertices and edges as it discovers them. Cycle inspection is
 * delegated to a JGraphT {@link CycleDetector}.
 *
 * <p>NOTE(review): the class is {@code Serializable} but its fields
 * ({@code DACClient}, JGraphT graph, concurrent collections) are not marked
 * {@code transient} — confirm they are themselves serializable before relying
 * on Java serialization of this object.
 */
@SuppressWarnings("serial")
public class RDFData implements Serializable {

	private DACClient _dac;
	private String _dataSet;
	private Pnode _spout;
	private Pnode _outputSerializer;
	// Work queue of discovered-but-unprocessed nodes (drained by buildGraph).
	private ConcurrentLinkedQueue<Pnode> _q;
	// IRI -> vertex lookup so each node is instantiated exactly once.
	private ConcurrentHashMap<String, Pnode> _iriToPnode;
	private DefaultDirectedGraph<Pnode, DefaultEdge> _g;
	private CycleDetector<Pnode, DefaultEdge> _cycleDetector;

	/**
	 * @param dac     SPARQL client used for all queries
	 * @param dataSet identifier of the data set to build the graph from
	 */
	public RDFData(DACClient dac, String dataSet) {
		this._dac = dac;
		this._dataSet = dataSet;
	}

	/** Resets all graph state so buildGraph() can be invoked repeatedly. */
	private void _initGraph() {
		this._iriToPnode = new ConcurrentHashMap<String, Pnode>();
		this._g = new DefaultDirectedGraph<Pnode, DefaultEdge>(
				DefaultEdge.class);
		this._cycleDetector = new CycleDetector<Pnode, DefaultEdge>(_g);
		this._q = new ConcurrentLinkedQueue<Pnode>();
	}

	/**
	 * (Re)builds the graph: queries the data set's boundary nodes, enqueues
	 * them, then drains the queue, expanding each node's incoming edges.
	 */
	public synchronized void buildGraph() {
		_initGraph();
		// Query for root of consumption graph boundary nodes
		ResultSet resultSet = _dac.sparql(String.format(
				SPARQL.BOUNDARY_NODE_QUERY_TEMPLATE, _dataSet));

		while (resultSet.hasNext()) {
			QuerySolution bs = resultSet.next();
			Pnode p = new Pnode(bs);
			// Exact name comparison; matches() treated "os" as a regex.
			if (p.getNodeName().equals("os")) {
				this.setOutputSerializer(p);
			}

			_q.add(p);
			_iriToPnode.put(p.getIRI(), p);
			_g.addVertex(p);
		}

		// Drain the work queue; _processHead may enqueue newly found nodes.
		Pnode p;
		for (p = _q.poll(); p != null; p = _q.poll()) {
			_processHead(p);
		}
	}

	/**
	 * Expands one node: attaches its transformation logic (if any) and walks
	 * its consumption edges, creating source vertices/edges on first sight.
	 *
	 * @param cur the node whose logic and incoming wires are resolved
	 */
	private void _processHead(Pnode cur) {
		ResultSet resultSet;

		// Query for transformation class or runtime compilation code.
		resultSet = _dac.sparql(String.format(SPARQL.BOLT_LOGIC_QUERY_TEMPLATE,
				cur.getIRI()));

		// Assign transformer logic to node.
		if (resultSet.hasNext()) {
			QuerySolution logicSolution = resultSet.next();
			Logic l = new Logic(_dac, logicSolution);

			cur.linkLogic(l);
		}

		// Query for consumption edges
		resultSet = _dac.sparql(String.format(SPARQL.BOLT_WIRE_QUERY_TEMPLATE,
				cur.getIRI()));

		// Process edges
		while (resultSet.hasNext()) {
			QuerySolution edgeSolution = resultSet.next();

			Wire e = new Wire(_dac, edgeSolution);

			// Add input signatures to current node
			cur.linkInput(e.getSignatureElement());

			// Check if this node has been instantiated.
			if (!_iriToPnode.containsKey(e.getSourceIRI())) {
				Pnode n = new Pnode(e.getSourceRDFNode());
				// Exact name comparison; matches() treated "spout" as regex.
				if (n.getNodeName().equals("spout")) {
					this.setSpout(n);
				}

				// Add a known output to new node
				n.linkOutput(e.getSignatureElement());

				// Add a vertex to topological graph
				_g.addVertex(n);

				// Add edge
				_g.addEdge(n, cur);

				// Enqueue the new node
				_q.add(n);
				_iriToPnode.put(n.getIRI(), n);

			} else {

				// Find the existing vertex of this IRI, and attach to it.
				Pnode existing = _iriToPnode.get(e.getSourceIRI());
				_g.addEdge(existing, cur);

				// Add the output signature only when no existing output
				// already carries the same source pin. (The previous logic
				// flagged a change when ANY existing signature differed,
				// which duplicated outputs on nodes that already held more
				// than one distinct signature.)
				Signature toCompare = e.getSignatureElement();
				boolean alreadyLinked = false;
				for (Signature s : existing.getOutputSignature()) {
					if (s.getSourcePin().equals(toCompare.getSourcePin())) {
						alreadyLinked = true;
						break;
					}
				}
				if (!alreadyLinked) {
					existing.linkOutput(e.getSignatureElement());
				}
			}
		}
	}

	/** @return the IRI-to-node lookup map populated by buildGraph() */
	public ConcurrentHashMap<String, Pnode> getIRItoPnode() {
		return (_iriToPnode);
	}

	/** @return a fresh topological-order iterator over the built graph */
	public TopologicalOrderIterator<Pnode, DefaultEdge> getIterator() {
		return (new TopologicalOrderIterator<Pnode, DefaultEdge>(_g));
	}

	/** @return {@code true} if the graph contains at least one cycle */
	public boolean cyclesDetected() {
		return _cycleDetector.detectCycles();
	}

	/**
	 * Collects every vertex that participates in a cycle, grouped by the
	 * disjoint cycle (strongly connected component) it belongs to.
	 *
	 * @return flat list of all cycle vertices; empty when the graph is acyclic
	 */
	public ArrayList<Pnode> getCycles() {
		ArrayList<Pnode> retCycle = new ArrayList<Pnode>();

		if (cyclesDetected()) {
			// Defensive copy so we can shrink the working set as we go.
			Set<Pnode> cycleVertices = new HashSet<Pnode>(
					_cycleDetector.findCycles());

			// Loop through vertices trying to find disjoint cycles.
			while (!cycleVertices.isEmpty()) {
				// Get a vertex involved in a cycle.
				Iterator<Pnode> iterator = cycleVertices.iterator();
				Pnode seed = iterator.next();

				// Get all vertices involved with this vertex.
				Set<Pnode> subCycle = _cycleDetector
						.findCyclesContainingVertex(seed);
				for (Pnode sub : subCycle) {
					retCycle.add(sub);
				}

				// FIX: consume the processed vertices. The original never
				// removed anything from cycleVertices, so this loop spun
				// forever whenever a cycle existed.
				cycleVertices.removeAll(subCycle);
				// Safety net in case the sub-cycle omitted the seed itself.
				cycleVertices.remove(seed);
			}
		}
		return retCycle;
	}

	/** @return the underlying directed graph (shared, not a copy) */
	public synchronized DefaultDirectedGraph<Pnode, DefaultEdge> getDirectedGraph() {
		return _g;
	}

	/** @return the node named "spout", or {@code null} if none was seen */
	public Pnode getSpout() {
		return _spout;
	}

	private void setSpout(Pnode spout) {
		this._spout = spout;
	}

	/** @return the node named "os", or {@code null} if none was seen */
	public Pnode getOutputSerializer() {
		return _outputSerializer;
	}

	public void setOutputSerializer(Pnode outputSerializer) {
		this._outputSerializer = outputSerializer;
	}
}
