package com.darkflame.client.semantic;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.logging.Logger;










import com.darkflame.client.SuperSimpleSemantics;
import com.darkflame.client.interfaces.GenericDebugDisplayer;
import com.darkflame.client.interfaces.GenericWaitForRepeating;
import com.darkflame.client.interfaces.SSSGenericFileManager.FileCallbackError;
import com.darkflame.client.interfaces.SSSGenericFileManager.FileCallbackRunnable;
import com.darkflame.client.interfaces.GenericWaitForRepeating.MyRepeatingCommand;
import com.darkflame.client.semantic.QueryEngine.DoSomethingWithNodesRunnable;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import com.google.common.collect.Multimaps;


/** stores all nodes with a common property(prec/value) **/
public class SSSNodesWithCommonProperty extends ArrayList<SSSNode> {

	// Class-wide logger for this type. NOTE(review): non-private, non-final by
	// original design; left as-is since other classes may reference it.
	static Logger Log = Logger.getLogger("sss.SSSNodesWithCommonProperty");

	/** Hashset of all the SSS common-property lists yet to be parsed.
	 * Note: when they aren't on this list they are considered ready to be queried.
	 * That doesn't necessarily mean their data has been loaded. */
	private static final HashSet<SSSTriplet>  CPLToParseList = new HashSet<SSSTriplet>();
	

	/** Used to visually debug queries - it's just a logger, more or less.
	 * Null until {@link #setDebugPanel} is called; the static log helpers
	 * below silently drop messages while it is null. **/
	private static GenericDebugDisplayer DebugPanel=null;
	/** This lists predicate. Every node in this list has this predicate/value **/
	public SSSNode getCommonPrec() {
		return commonPrec;
	}
	/** This lists predicate. Every node in this list has this predicate/value **/
	/** Returns the value shared by every node in this set. **/
	public SSSNode getCommonValue() {
		return this.commonValue;
	}

	// The predicate/value pair that defines this set; every node in this list
	// is asserted to have this predicate with this value.
	private SSSNode commonPrec;
	private SSSNode commonValue;

	// Count of files still loading into this set. -10 is a sentinel meaning
	// no loading has ever started for this set.
	private int LeftToLoad = -10;
	

	//private LoadingCallback calledWhenDone;
	// Callbacks fired when this set finishes loading (see setCallback/removeCallback).
	private ArrayList<LoadingCallback> calledWhenDoneList = new ArrayList<LoadingCallback>();
	
	// Legacy global load counter; same -10 "never started" sentinel as LeftToLoad.
	// NOTE(review): getGlobalLeftToLoad() now reports CPLToParseList.size() instead.
	private static int globalLeftToLoad = -10;
	private static Runnable globalCalledWhenDone;

	// global
	// static ArrayList<SSSNodesWithCommonProperty> globalNodesWithPropertyList;
	// Global registry of every common-property set, keyed by predicate.
	public static HashMultimap<SSSNode, SSSNodesWithCommonProperty> globalNodesWithPropertyListByPredicate =HashMultimap
			.create();
	
	//might need to be replaced if concurrency is an issue
//public static Multimap<SSSNode, SSSNodesWithCommonProperty> globalNodesWithPropertyListByPredicate =   
	//		   Multimaps.synchronizedMultimap(HashMultimap.<SSSNode, SSSNodesWithCommonProperty> create());
	
	
	// For each node in this set, the domains/URLs that support the association.
	HashMultimap<SSSNode, String> SupportingDomains = HashMultimap.create();

	// Self-reference so anonymous inner classes can reach the outer set.
	SSSNodesWithCommonProperty thisset = this;

	/** Determines if this set is loaded.
	 * If the preload option is set, the list will be loaded straight away and this set to True.
	 * Else it will wait till it's needed, then loaded if it's not already.
	 * Adding any sourcefile will automatically set this to false until that file is loaded.
	 * In essence this flag should be TRUE either;
	 * a) If there was never any files to load
	 * or
	 * b) All requested files are loaded  **/
	public boolean isLoaded = true;

	// Set true once this set has been registered in the global registry.
	boolean NodeListActive = false;

	/** files that contribute to this set **/
	
	//private HashSet<String> fileSource = new HashSet<String>();
	// Maps source file URL -> associated data (presumably its namespace - TODO confirm).
	private HashMap<String,String> fileSource = new HashMap<String,String>();
	
	/**hashset of url's left to be loaded into this specific common property set */
	private final HashSet<String> FilesLeftToLoadForThisSet = new HashSet<String>();
	
	// NOTE(review): never read within this chunk of the file - possibly dead.
	private boolean list_not_used = false;

	
	/** safely create a new SSSNodesWithCommonProperty, or return it if it exists already **/
	/**
	 * Safely obtains the common-property set for the given predicate/value
	 * pair (reusing an existing set when one is registered) and adds every
	 * supplied node to it.
	 *
	 * @param commonPrec  the predicate shared by all nodes in the set
	 * @param commonValue the value shared by all nodes in the set
	 * @param NodesInSet  nodes to add to the (possibly pre-existing) set
	 * @return the set the nodes were added to
	 **/
	static public SSSNodesWithCommonProperty createSSSNodesWithCommonProperty(
			SSSNode commonPrec, SSSNode commonValue, SSSNode[] NodesInSet) {

		// Delegate to the safe factory, which reuses existing sets internally.
		SSSNodesWithCommonProperty set =
				createSSSNodesWithCommonProperty(commonPrec, commonValue);

		// Populate the set with the supplied nodes.
		for (int i = 0; i < NodesInSet.length; i++) {
			set.addNodeToThisSet(NodesInSet[i], "INTERNALY ADDED");
		}

		return set;
	}

	/** safely create a new SSSNodesWithCommonProperty, or return it if it exists already **/
	static public SSSNodesWithCommonProperty createSSSNodesWithCommonProperty(
			SSSNode commonPrec, SSSNode commonValue) {
		
		info("making new property set");
		if (commonPrec==null || commonValue ==null){
			error("CANT CREATE SET WITH NULL SSSNODES FOR commonPrec or commonValue.");
			return null;
		}
		
		SSSNodesWithCommonProperty newset = SSSNodesWithCommonProperty
				.getSetFor(commonPrec, commonValue);
		Log.info("~completed set getting");
		
		if (newset == null) {

			newset = new SSSNodesWithCommonProperty(commonPrec, commonValue);

			info("made new set for "+commonPrec.PURI+" , "+commonValue.PURI);
			
			Log.warning("~~~~~~~~~~~~~~~~~~SET CREATED FOR~~~~~~~~~~~~~~"+commonPrec.PURI+" , "+commonValue.PURI);
			
			Log.info("sets with this predicate = "+globalNodesWithPropertyListByPredicate.get(commonPrec).size());

			Boolean added = globalNodesWithPropertyListByPredicate.put(commonPrec, newset);

			if (!added){

				Log.info("already exists for:"+commonPrec.PURI+" , "+commonValue.PURI);

				for (SSSNodesWithCommonProperty cp : globalNodesWithPropertyListByPredicate.values()) {

					Log.info(cp.getCommonPrec().PURI+" | "+cp.getCommonValue().PURI);
					Log.info("hashcode:"+cp.hashCode());


				}



			}




			newset.NodeListActive = true;

		} else {

			info("set found for "+commonPrec.PLabel+" , "+commonValue.PLabel);
		}

		return newset;

	}

	/**
	 * Hash is derived from the predicate/value pair that defines this set, so
	 * sets are bucketed by what they represent rather than by their contents.
	 */
	@Override
	public int hashCode(){

		//based on the primary uri
		return this.getCommonPrec().hashCode()^(this.getCommonValue().hashCode());

	}

	/**
	 * Two common-property sets are equal when they describe the same
	 * predicate/value pair, consistent with {@link #hashCode()} above.
	 *
	 * FIX(review): previously equals() was inherited from AbstractList
	 * (element-wise comparison), which was inconsistent with the overridden
	 * hashCode - two sets with identical contents but different
	 * predicate/value compared equal yet hashed differently. That breaks the
	 * equals/hashCode contract relied on by the HashMultimap registry
	 * (globalNodesWithPropertyListByPredicate), so its duplicate detection
	 * could never trigger correctly.
	 */
	@Override
	public boolean equals(Object other) {
		if (this == other) {
			return true;
		}
		if (!(other instanceof SSSNodesWithCommonProperty)) {
			return false;
		}
		SSSNodesWithCommonProperty that = (SSSNodesWithCommonProperty) other;
		return this.getCommonPrec().equals(that.getCommonPrec())
				&& this.getCommonValue().equals(that.getCommonValue());
	}

	/**
	 * Builds an empty set for the given predicate/value pair.
	 * Prefer the static createSSSNodesWithCommonProperty(...) factories,
	 * which reuse and globally register sets rather than creating duplicates.
	 */
	public SSSNodesWithCommonProperty(SSSNode commonPrec, SSSNode commonValue) {
		super();
		this.commonPrec = commonPrec;
		this.commonValue = commonValue;
	}

	//AsyncCallback<String> 
	/** Registers a callback to be invoked once this set finishes loading. **/
	public void setCallback(LoadingCallback calledWhenDone) {
		info("callback set");
		Log.warning("_________CALLBACK ADDED________");
		calledWhenDoneList.add(calledWhenDone);
	}
	
	/** Deregisters a previously added loading callback. **/
	public void removeCallback(LoadingCallback calledWhenDone) {
		info("callback removed");
		Log.warning("_________CALLBACK Removed________");
		calledWhenDoneList.remove(calledWhenDone);
	}
	/** sets a callback to run after all loading is done **/
	public static void setGlobalCallback(Runnable calledWhenDone) {
		Log.info("globalCalledWhenDone set");
		globalCalledWhenDone = calledWhenDone;

	}

	/**
	 * Parses the given file contents into this set using its own
	 * predicate/value and the "Internal" namespace (no source URL).
	 */
	public void parseSSSFile(final String fileContents) {
		parseSSSFile(commonPrec, commonValue, fileContents, "Internal", "");
	}
	
	/** this should never be used. A NS default should always be supplied.
	 * This will use the global default which unless you plan to never use other file sources but 
	 * local ones to your initial ntlist will cause problems **/
	/** This should never be used - a default namespace should always be supplied.
	 * Falls back to the global default, which will cause problems unless you
	 * only ever use file sources local to your initial ntlist. **/
	public void loadSSSFile(final String fileURL) {
		loadSSSFile(fileURL, SuperSimpleSemantics.getDefaultBaseURI());
	}
	/**
	 * adds the nodes in the file to the current list. Predicate and Values
	 * should all match this lists!
	 **/
	/**
	 * Asynchronously fetches the file at fileURL and adds the nodes in it to
	 * the current list. Predicate and Values should all match this list's!
	 *
	 * Marks this set as not loaded, bumps the per-set load counter, and queues
	 * the fetch on SimpleLoadingQueue. On response (or error) the counters are
	 * decremented again via subtractFromLoadingAndTest.
	 *
	 * @param fileURL the file to fetch
	 * @param ns      the default namespace to resolve unprefixed node URIs against
	 **/
	public void loadSSSFile(final String fileURL, final String ns) {	
		
		
		isLoaded = false;
		
		//if first file: promote the -10 "never started" sentinel to a real count
		if (LeftToLoad==-10){
			LeftToLoad=0;
		}
	
		SuperSimpleSemantics.error("file load triggering:"+fileURL);
		
		
		//
		// final RequestBuilder getPropertyFile = new RequestBuilder(
		// RequestBuilder.GET, fileURL);
		
		LeftToLoad++;
		//globalLeftToLoad++;

		SuperSimpleSemantics.addToTotalLoadUnits(1);

		// Success handler: defers actual parsing to the scheduler so the UI
		// thread is not blocked by large files.
		FileCallbackRunnable onResponse = new FileCallbackRunnable(){

			@Override
			public void run(final String responseData, final int responseCode) { 

				SimpleLoadingQueue.loadedAnItem();

				SuperSimpleSemantics.stepLoadClockForward();

				SuperSimpleSemantics.setCurrentLoadProcess("processing CPL");

				SuperSimpleSemantics.waitFor.scheduleAfter(new Runnable() {

					@Override
					public void run() {
						
						processFile(fileURL,ns, responseData, responseCode);

						SuperSimpleSemantics.setCurrentLoadProcess("processed CPL");
						
					}
				});

				


			}

			// Parses the fetched data unless the server reported 404, in which
			// case the load counters are unwound without parsing.
			public void processFile(final String fileURL,String ns, String responseData, int StatusCode) {

				// process file if not 404
				if (StatusCode==404){					

					Log.warning("FILE NOT FOUND--"+fileURL);

					//send to the main log on the page too
					log("File Not Found:"+fileURL, "RED");
					subtractFromLoadingAndTest(fileURL);
					
					
				//	SSSNodesWithCommonProperty.removeFromLeftToLoadList(fileURL,commonPrec,commonValue);
					
					//TrustedIndexList.loadingicon.stepClockForward();
				} else {				
					// the domain should be the default namespace for this set
					// of nodes
					//String ns = fileURL.substring(0, fileURL.lastIndexOf("/"))
					//		+ "/DefaultOntology.n3#";
					
					
					
					//commonPrec, commonValue 
					// NOTE: parseSSSFile is incremental; it calls
					// subtractFromLoadingAndTest itself when it reaches EOF.
					parseSSSFile(commonPrec, commonValue , responseData, ns,fileURL);			
					Log.info("post "+getCommonPrec()+":"+getCommonValue()+" parse size:"+thisset.size());

					//TrustedIndexList.loadingicon.stepClockForward();
				}

				


			}

		};

		//what to do if there's an error: unwind the counters for this URL
		FileCallbackError onError = new FileCallbackError(){

			@Override
			public void run(String errorData, Throwable exception) {

				Log.info("getting sssfile- " + fileURL + " failed");

				SimpleLoadingQueue.loadedAnItem();
				//LeftToLoad--;
				 
			//	globalLeftToLoad--;			


				 subtractFromLoadingAndTest(fileURL);

			//	SSSNodesWithCommonProperty.removeFromLeftToLoadList(fileURL,commonPrec,commonValue);
				
				SuperSimpleSemantics.stepLoadClockForward();


			//	Log.warning("temp3");
				//checkIfFinnishedGlobal();

			}

		};


		// A ":" in the URL (absolute/cross-origin) forces a POST-style fetch.
		// NOTE(review): presumably a cross-origin workaround - confirm against
		// SimpleLoadingQueue's handling of the flag.
		boolean forcePost = false;

		if (fileURL.contains(":")){
			forcePost = true;
		}

		Log.info("_____________________________getting file: " + fileURL +" at "+ns);
		
		
		
		//using the above, try to get the text!
		
			//all fetches go through the loading queue to cap concurrency
			Log.info("Too much simultanious loading!, adding to loading queue");
			
			SimpleLoadingQueue.add(fileURL,
					onResponse,
					onError,
					forcePost);
			
			
		


	}
	/**
	 * Unwinds the load bookkeeping for one URL: removes it from this set's
	 * pending list and the global parse list, decrements both load counters,
	 * then re-checks whether this set - and loading globally - has finished.
	 *
	 * @param url the file URL that just finished (loaded, failed, or 404'd)
	 */
	private void subtractFromLoadingAndTest(final String url) {
		
		Boolean removed = FilesLeftToLoadForThisSet.remove(url);
		Log.warning("__Just loaded:"+url+ " removed:"+removed);
		
		
		SSSNodesWithCommonProperty.removeFromLeftToLoadList(url,commonPrec,commonValue);
		
		LeftToLoad--;
		globalLeftToLoad--;

		Log.warning("globalLeftToLoad-------------------------------------" + getGlobalLeftToLoad());
		Log.info("LeftToLoad--" + LeftToLoad);

		// may fire this set's LoadingCallbacks if nothing is left to load
		checkLeftToLoad(url);

		Log.warning("temp4");
		// may fire the global callback if everything everywhere is loaded
		checkIfFinnishedGlobal();
		
	}
	/**
	 * Returns how many common-property lists remain to be parsed globally.
	 * Derived from CPLToParseList rather than the legacy globalLeftToLoad
	 * counter.
	 */
	public static int getGlobalLeftToLoad() {
		return CPLToParseList.size();
	}

	/**
	 * Adds to the legacy global load counter, first promoting the -10
	 * "never started" sentinel to -1.
	 */
	public static void addToGlobalLeftToLoad(int i) {
		if (globalLeftToLoad == -10) {
			globalLeftToLoad = -1;
		}
		globalLeftToLoad += i;
	}
	/** Subtracts from the legacy global load counter. **/
	public static void subtractFromGlobalLeftToLoad(int i) {
		globalLeftToLoad -= i;
	}
	
	/**
	 * Records that child is a subclass of parent according to the given
	 * source, by adding child to the (created-if-missing) SubClassOf set for
	 * that parent.
	 */
	static public void addParentRelationship(SSSNode child, SSSNode parent,
			String accordingToThisSource) {

		// Reuse the existing SubClassOf/parent set, or create one.
		SSSNodesWithCommonProperty set =
				createSSSNodesWithCommonProperty(SSSNode.SubClassOf, parent);

		set.addNodeToThisSet(child, accordingToThisSource);

		SuperSimpleSemantics.info(" added:" + child.PURI + " to set:" + set.commonPrec.PURI + "|"
				+ set.commonValue.PURI);
	}

	/**
	 * Removes a node from this set, clearing its supporting-domain records
	 * first.
	 *
	 * @return true if the node was present and removed
	 */
	public boolean removeNodeFromThisSet(SSSNode removeThis){
		SupportingDomains.get(removeThis).clear();
		return remove(removeThis);
	}

	/** returns true if it was successfully added **/ 
	public Boolean addNodeToThisSet(SSSNode newnode, String SupportingDomain) {

		//sanity checks!
		//first we ensure that if this property set is specifying subtypes, we dont specify
		// itself as a suptype, else we create a loop!
		if (this.commonPrec == SSSNode.SubClassOf){

			if (newnode.isOrHasChildClass((this.commonValue))){

				Log.info("___SSS Parse Error:  "+newnode.toString()+" would create inheritance loop");
				log("___SSS Parse Error:  "+newnode.toString()+" would create inheritance loop");

				return false; 
			}
		}

		if (!this.contains(newnode)) {		

			Log.info("adding node"+newnode.PURI);
			this.add(newnode);
			// add uri to list of things supporting this association:
			SupportingDomains.get(newnode).add(SupportingDomain);
			return true;
		}
		SupportingDomains.get(newnode).add(SupportingDomain);
		
		return false; 
	}

	/**
	 * Incrementally parses SSS/n3-ish file data into this set, one line per
	 * scheduler tick (via waitForRep) so the UI stays responsive.
	 *
	 * Handles: @prefix lines, quoted label lines that follow a node ended
	 * with ";", Label sets (assigns the common value as a label), and
	 * Equivlient sets (merges the node with the common value).
	 * Calls subtractFromLoadingAndTest(fileURL) when the last line is done.
	 *
	 * @param commonPrec  predicate this file's nodes are asserted to have
	 * @param commonValue value this file's nodes are asserted to have
	 * @param fileData    the raw file contents
	 * @param defaultns   namespace to resolve unprefixed URIs against
	 * @param fileURL     source URL (used for load bookkeeping/logging)
	 */
	public void parseSSSFile(final SSSNode commonPrec, final SSSNode commonValue,
			String fileData, final String defaultns,final String fileURL) {
	//public void parseSSSFile(final SSSTriplet fileDefinition,
	//			String fileData, final String defaultns,final String fileURL) {
			

		Log.info("parseing file:\n" +fileURL);
		
		info("parseing file:\n" + commonPrec.PURI + " " + commonValue.PURI+" ("+fileURL+")");
		

		// create new nodes from each line, assuming the default uri prefix
		// split to lines (handles \n, \r\n and bare \r)
		final String lines[] = fileData.split("\r?\n|\r");
		
		

		SuperSimpleSemantics.waitForRep.scheduleAfter(new MyRepeatingCommand() {
		
			int i = 0; //current line
			
			SSSNode previousnode= null; //we track the previous node so labels be added on the line following a node
			//eg
			// GrannySmithApple;
			// "Granny Smith Apple".
			// 
			// Previous node should only be stored if the line ended in a ; and theres none stored already
		    // They should be cleared after a . or a line without anything at the end
			
			// Returns true to be re-scheduled for the next line; false when done.
			@Override
			public boolean execute() {

				// end of file reached: unwind load bookkeeping and stop repeating
				if (i>=lines.length){
					Log.warning("temp A");
					subtractFromLoadingAndTest(fileURL);
					return false;					
				}
				String line = lines[i].trim();
				i++;

				//make sure the line has contents
				if (line.length()<2){
					return true;
				}

				//make sure line isnt a prefix specification
				if (line.startsWith("@prefix")) {

					//process the line as a prefix specification, not a triplet
					line = line.replaceAll("@prefix ", "");
					String[] linebits = line.split("<");

					String prefix = linebits[0].trim();				
					String uri = linebits[1	].trim();
					//remove ending >
					int endsat  = uri.indexOf(">");
					uri=uri.substring(0, endsat);

					Log.info("loadstatements_loading prefixs from line p::" + prefix
							+ " u:" + uri);

					RawQueryUtilities.addPrefix(uri, prefix);

					//skip processing this line as a triplet 				
					return true;
				} 

				// SSS files are compatibleish with n3 files - so we only pay attention
				// to the first fragment
				// we can, of course, add checks here so that if a prec and value
				// are specified they match the expected ones

				// get node from line representing label of previous node 
				// this only applies if the previousnode is not empty AND this line is a quoted string
				if (previousnode!=null && line.startsWith("\"") && 
						((line.endsWith("\"")) || (line.endsWith("\";")) || (line.endsWith("\".")) )  ){
					
					String newLabel = line.substring(1, line.lastIndexOf("\""));
					
					Log.info(" adding label "+newLabel+" to previous node ");
					Log.info(" has label already: "+previousnode.hasLabel());
					Log.info(" puri: "+previousnode.PURI);
					String shorturi=previousnode.getShortPURI();
					Log.info(" short uri: "+shorturi);
					
					previousnode.addLabel(newLabel);
					
					//clear if not a ; at the end of the current line
					//NOTE: really newlines following ; should either end by a . or a ;
					//but we assume anything but a ; means clear (ie, the next line is not associated with the last)
					if (!line.endsWith(";")){
						previousnode=null;				
					}
					
					return true;
					
				}
				
					// get node from line (normal treatment)
				SSSNode newnode = returnNodeURIfromSSSline(line, defaultns);
				
			
				//this check probably not needed as "addNodeToThisSet" already does it ----------optimization possible
				if (!thisset.contains(newnode)) {

					//this.add(newnode);
					addNodeToThisSet(newnode,defaultns);

				}

				//if the NPS type is for labels, we assign this label to the node as well
				if (commonPrec==SSSNode.Label){
					//info("adding label "+commonValue.getPLabel()+" to "+newnode.PURI);					
					newnode.addLabel(commonValue.getPLabel());
				}

				//if this common property set is specifying equilivents to another class the class
				if (commonPrec==SSSNode.Equivlient){

					info(" Detecting Equilivent value");
					info(" This : "+newnode.PURI);
					info(" IsThis : "+commonValue.PURI);

					Log.warning("_________xx____________This : "+newnode.PURI);
					Log.warning("_________xx____________IsThis : "+commonValue.PURI);
					
					SSSNode.mergeNode(newnode, commonValue, "merge supported by not yet implemented");


				}

				//update previous node if needed
				if (line.endsWith(";") && previousnode == null){
					previousnode = newnode;
				}
				//clear if not a ; at the end of the current line
				//NOTE: really newlines following ; should either end by a . or a ;
				//but we assume anything but a ; means clear (ie, the next line is not associated with the last)
				if (!line.endsWith(";")){
					previousnode=null;				
				}


				
				
				// add uri to list of things supporting this association:
				SupportingDomains.get(newnode).add(defaultns); // the defaultns
				
				// should the url
				// this data came
				// from

				return true;

				
			}
		});

	
	}

	/**
	 * Old synchronous parser.
	 *
	 * @deprecated blocks the thread for the whole file; use the incremental
	 * parseSSSFile(SSSNode, SSSNode, String, String, String) instead. Also
	 * lacks label-continuation (";") handling and Equivlient merging.
	 */
	@Deprecated
	public void parseSSSFile_old(SSSNode commonPrec, SSSNode commonValue,
			String fileData, String defaultns) {

		Log.info("parseing file:\n" + fileData);

		Log.info("parseing file:\n" + commonPrec.PURI + " " + commonValue.PURI);


		//nb; This should probably be done with a incremental schedule too.

		// create new nodes from each line,assuming the default uri prefix
		// split to lines
		String lines[] = fileData.split("\r?\n|\r");

		// loop over lines
		int i = 0;
		while (i < lines.length) {

			String line = lines[i].trim();
			i++;

			//make sure the line has contents
			if (line.length()<2){
				continue;
			}

			//make sure line isnt a prefix specification
			if (line.startsWith("@prefix")) {

				//process the line as a prefix specification, not a triplet
				line = line.replaceAll("@prefix ", "");
				String[] linebits = line.split("<");

				String prefix = linebits[0].trim();				
				String uri = linebits[1	].trim();
				//remove ending >
				int endsat  = uri.indexOf(">");
				uri=uri.substring(0, endsat);

				Log.info("loadstatements_loading prefixs from line p::" + prefix
						+ " u:" + uri);

				RawQueryUtilities.addPrefix(uri, prefix);

				//skip processing this line as a triplet 				
				continue;
			} 

			// SSS files are compatible with n3 files - so we only pay attention
			// to the first fragment
			// we can, of course, add checks here so that if a prec and value
			// are specified they match the expected ones

			// get node from line
			SSSNode newnode = returnNodeURIfromSSSline(line, defaultns);

			if (!this.contains(newnode)) {

				//this.add(newnode);
				addNodeToThisSet(newnode, "supporting url not yet implemented");

			}

			//if the NPS type is for labels, we assign this label to the node as well
			if (commonPrec==SSSNode.Label){
				//info("adding label "+commonValue.getPLabel()+" to "+newnode.PURI);

				newnode.addLabel(commonValue.getPLabel());
			}

			// add uri to list of things supporting this association:
			SupportingDomains.get(newnode).add(defaultns); // the defaultns
			// should the url
			// this data came
			// from

		}

		// adds the new nodes
	}
	/** Installs the debug display used by the static logging helpers below. **/
	public static void setDebugPanel(GenericDebugDisplayer debugPanel){
		DebugPanel = debugPanel;
	}
	/** Logs an informational message to the debug panel, if one is installed. **/
	private static void info(String string) {
		if (DebugPanel == null) {
			return;
		}
		DebugPanel.log(string);
	}
	/** Logs a colored message to the debug panel, if one is installed. **/
	private static void log(String string, String color) {
		if (DebugPanel == null) {
			return;
		}
		DebugPanel.log(string, color);
	}
	/** Logs a plain message to the debug panel, if one is installed. **/
	private static void log(String string) {
		if (DebugPanel == null) {
			return;
		}
		DebugPanel.log(string);
	}
	/** Logs an error message to the debug panel, if one is installed. **/
	private static void error(String string) {
		if (DebugPanel == null) {
			return;
		}
		DebugPanel.error(string);
	}
	/**
	 * Extracts the subject node from one line of an SSS/n3-ish file and
	 * returns the (created-or-existing) SSSNode for it.
	 *
	 * Lines containing ":" are treated as prefixed/n3 style and split on
	 * spaces, skipping over quoted spans, taking only the first fragment as
	 * the subject. Otherwise the whole line is the subject. If this set's
	 * predicate is SubClassOf, the common value is also recorded as the new
	 * node's parent class.
	 *
	 * @param line      one trimmed line of file data
	 * @param defaultns namespace to resolve unprefixed URIs against
	 * @return the node for the line's subject (URI lowercased; label keeps case)
	 */
	private SSSNode returnNodeURIfromSSSline(String line, String defaultns) {
		
		Log.info("line=="+line);
		
		//first we remove any ; or . at the end, as these indicate either node information continues on the next line, or the end of a set of node information
		//This is pretty much only used for labels in SSS list files (.txt) but in (.ntlists) it could be used for lots of things.
		//if . or ; is desired at the end then the whole node uri needs to be quoted
		if (line.endsWith(".")||line.endsWith(";")){
			line=line.substring(0, line.length()-1);
		}
		
		
		String NodeURI = "";
		
		if (line.contains(":")) {

			// if it contains a : we assume its a n3 file, or something making
			// use of prefixs
			// we thus take more effort to extract the subject

			String[] linebits = line.split(" ");
			int i = 0;
			int linefragment = 0;
			boolean quoteOpen = false;

			// linebit accumulates space-split pieces so a quoted span with
			// internal spaces is reassembled before being examined
			String linebit=""; 
			while (i < linebits.length) {

				linebit = (linebit+" "+linebits[i]).trim();
				i++;
				
				Log.info("linebit=="+linebit);
				
				
				if (linebit.startsWith("\"")&& !quoteOpen){										
					//ensure it doesn't also end the quote before flagging quote as open
					if (!linebit.substring(1).contains("\"")){
						quoteOpen = true;
					}
					//added is empty here
				} else if (!linebit.isEmpty() && linebit.substring(1).contains("\"")&& quoteOpen){
					Log.info("full quoted line=="+linebit);
					quoteOpen =false;
				}

				// while inside an open quote keep accumulating pieces
				if (quoteOpen){				
					continue;
				}

				if (linebit.isEmpty()) {
					continue;
				}
				// only the first bit matters, as thats the node that has this
				// property
				if (linefragment == 0) {

					NodeURI = linebit;
					Log.info("\n NodeURI detected:" + NodeURI);
					// we should separate out the label from the full uri here
					// if a	full uri is specified

					linefragment++;

				}

				//we reset the linebit as quotes wont get this far
				linebit="";

				// in future test for inconsistencies in file if any Prec or Val
				// is
				// specified

			}

		} else {
			// the whole line is the subject!
			NodeURI = line;
		}
		
		//strip quotes from nodeuri if present
		/*
		if (NodeURI.startsWith("\"") && NodeURI.endsWith("\"")){
			
			NodeURI = NodeURI.substring(1,NodeURI.length()-1);
			Log.info("striped quotes from start and end:"+NodeURI);
			
		}
		*/

		Log.warning("_____creating node:"+ NodeURI.toLowerCase()+" defaultns:"+defaultns);
		
		//the label is guessed from the pURI (we do this separately to maintain its case
		String newnodeslabel = SSSNode.extractLabelFromURI(NodeURI);
		
		SSSNode newnode = SSSNode.createSSSNode(newnodeslabel,
				NodeURI.toLowerCase(), defaultns);
		
		Log.warning("_____Adding Node :"+ newnode.getPURI());

		
		if (commonPrec == SSSNode.SubClassOf) {
			
			//This used to be set to "true" which caused huge redundancy in file loading
			//It would not only update the nodes parent classes internally, but would
			//then try re-adding that node to this very set!
			newnode.addParentClasses(commonValue,defaultns,false);
			
		}



		// SSSNode existing = SSSNode.getNodeByUri(defaultns + NodeURI);
		// if (existing == null) {
		//
		// existing = new SSSNode(NodeURI.toLowerCase(), defaultns +
		// NodeURI.toLowerCase());
		// if (commonPrec == SSSNode.SubClassOf) {
		// Log.info("\n adding parent class:" + commonValue.PURI);
		// existing.addParentClasses(commonValue);
		// }
		//
		// } else {
		// // it exists already, no need to create it.
		// if (commonPrec == SSSNode.SubClassOf) {
		// Log.info("\n adding parent class:" + commonValue.PURI);
		// existing.addParentClasses(commonValue);
		// }
		// Log.info("(existing node)");
		// }
		// now we have the nodes name, we check if it exists already, else we
		// create it
		return newnode;

	}

	// gets all the domains that support this node as having this
	// predicate/value pair.
	/**
	 * Returns every domain that supports this node having this set's
	 * predicate/value pair, concatenated with each entry preceded by ",".
	 * (An empty string means no supporting domains are recorded.)
	 */
	public String getURIsThatSupportThisTriplet(SSSNode node) {
		StringBuilder sources = new StringBuilder();
		for (String uri : SupportingDomains.get(node)) {
			Log.info("supporting domain=:" + uri);
			sources.append(",").append(uri);
		}
		return sources.toString();
	}

	/**
	 * Returns all distinct source domains recorded in this set, joined with
	 * " , " and terminated by "-" (the seed value). Ordering is unspecified
	 * as it follows HashSet iteration order.
	 */
	public String getAllDomainsSourced() {
		// dedupe via a hashset
		HashSet<String> domains = new HashSet<String>(SupportingDomains.values());

		String result = "-";
		for (String domain : domains) {
			result = domain + " , " + result;
		}

		return result;
	}

	/**
	 * Linearly searches the global registry for the set matching the given
	 * predicate/value pair.
	 *
	 * @return the matching set, or null if none is registered (if several
	 *         match, the last one examined wins)
	 */
	static public SSSNodesWithCommonProperty getSetFor(SSSNode pred,
			SSSNode value) {

		info("_____getting matching set-: " + pred.PURI + " " + value.PURI);
		Log.info("_____getting matching set: " + pred.PURI + " " + value.PURI);
		Log.info("____searching "
				+ globalNodesWithPropertyListByPredicate.size()
				+ " propertylists");

		// Hook for dynamically-tested predicates. In future this should be
		// refactored as there could be many dynamic tests; full dynamic
		// rulesets are not the intention, merely common stuff.
		if (pred == SSSNode.STARTSWITH) {
			info("starts with detected");
		}

		SSSNodesWithCommonProperty matchingset = null;
		int matchingsetcount = 0;

		for (SSSNodesWithCommonProperty candidate
				: globalNodesWithPropertyListByPredicate.get(pred)) {

			Log.info("testing:-  " + candidate.getCommonPrec() + ":" + candidate.getCommonValue());

			if (candidate.commonValue.equals(value)) {
				matchingset = candidate;
				matchingsetcount++;
			}
		}

		Log.info("found " + matchingsetcount + " set matchs");
		info("found " + matchingsetcount + " set matchs");

		return matchingset;
	}

	/**
	 * Asynchronously gathers all nodes with the given predicate/value pair,
	 * triggering any deferred file loads first, then invokes the supplied
	 * callback with the results.
	 *
	 * If no matching set exists the callback runs immediately with an empty
	 * list (seeded with the value itself for SubClassOf queries, so searching
	 * "subclassof green" still yields "green").
	 *
	 * @param pred                    predicate to match
	 * @param value                   value to match
	 * @param dothisafterForIntersect callback receiving the result nodes
	 * @param forwardinvertFlag       passed through to the callback unchanged
	 */
	static public void getAllNodesWithProperty(SSSNode pred,
			SSSNode value, final DoSomethingWithNodesRunnable dothisafterForIntersect, final Boolean forwardinvertFlag) {


		final ArrayList<SSSNode> valueArray = new ArrayList<SSSNode>();	
		valueArray.add(value);

		final ArrayList<SSSNode> predicateArray = new ArrayList<SSSNode>();	
		predicateArray.add(pred);

		//as we know the value and predicate for sure we can check for a preload here

		final SSSNodesWithCommonProperty neededSet = getSetFor(pred, value);
		Log.info("--completed set getting");
		
		
		if (neededSet==null){
			
			SuperSimpleSemantics.info("no set found");
			
			//as there's no matching sets we create an empty set and run the doThisAfter command with it
			final ArrayList<SSSNode> empty = new ArrayList<SSSNode>();	
			
			//even if we have no sets, then the set should contain the searched for value.
			//This means searching for "subclassof green" results in "green" being in the results.
			
 			if (pred==SSSNode.SubClassOf){
				empty.add(value);
			}
			
			dothisafterForIntersect.run(empty, forwardinvertFlag);
			
			return;
		}
		

		// Continuation run once the set's files are loaded.
		final Runnable bit_to_do_after_loading = new Runnable(){
			@Override
			public void run() {
				getAllNodesWithPropertys(predicateArray,valueArray,dothisafterForIntersect,forwardinvertFlag);
			}
		};		
		
		// Defer load-and-continue to the scheduler to avoid blocking.
		SuperSimpleSemantics.waitFor.scheduleAfter(new Runnable() {
			
			@Override
			public void run() {

				neededSet.loadAndRunAfter(bit_to_do_after_loading);
				
			}
		});
		
		
		
		/*
		if (!neededSet.isLoaded){

			SuperSimpleSemantics.info("Warning not loaded:"+neededSet.getCommonPrec()+"="+neededSet.getCommonValue());

			neededSet.setCallback(new LoadingCallback(){

				@Override
				public void onFailure(Throwable caught) {
					SuperSimpleSemantics.error("Load Failed");
					
				}

				@Override
				public void onSuccess(String result) {
					SuperSimpleSemantics.info("loaded:"+neededSet.getCommonPrec()+"="+neededSet.getCommonValue());
					SuperSimpleSemantics.info("continueing query:");

					getAllNodesWithPropertys(predicateArray,valueArray,dothisafterForIntersect,forwardinvertFlag);
				}


			});

			for (String url : neededSet.fileSource) {

				neededSet.loadSSSFile(url);

			}

			return;
		} else {
			SuperSimpleSemantics.info("set already loaded:"+neededSet.fileSource.toString());
			
			//possible optimisation here; this function does much the same as getSetFor when theres only one pred/value being searched
			
			 getAllNodesWithPropertys(predicateArray,valueArray,dothisafterForIntersect,forwardinvertFlag);
				
		}
	*/
		 return;

	}

	/**
	 * Synchronous variant of getAllNodesWithProperty: queries only nodes
	 * already in memory without triggering any loading.
	 *
	 * @return the matching nodes, or null when no set exists for the pair
	 */
	static public HashSet<SSSNode> getAllCurrentNodesWithProperty(SSSNode pred,
			SSSNode value, final Boolean forwardinvertFlag) {

		final ArrayList<SSSNode> valueArray = new ArrayList<SSSNode>();
		valueArray.add(value);

		final ArrayList<SSSNode> predicateArray = new ArrayList<SSSNode>();
		predicateArray.add(pred);

		// as we know the value and predicate for sure we can check for a preload here
		final SSSNodesWithCommonProperty neededSet = getSetFor(pred, value);
		Log.info("=completed set getting");

		if (neededSet == null) {
			return null;
		}

		if (neededSet.isLoaded) {
			Log.info("set already loaded:"+neededSet.fileSource.keySet().toString());
		} else {
			SuperSimpleSemantics.info("Warning not loaded:"+neededSet.getCommonPrec()+"="+neededSet.getCommonValue());
		}

		// possible optimization here; this function does much the same as
		// getSetFor when there's only one pred/value being searched
		return getAllCurrentNodesWithPropertys(predicateArray, valueArray, forwardinvertFlag);
	}

	/** Asynchronously gathers all nodes matching the given predicates and
	 * values, then hands the result to dothisafterForIntersect (with the
	 * supplied invert flag) on a deferred tick. **/
	static public void getAllNodesWithPropertys(ArrayList<SSSNode> preds,
			ArrayList<SSSNode> values, final DoSomethingWithNodesRunnable dothisafterForIntersect, final Boolean forwardinvertFlag) {

		Log.info("____searching "
				+ globalNodesWithPropertyListByPredicate.size()
				+ " propertylists for "+preds.toString()+" v="+values.toString());

		// wrap the caller's runnable: once results arrive we snapshot them
		// and hand them over after giving the interface time to update
		final DoSomethingWithNodesRunnable handOverResults = new DoSomethingWithNodesRunnable() {

			@Override
			public void run(ArrayList<SSSNode> newnodes, boolean invert) {

				if (dothisafterForIntersect == null) {
					return;
				}

				// copy the results before the deferred hand-over
				final ArrayList<SSSNode> snapshot = new ArrayList<SSSNode>();
				snapshot.addAll(newnodes);

				SuperSimpleSemantics.waitFor.scheduleAfter(new Runnable() {

					@Override
					public void run() {
						dothisafterForIntersect.run(snapshot, forwardinvertFlag);
					}

				});
			}

		};

		getAllNodesInSetsFor(preds, values, handOverResults);
	}

	/** Gets the currently known matching nodes only — no dynamic loading
	 * is triggered. Subclasses of the results are included by the
	 * delegated search. **/
	static public HashSet<SSSNode> getAllCurrentNodesWithPropertys(ArrayList<SSSNode> preds,
			ArrayList<SSSNode> values, Boolean forwardinvertFlag) {

		Log.info("____searching "
				+ globalNodesWithPropertyListByPredicate.size()
				+ " propertylists for "+preds.toString()+" v="+values.toString());

		// the heavy lifting (set filtering plus subclass expansion) lives in
		// getAllCurrentNodesInSetsFor; this method only adds the logging above
		return getAllCurrentNodesInSetsFor(preds, values);
	}
	/** Convenience overload for a single predicate/value pair. **/
	public static HashSet<SSSNode> getAllCurrentNodesInSetsFor(SSSNode pred,
			SSSNode value) {

		ArrayList<SSSNode> preds = new ArrayList<SSSNode>();
		preds.add(pred);

		ArrayList<SSSNode> vals = new ArrayList<SSSNode>();
		vals.add(value);

		return getAllCurrentNodesInSetsFor(preds, vals);
	}
	
	/** Convenience overload: search for a single predicate/value pair,
	 * delivering results through doThisAfter. **/
	public static void getAllNodesInSetsFor(SSSNode pred,
			SSSNode value, DoSomethingWithNodesRunnable doThisAfter) {

		ArrayList<SSSNode> preds = new ArrayList<SSSNode>();
		preds.add(pred);

		ArrayList<SSSNode> vals = new ArrayList<SSSNode>();
		vals.add(value);

		getAllNodesInSetsFor(preds, vals, doThisAfter);
	}
	
	/** Convenience overload: one predicate with several candidate values,
	 * delivering results through doThisAfter. **/
	public static void getAllNodesInSetsFor(SSSNode pred,
			ArrayList<SSSNode> values, DoSomethingWithNodesRunnable doThisAfter) {

		ArrayList<SSSNode> preds = new ArrayList<SSSNode>();
		preds.add(pred);

		getAllNodesInSetsFor(preds, values, doThisAfter);
	}
	
	/** gets all the nodes in a set, as well children of those nodes.
	 * This only looks for nodes currently known about , it wont dynamically load anything.
	 *
	 * For each requested predicate we gather every property set keyed by it,
	 * keep the members of any set whose value is (or inherits from) one of
	 * the requested values, then add known subclasses of every match. **/
	public static HashSet<SSSNode> getAllCurrentNodesInSetsFor(ArrayList<SSSNode> preds,
			ArrayList<SSSNode> values) {

		//we first get all the propety sets for all the predicates requested
		//only then we start to loop over checking values

		//Iterator<SSSNodesWithCommonProperty> nwpit = globalNodesWithPropertyListByPredicate
		//		.get(pred).iterator();

		ArrayList<SSSNodesWithCommonProperty> setToSearch = new ArrayList<SSSNodesWithCommonProperty>();
		for (SSSNode pred : preds) {

			Set<SSSNodesWithCommonProperty> newToSearch = globalNodesWithPropertyListByPredicate.get(pred);

			setToSearch.addAll(newToSearch);

		}


		Iterator<SSSNodesWithCommonProperty> nwpit =setToSearch.iterator();

		Log.info("____searching out of  "
				+ setToSearch.size());

		HashSet<SSSNode> matchingNodes = new HashSet<SSSNode>();
		//set interface loading to set length
		SuperSimpleSemantics.addToTotalSearchUnits(setToSearch.size());

		while (nwpit.hasNext()) {
			//set interface loading plus a step
			SuperSimpleSemantics.stepSearchClockForward();


			SSSNodesWithCommonProperty sssnodeSet = nwpit.next();

			Log.info("checking:" + sssnodeSet.commonPrec.PURI+" size -"+
					sssnodeSet.size());
			Log.info("checking:" + sssnodeSet.commonValue.PURI+" size -"+
					sssnodeSet.size());

			if (!sssnodeSet.isLoaded){

				SuperSimpleSemantics.info("WARNING FILE NOT LOADED"+sssnodeSet.fileSource.keySet().toString());

				//trigger loading here with a callback when all returned
				//NOTE(review): no loading is actually triggered here; an
				//unloaded set contributes only whatever members are already
				//in memory.

			}

			sssnodeSet.commonValue.updateCachesOnNextUse();

			//might be an idea not to check for parents here, but rather to work 
			//out all the children of the request Predicate in advance, and then just
			//see if it matchs one of them

			//we loop over all possible values
			for (SSSNode value : values) {
				if (sssnodeSet.commonValue.isOrHasParentClass((value.PURI))) {

					matchingNodes.addAll(sssnodeSet);
				}				 
			}


		}

		Log.info("adding subclasses");
		HashSet<SSSNode> Subclasses = new HashSet<SSSNode>();
		//add subclasses
		//collected into a separate set to avoid mutating matchingNodes
		//while iterating it (would throw ConcurrentModificationException)
		Iterator<SSSNode> matchit = matchingNodes.iterator();
		while (matchit.hasNext()) {

			SSSNode sssNode = (SSSNode) matchit.next();					
			Subclasses.addAll(getAllCurrentNodesInSetsFor(SSSNode.SubClassOf,sssNode));

		}

		matchingNodes.addAll(Subclasses);

		/*
		for (SSSNode sssNode : matchingNodes) {

			matchingNodes.addAll(getAllNodesInSetsFor(SSSNode.SubClassOf,sssNode));


		}*/
		//foreach matchingNodes 
		//so we get all the nodes in sets for classof 	matchingNodes


		return matchingNodes;
	}

	/** runs some code only after all the nodes in this set are loaded
	 * from the know files.
	 *
	 * If already loaded, doThisAfter runs immediately; otherwise a one-shot
	 * LoadingCallback is registered and every known source file is loaded. **/
	private void loadAndRunAfter(final Runnable doThisAfter){

		if (!isLoaded){
			
			SuperSimpleSemantics.info("files "+this.getSourceFiles()+" not loaded, loading...");
		
		//one-shot callback: fires doThisAfter once loading succeeds, then
		//unregisters itself so it cannot fire again on later loads
		LoadingCallback runAfterLoadingFiles = new LoadingCallback(){

			@Override
			public void onFailure(Throwable caught) {
				
				SuperSimpleSemantics.info("XXXXXXXXXXXXXXXX WARNING FILE LOAD FAILED");
				//setCallback(null);
			}

			@Override				
			public void onSuccess(String result) {
				
				SuperSimpleSemantics.log("FILE LOADED:"+result,"green");
				doThisAfter.run();
				
				//remove old callback here? (so it doesnt fire twice)
				removeCallback(this);
				//setCallback(null);
				
			}
		};
		
		this.setCallback(runAfterLoadingFiles);
		
		//kick off a load for every known source file of this set
		for (String url : fileSource.keySet()) {

			Log.warning("started to load:"+url);
			
			loadSSSFile(url,fileSource.get(url));
			

		}
		
		} else {
			SuperSimpleSemantics.info("Already loaded, so we can just run it.");
			doThisAfter.run();
		}
	}
	
	/** Gets all the nodes in this set, plus subclasses of those nodes,
	 * loading the backing files first when required.
	 *
	 * Results are delivered asynchronously via doThisAfter; the invert flag
	 * it receives is always false here. **/
	public void getAllNodesInSet(final DoSomethingWithNodesRunnable doThisAfter) {

		Log.info("~(getAllNodesInSet)");

		if (!isLoaded){

			Log.info("set not loaded yet");

			//if it isnt loaded, load and re-enter this method when done
			Runnable runAfterLoading = new Runnable(){
				@Override
				public void run() {
					getAllNodesInSet(doThisAfter);
				}				
			};					

			loadAndRunAfter(runAfterLoading);

		} else {

			//if this object is loaded;
			Log.info("set loaded: "+this.getCommonPrec()+" "+this.getCommonValue());

			Log.info("set size: "+this.size());

			//create an object to store the results, seeded with the direct members
			final HashSet<SSSNode> resultNodes = new HashSet<SSSNode>();
			resultNodes.addAll(thisset);

			final int numberToTest= this.size();

			//BUGFIX: an empty set used to schedule the repeating command below,
			//whose first execute() called nit.next() with no elements
			//(NoSuchElementException) and doThisAfter then never ran.
			//Deliver the (empty) result immediately instead.
			if (numberToTest == 0) {
				ArrayList<SSSNode> finalresult = new ArrayList<SSSNode>();
				finalresult.addAll(resultNodes);
				SuperSimpleSemantics.info("set empty - running post action:");
				doThisAfter.run(finalresult, false);
				return;
			}

			//prepare a callback for when each subclass result comes in:
			//counts down once per member node, then fires doThisAfter
			final DoSomethingWithNodesRunnable onEachResultSet = new DoSomethingWithNodesRunnable(){

				int leftToTest = numberToTest;
				@Override
				public void run(ArrayList<SSSNode> newnodes, boolean invert) {

					leftToTest--;
					SuperSimpleSemantics.info("# of nodes left to test for subclass's:"+leftToTest);

					//null signals "no subclass set found" for that node
					if (newnodes!=null){
						resultNodes.addAll(newnodes);
					}

					if (leftToTest==0){

						ArrayList<SSSNode> finalresult = new ArrayList<SSSNode>();
						finalresult.addAll(resultNodes);

						SuperSimpleSemantics.info("all subclass's of this set added - running post action:");

						doThisAfter.run(finalresult, false);

					}
				}

			};

			//we loop over these nodes looking for subclasses of them too.
			final Iterator<SSSNode> nit = this.iterator();

			SuperSimpleSemantics.info("# of nodes to test for subclass's:"+this.size());

			//loop over all the sets in order to get the nodes in their subclasss's
			//(a repeating command so the interface stays responsive)
			SuperSimpleSemantics.waitForRep.scheduleAfter(new MyRepeatingCommand(){

				@Override
				public boolean execute() {

					Log.info("~(geting next node)");	

					SSSNode node = (SSSNode) nit.next();

					SSSNodesWithCommonProperty set = getSetFor(SSSNode.SubClassOf, node);

					Log.info("~completed set getting for subclasses");

					if (set!=null){

						set.getAllNodesInSet( onEachResultSet );

					} else {

						Log.info("~(no set found)");	

						SuperSimpleSemantics.info("(no set found)");	

						//still count this node down, with no extra nodes
						onEachResultSet.run(null, false);

						Log.info("~(onEachResultSet run)");	

					}

					Boolean hasnext = nit.hasNext();

					if (hasnext){
						Log.info("had next after:"+node.getPURI());
					}

					return hasnext;
				}

			});

		}

	}
	/** Asynchronously gathers all nodes matching the requested
	 * predicate/value pairs, first expanding the predicate list with its
	 * subclasses (unless the query itself is a SubClassOf query), then
	 * collecting the members of every relevant set and handing the combined
	 * result to doThisAfter. */
	public static void getAllNodesInSetsFor(
			final ArrayList<SSSNode> requestedPreds,
			final ArrayList<SSSNode> values,			
			final DoSomethingWithNodesRunnable doThisAfter) {

		Log.warning("request for "+requestedPreds.toString()+"="+values.toString());
		
		
		//prepare actions to do after we are sure we have all the subtypes
		final DoSomethingWithNodesRunnable afterWeGetPredicatesDoThis = new DoSomethingWithNodesRunnable(){

			
			@Override
			public void run(ArrayList<SSSNode> preds, boolean invert) {
				
				//add originally requested preds
				//preds.addAll(requestedPreds);
				
				Log.warning("preds="+preds.toString());
				Log.warning("values="+values.toString());
				
				// the results are the predicates to use
				ArrayList<SSSNodesWithCommonProperty> setsToSearch = new ArrayList<SSSNodesWithCommonProperty>();
							
				
				for (SSSNode pred : preds) {

					Set<SSSNodesWithCommonProperty> newToSearch = globalNodesWithPropertyListByPredicate.get(pred);

					setsToSearch.addAll(newToSearch);

				}
				Log.warning("setsToSearch="+setsToSearch.size());
				
				
				//now we eliminate all the unneeded sets by checking their values
				//NOTE(review): identity (==) comparison — assumes set values
				//and requested values are the same node instances; confirm
				//nodes are interned/unique per URI.
				ArrayList<SSSNodesWithCommonProperty> finalReliventSets = new ArrayList<SSSNodesWithCommonProperty>();
				
				
				for (SSSNodesWithCommonProperty set : setsToSearch) {
					
					for (SSSNode value : values) {						
						//Log.warning("Set found:"+set.commonPrec.toString()+"="+set.commonValue.toString());	
						
						if (set.commonValue==value){
							Log.warning("Set found:"+set.commonPrec.toString()+"="+set.commonValue.toString());	
							finalReliventSets.add(set);
							//resultnodes.addAll(set);
						}
						
					}
				}

				//result
				final ArrayList<SSSNode> resultnodes = new ArrayList<SSSNode>();
				//if we were searching for a classof we also add the parent class
				if ((requestedPreds.size()==1 && requestedPreds.get(0)==SSSNode.SubClassOf))
				{
					resultnodes.addAll(values);
				}
				
				
				
				DoSomethingWithNodesRunnable afterWeGetNodes = new DoSomethingWithNodesRunnable(){

					@Override
					public void run(ArrayList<SSSNode> newnodes, boolean invert) {

						resultnodes.addAll(newnodes);
						doThisAfter.run(resultnodes, false);
					}
					
				};
				
				
				
				SSSNodesWithCommonProperty.addAllNodesInTheseSets(finalReliventSets,afterWeGetNodes );
				
					
				
				
				//doThisAfter
				//requestedPreds
				//Log.warning("-----------------------------");
				//Log.warning("--Requested predicates:"++"--");
				
				
				
				
				
				
			}
			
		};
		

		//get all subtypes of predicates too unless we are looking for subtypes
		//eg "creator=xxxx" also checks "writer=XXXX"
		//
		
		if (!(requestedPreds.size()==1 && requestedPreds.get(0)==SSSNode.SubClassOf))
		{
			
			Log.warning("--------------getting subclasses of:-----------"+requestedPreds.toString());
			
			SSSNodesWithCommonProperty.getAllNodesInSetsFor(SSSNode.SubClassOf, requestedPreds, afterWeGetPredicatesDoThis);
			
		} else {
			
			Log.warning("--------------getting subclasses of2:"+values.toString());
			//we are looking for subtypes anyway no need to get subtypes. (because theres no subsclass of "SSSNode.SubClassOf"	
			final ArrayList<SSSNode> predArray=new ArrayList<SSSNode>();
			predArray.add(SSSNode.SubClassOf);
			
			//we run after giving the interface time to update
			SuperSimpleSemantics.waitFor.scheduleAfter(new Runnable() {				
				@Override
				public void run() {
					afterWeGetPredicatesDoThis.run(predArray, false);
				}
			});
			
			
			
		}
		
		

		
		
		
	}
	
	
	/** Collects the nodes of every set in the given list (each set loads
	 * itself on demand) and passes the combined result to afterWeGetNodes
	 * once the last set has reported in. */
	public static void addAllNodesInTheseSets(ArrayList<SSSNodesWithCommonProperty> sets,final DoSomethingWithNodesRunnable afterWeGetNodes)
	{
		SuperSimpleSemantics.info("adding nodes in specified sets");
		
		final ArrayList<SSSNode> resultnodes = new ArrayList<SSSNode>();
				
		//we set up a runnable that runs as each result is recieved
		//Remember, results may need to be loaded from files
		//So this is all asycronominious
		
		final int number_of_sets_to_add = sets.size();
		
		//if its empty, we just fire the post command with an empty set
		if (number_of_sets_to_add==0){
			SuperSimpleSemantics.info("No sets specified to add nodes from");
			afterWeGetNodes.run(resultnodes, false);
			return;
		}
		
		
		//countdown callback: accumulates each set's nodes and fires the
		//final callback only after the last set has been added
		final DoSomethingWithNodesRunnable onEachResult = new DoSomethingWithNodesRunnable(){
		
			int left_to_add = number_of_sets_to_add;
			
			@Override
			public void run(ArrayList<SSSNode> newnodes, boolean invert) {

				SuperSimpleSemantics.info("_____got nodes: "+newnodes.toString());
				SuperSimpleSemantics.info("left_to_add="+left_to_add);
				
				//add to current result
				resultnodes.addAll(newnodes);
				left_to_add--;
				//run only after all have been got;
				if (left_to_add==0){
					
					afterWeGetNodes.run(resultnodes, false);
				}
			}
			
		};
		
		
		
		
		final Iterator<SSSNodesWithCommonProperty> sit  = sets.iterator();
		
		//safe to call next() on the first execute(): the empty case
		//returned early above, so the iterator has at least one element
		SuperSimpleSemantics.waitForRep.scheduleAfter(new MyRepeatingCommand() {
			
			@Override
			public boolean execute() {
				
				SSSNodesWithCommonProperty setToAdd = (SSSNodesWithCommonProperty) sit.next();
				
				SuperSimpleSemantics.info("adding nodes from "+setToAdd.commonPrec.toString()+"="+setToAdd.commonValue.toString());	
									
					
					//Log.warning("not loaded yet: "+setToAdd.commonPrec+"="+setToAdd.commonValue);
					//we use the get nodes in set function, we loads the set
					//and triggers the DoSomething runnable with freshly got nodes.
				
					setToAdd.getAllNodesInSet(onEachResult);
					
				//Note; We need to get all the nodes after loading them
				//setToAdd.getAllNodesInSet(doThisAfter)
					return sit.hasNext();
			}
		});
		/*
		
		while (sit.hasNext()) {
			
			SSSNodesWithCommonProperty setToAdd = (SSSNodesWithCommonProperty) sit.next();
			
			SuperSimpleSemantics.info("adding nodes from "+setToAdd.commonPrec.toString()+"="+setToAdd.commonValue.toString());	
								
				
				//Log.warning("not loaded yet: "+setToAdd.commonPrec+"="+setToAdd.commonValue);
				//we use the get nodes in set function, we loads the set
				//and triggers the DoSomething runnable with freshly got nodes.
			
				setToAdd.getAllNodesInSet(onEachResult);
				
			//Note; We need to get all the nodes after loading them
			//setToAdd.getAllNodesInSet(doThisAfter)
			
			
		}
		*/
		
		
	}
	
	/** gets all the nodes in a set, as well children of those nodes.
	 * In the process of being replaced with a callback bassed system
	 *  to correctly support dynamic loading.
	 *  
	 *   However, it got really messy, so I switched to an alternative method.
	 *   
	 *   @deprecated use {@code getAllNodesInSetsFor(ArrayList, ArrayList,
	 *   DoSomethingWithNodesRunnable)} instead.
	 *   **/
	@Deprecated
	public static void getAllNodesInSetsFor_bu(
			ArrayList<SSSNode> preds,
			ArrayList<SSSNode> values,
			final DoSomethingWithNodesRunnable doThisAfter) {
		

		//we first get all the propety sets for all the predicates requested
		//only then we start to loop over checking values

		//Iterator<SSSNodesWithCommonProperty> nwpit = globalNodesWithPropertyListByPredicate
		//		.get(pred).iterator();

		ArrayList<SSSNodesWithCommonProperty> setToSearch = new ArrayList<SSSNodesWithCommonProperty>();
		
		for (SSSNode pred : preds) {

			Set<SSSNodesWithCommonProperty> newToSearch = globalNodesWithPropertyListByPredicate.get(pred);

			setToSearch.addAll(newToSearch);

		}


		Iterator<SSSNodesWithCommonProperty> nwpit =setToSearch.iterator();

		Log.warning("____searching out of  "
				+ setToSearch.size());

		final HashSet<SSSNode> matchingNodes = new HashSet<SSSNode>();
		
		
		//set interface loading to set length
		SuperSimpleSemantics.addToTotalSearchUnits(setToSearch.size());

		while (nwpit.hasNext()) {
			//set interface loading plus a step
			SuperSimpleSemantics.stepSearchClockForward();


			SSSNodesWithCommonProperty sssnodeSet = nwpit.next();


			sssnodeSet.commonValue.updateCachesOnNextUse();

			
			//log-only callback used when an unloaded set is encountered below
			LoadingCallback runAfterLoadingNewFile = new LoadingCallback(){

				@Override
				public void onFailure(Throwable caught) {
					Log.warning("XXXXXXXXXXXXXXXX WARNING FILE LOAD FAILED");
				}

				@Override
				public void onSuccess(String result) {
					Log.warning("XXXXXXXXXXXXXXXX FILE LOADED");
				}


			}			;
			
			
			//might be an idea not to check for parents here, but rather to work 
			//out all the children of the request Predicate in advance, and then just
			//see if it matchs one of them

			//we loop over all possible values
			for (SSSNode value : values) {
				
				if (sssnodeSet.commonValue.isOrHasParentClass((value.PURI))) {
					
					
					if (!sssnodeSet.isLoaded){
						
						Log.warning("XXXXXXXXXXXXXXXX WARNING FILE NOT LOADED");
						
						SuperSimpleSemantics.info("WARNING FILE NOT LOADED"+sssnodeSet.fileSource.keySet());
						sssnodeSet.setCallback(runAfterLoadingNewFile);
						
						//NOTE(review): even after loading completes, this
						//set's nodes are never added to matchingNodes in
						//this pass — one reason this method is deprecated.
						for (String url : sssnodeSet.fileSource.keySet()) {

							sssnodeSet.loadSSSFile(url,sssnodeSet.fileSource.get(url));
							

						}
						
						
						//trigger loading here with a callback when all returned
						
					      

					} else {
					Log.warning("set found for:" + sssnodeSet.commonPrec.PURI+"="+ sssnodeSet.commonValue.PURI+  " size -"+sssnodeSet.size());
										
					matchingNodes.addAll(sssnodeSet);
					
					}
				}				 
			}


		}

		Log.info("adding subclasses");
		
		final int leftToGet = matchingNodes.size();
				
		//countdown: run() fires once per matching node's subclass query;
		//when the count reaches zero the combined result is delivered
		DoSomethingWithNodesRunnable setAllNodesInSetsRunnable = new DoSomethingWithNodesRunnable(){
			 int left = leftToGet;
			final HashSet<SSSNode> Subclasses = new HashSet<SSSNode>();
			
			@Override
			public void run(ArrayList<SSSNode> newnodes, boolean invert) {
				// TODO Auto-generated method stub
				Subclasses.addAll(newnodes);
				left = left-1;
				Log.warning("~~~~~~~~~~~~~~~~~~~~~~~~~~~~results="+newnodes.size()+"__"+left);
				if (left==0){
					matchingNodes.addAll(Subclasses);

					
					ArrayList<SSSNode> result = new ArrayList<SSSNode>();
					result.addAll(matchingNodes);
					//foreach matchingNodes 
					//so we get all the nodes in sets for classof 	matchingNodes
					doThisAfter.run(result, false);
				}
				
			}
			
		};
		
		//nothing matched: fire the callback straight away with an empty
		//result (the countdown above would otherwise never reach zero)
		if (matchingNodes.size()==0){
			//matchingNodes.addAll(Subclasses);

			
			ArrayList<SSSNode> result = new ArrayList<SSSNode>();
			result.addAll(matchingNodes);
			//foreach matchingNodes 
			//so we get all the nodes in sets for classof 	matchingNodes
			doThisAfter.run(result, false);
		}
		
		//add subclasses
		Iterator<SSSNode> matchit = matchingNodes.iterator();
		
		while (matchit.hasNext()) {

			SSSNode sssNode = (SSSNode) matchit.next();		
			
			//should be callback based
			//HashSet<SSSNode>  results = getAllCurrentNodesInSetsFor(SSSNode.SubClassOf,sssNode);
			Log.warning("~~~~~~~~~~~~~~~~getting subclass of:"+sssNode.PURI);
			
			getAllNodesInSetsFor(SSSNode.SubClassOf,sssNode,setAllNodesInSetsRunnable );
			
			//Subclasses.addAll(results);
			

		}
		
		/*

		matchingNodes.addAll(Subclasses);

		
		ArrayList<SSSNode> result = new ArrayList<SSSNode>();
		result.addAll(matchingNodes);
		//foreach matchingNodes 
		//so we get all the nodes in sets for classof 	matchingNodes
		doThisAfter.run(result, false);
*/
		return;
	}

	//why not use just ""contains" ? does the same thing
	public boolean containsNode(SSSNode node) {

		Iterator<SSSNode> contents = this.iterator();

		while (contents.hasNext()) {

			SSSNode sssNode = contents.next();
			if (sssNode==node) {
				return true;
			}

		}

		return false;

	}


	/** Returns true if any node in this set reports the given URI.
	 * Null entries are logged as a data error and skipped. **/
	public boolean containsNodeWithURI(String URI) {

		Iterator<SSSNode> contents = this.iterator();

		while (contents.hasNext()) {

			SSSNode sssNode = contents.next();

			if (sssNode==null){
				Log.severe("null node in CPL error");
				//BUGFIX: previously fell through and dereferenced the null
				//node on the next line, throwing a NullPointerException
				continue;
			}
			
			if (sssNode.hasURI(URI)) {
				return true;
			}

		}

		return false;

	}

	/** If nothing is left to load globally, optionally refresh the node
	 * parent caches and fire the global completion callback exactly once. **/
	public static void checkIfFinnishedGlobal() {

		if (getGlobalLeftToLoad() != 0) {
			return;
		}

		Log.warning("NodesWithCommonProperty left to load="+getGlobalLeftToLoad());

		if (globalCalledWhenDone == null) {
			return;
		}

		if (SuperSimpleSemantics.isAutoRefreshNodeParentCaches()){
			SSSNode.refreshAllCaches();
		}

		Log.warning("running called when done");

		// null the field before firing so the callback cannot run twice
		Runnable tempcopy = globalCalledWhenDone;
		globalCalledWhenDone = null;
		tempcopy.run();

		Log.warning("=========================(set to null)");
	}
	
	/** Number of source files still pending for this particular set. */
	public int getLefttoLoad(){
		return FilesLeftToLoadForThisSet.size();
	}

	/** Called as files finish loading; once nothing is left to load this set
	 * is registered in the global predicate index (first time only), flagged
	 * loaded, added to the debug displayer, and its callbacks are fired. */
	public void checkLeftToLoad(final String fileURL) {
		
		if (getLefttoLoad()==0){
			Log.warning("XXXXXXXXX Nothing Left To Load");
		}
		
		//sets flagged unused never register themselves or fire callbacks
		if (list_not_used){
			return;
		}
		
		if (getLefttoLoad() == 0 && !NodeListActive) {


			Log.info("adding a set for:"+thisset.getCommonPrec().PLabel+" , "+thisset.getCommonValue().PLabel);
			Log.info("set has "+thisset.size()+" values in it");

			//first completion: publish this set into the global index
			globalNodesWithPropertyListByPredicate.put(commonPrec,thisset);										
			NodeListActive = true;
			isLoaded = true;

		}

		if (getLefttoLoad() == 0) {
			isLoaded = true;
			
			//add to cpl display
			Log.info("set loaded? adding to displayer");
			SuperSimpleSemantics.addCPLToDisplayer(thisset);

			// activate callback
			calledWhenDoneCheck(fileURL);
			
			
		}
	}

	/** Fires and removes the queued LoadingCallbacks for this set.
	 * NOTE(review): the comment below says we loop over a copy, but the code
	 * actually iterates the live list by snapshotting only its size —
	 * callbacks added during an onSuccess are not run in this pass; confirm
	 * that deferral is intended. */
	private void calledWhenDoneCheck(final String fileURL) {
		
		if (!calledWhenDoneList.isEmpty()) {
			
			//note; because things can be addesd to this list at any time we loop over a copy
			
			//Iterator<LoadingCallback> cwdit = calledWhenDoneList.iterator();

			Log.warning("running LoadingCallbacks:"+calledWhenDoneList.size());
			
			
			//snapshot the size so callbacks queued while firing are deferred
			int size = calledWhenDoneList.size();
			int i=0;
			
			while (i<size ) {
				
				//always take the head; it is removed after a successful run
				LoadingCallback loadingCallback = calledWhenDoneList.get(0);
							
				if (loadingCallback!=null){		
					
					Log.warning("running LoadingCallback:"+i);
					loadingCallback.onSuccess("Loaded:" + fileURL);				
					calledWhenDoneList.remove(loadingCallback);
					i++;
				
				} else {
					Log.warning("running LoadingCallback:"+i);
					break;
				}
				
			}
			
							
		} else {
			Log.warning("callback when done list empty");
		}
	}

	/** Returns every currently known node appearing in any set keyed by the
	 * given predicate. No dynamic loading — unloaded sets only contribute
	 * what is already in memory (a warning is logged for them). **/
	static public ArrayList<SSSNode> getAllCurrentNodesWithPredicate(SSSNode pred) {

		Set<SSSNodesWithCommonProperty> nodeLists = globalNodesWithPropertyListByPredicate.get(pred);

		Log.info(" number of sets with this pedicate ("+pred.getPURI()+") :" + nodeLists.size());

		ArrayList<SSSNode> allMatchingNodes = new ArrayList<SSSNode>();

		//for-each rather than an explicit iterator (see history note about
		//concurrent modification; callers should prefer the callback-based
		//requests so loading can finish first)
		for (SSSNodesWithCommonProperty sssNodesWithCommonProperty : nodeLists) {

			if (!sssNodesWithCommonProperty.isLoaded){
				SuperSimpleSemantics.log("WARNING FILE With Predicate NOT LOADED"+sssNodesWithCommonProperty.fileSource.keySet(),"yellow");
				Log.warning("WARNING FILE With Predicate NOT LOADED");
			}

			allMatchingNodes.addAll(sssNodesWithCommonProperty);

		}

		return allMatchingNodes;
	}

	/** Searches all sets and returns those containing a node with this URI.
	 * Probably should be supplemented with a way to search using the node
	 * itself rather then the uri. **/
	public static HashSet<SSSNodesWithCommonProperty> getCommonPropertySetsContaining(
			String URI) {

		HashSet<SSSNodesWithCommonProperty> matchingCPLs = new HashSet<SSSNodesWithCommonProperty>();

		for (SSSNodesWithCommonProperty candidate : globalNodesWithPropertyListByPredicate.values()) {

			if (candidate.containsNodeWithURI(URI)) {
				matchingCPLs.add(candidate);
			}

		}

		return matchingCPLs;

	}

	/** Empties every property set and all global bookkeeping, returning the
	 * loader to its initial "nothing started" state. **/
	public static void clearAll() {

		for (SSSNodesWithCommonProperty cpList : globalNodesWithPropertyListByPredicate.values()) {

			if (!cpList.isLoaded){
				SuperSimpleSemantics.info("WARNING FILE TO Clear NOT LOADED");
			}

			// empty each per-set store before dropping the set itself
			cpList.calledWhenDoneList.clear();
			cpList.SupportingDomains.clear();
			cpList.fileSource.clear();
			cpList.FilesLeftToLoadForThisSet.clear();
			cpList.clear();

		}

		globalNodesWithPropertyListByPredicate.clear();
		CPLToParseList.clear();

		// -10 is the sentinel meaning no loading has even started yet
		globalLeftToLoad = -10;

	}



	/** If a node is used in any sets, we swap it for this other one. **/
	public static void replaceNodeInSets(SSSNode oldnode, SSSNode newnode,String mergeSupportedBy) {

		SuperSimpleSemantics.info("Replacing Nodes In Already Loaded Sets: ");

		//NOTE; This method should be improved to use Node directly, and not the URI for it
		HashSet<SSSNodesWithCommonProperty> cps = SSSNodesWithCommonProperty.getCommonPropertySetsContaining(oldnode.PURI);

		// nothing references the old node, so there is nothing to do
		if (cps.size()==0){
			return;
		}

		//update each CPS to point to the new node
		for (SSSNodesWithCommonProperty sssNodesWithCommonProperty : cps) {

			Boolean removed = sssNodesWithCommonProperty.removeNodeFromThisSet(oldnode);

			//only bother adding if the old was removed; if no change was
			//needed why bother?
			if (removed){
				sssNodesWithCommonProperty.addNodeToThisSet(newnode, mergeSupportedBy);
			}

		}
	}


	/** if a node is used in any set definitions, we swap it for this other one.
	 * If the swap produces a pred/value pair that already exists, the two
	 * sets are merged and the redundant set is removed afterwards. **/
	public static void replaceNodeInSetDefinitions(SSSNode oldnode, SSSNode newnode) {


		Log.warning("replacing:"+oldnode.getPURI()+" with "+newnode.getPURI()+" in sets");
		
		
		// find sets that use oldnode as a predicate
		Collection<SSSNodesWithCommonProperty> allSets = globalNodesWithPropertyListByPredicate.values();

		//prepare
		//merged-away sets are collected here and removed after the loop
		//(removing mid-iteration would risk concurrent modification)
		HashSet<SSSNodesWithCommonProperty> removeThesePropertyLists = new HashSet<SSSNodesWithCommonProperty>();
		
		
		//loop over checking
		Iterator<SSSNodesWithCommonProperty> setit = allSets.iterator();
		
		
		
		while (setit.hasNext()) {
			
			SSSNodesWithCommonProperty set_to_check = (SSSNodesWithCommonProperty) setit.next();
			
			
		//}
		
	//	for (SSSNodesWithCommonProperty set_to_check : allSets) {

			SSSNode pred = set_to_check.commonPrec;
			SSSNode value = set_to_check.commonValue;

			//we only change or combine the set if it used the oldnode
			boolean changed = false;
			
			Log.warning("checking set:"+set_to_check.getCommonPrec().PURI+"="+set_to_check.getCommonValue().PURI);
			
			//if the old node is used as the predicate or value use the replacement instead
			if (set_to_check.commonPrec==oldnode){
				pred=newnode;

				changed = true;
			}

			if (set_to_check.commonValue==oldnode){
				
				value=newnode;

				changed = true;
			}

			//we only change or combine the set if it used the oldnode
			if (changed){

				Log.warning("detecting existing sets");
				
				//detect if the new set exists already
				SSSNodesWithCommonProperty existingSet = SSSNodesWithCommonProperty.getSetFor(pred,value);
				Log.info("~~~completed set getting");
				
				//change the values AFTER looking for an existing set
				set_to_check.commonPrec = pred;
				set_to_check.commonValue = value;
				
				
				
				//if the existing set is not null, we merge the currently tested set after changing the values 
				if (existingSet!=null ){				
					
					log("existing set found while replacing node, so we merge the sets together","RED");
					mergeSets(set_to_check,existingSet);
					
					
					removeThesePropertyLists.add(existingSet);
					
					
					

				} else {
					Log.warning("no existing sets found");
				}
				
				
				

				//else we just  continue looping


			}

		}

		Log.warning("removing unused sets:");
		
		//now safe to drop the sets whose contents were merged elsewhere
		for (SSSNodesWithCommonProperty existingSet : removeThesePropertyLists) {
			

			Boolean removed = globalNodesWithPropertyListByPredicate.get(existingSet.commonPrec).remove(existingSet);
			
			log(existingSet.getCommonPrec()+":"+existingSet.getCommonValue()+" removed = "+removed);
		}


		Log.warning("finnished replacing:"+oldnode.getPURI()+" with "+newnode.getPURI()+" in sets");

	}


	/** Merges two property lists together:
	 * set2's contents will go into set1,
	 * and then set2 should be removed by the caller.
	 * We don't remove set2 here, however, as that might cause concurrent
	 * modification errors while the caller iterates the global set collection.
	 * NOTE(review): if the two sets differ in predicate or value this only
	 * reports via error() and then merges anyway - confirm that is intended.
	 *  **/
	private static void mergeSets(SSSNodesWithCommonProperty set1,
			SSSNodesWithCommonProperty set2) {
		
		// merging a set with itself is almost certainly a caller bug; flag it
		if (set1==set2){
			log("atttempting to merge set with itself:");
		}
		
		Log.info("merging:"+set1.commonPrec+"="+set1.commonValue+" ("+set1.getAllDomainsSourced()+")"+" ("+set1.getSourceFiles()+")");
		Log.info("with:"+set2.commonPrec+"="+set2.commonValue+" ("+set2.getAllDomainsSourced()+")"+" ("+set2.getSourceFiles()+")");

		
		log("merging:"+set1.commonPrec+"="+set1.commonValue+" ("+set1.getAllDomainsSourced()+")"+" ("+set1.getSourceFiles()+")");
		log("with:"+set2.commonPrec+"="+set2.commonValue+" ("+set2.getAllDomainsSourced()+")"+" ("+set2.getSourceFiles()+")");

		// ensure values and predicates are the same first
		// (error() only reports; execution continues past a failed check)
		if (set1.commonPrec != set2.commonPrec){
			error("cant merge, different predicates");
		}
		if (set1.commonValue != set2.commonValue){
			error("cant merge, different values");
		}
		

				
		
		log("merging..");
		
		// add file sources from set2 (file url -> index location map);
		// putAll means set2's index location wins on any shared url
		//set1.fileSource.addAll(set2.fileSource)	;
		
		set1.fileSource.putAll(set2.fileSource);
		
		
		
		//add supported domains from set2
		set1.SupportingDomains.putAll(set2.SupportingDomains);
		
		//add contents from set2
		if (!set2.isLoaded){
			
			log("set2 is still loading, so instead of copying its data, we tell set1 to load the same urls");
			
			set1.loadSSSFiles(set2.fileSource);
			
			// clear callback so nothing further happens with the abandoned set2
			set2.setCallback(null); //stop anything happening with this one
			set2.disregardList(true);
			
		} else {
			log("adding all.."+set2.size()+" nodes from set");
			log("contents =.."+set2.toString());
			set1.addAll(set2);
		}
		
		Log.info("merged. Removing old:"+set2.getSourceFiles());
		
		log("merged. Removing old:"+set2.getSourceFiles());
		
		// note: currently might cause concurrent modification error with
		// iteration in SSSNode.mergeNodeSafely, so removal is left to the caller
		//Boolean removed = globalNodesWithPropertyListByPredicate.get(set2.commonPrec).remove(set2);
		
		//log("removed = "+removed);
		//currently disabled to ensure theres no interface lag from this
		//SuperSimpleSemantics.propetyListDisplayer.removeCPLToDisplayer(set2);

	}

	/** Flags this list as no longer in use (e.g. after its contents have been
	 * merged into another set), so pending work on it can be skipped. **/
	private void disregardList(boolean b) {
		list_not_used = b;
	}
	//private void loadSSSFiles(HashSet<String> fileSource2) {
	
	/** Loads every SSS file in the supplied map.
	 * Keys are file urls, values are the associated index locations
	 * (used for the default namespace if a file doesn't supply one).
	 * Called when merging a still-loading set into this one, so this set
	 * loads the same source files itself. **/
	private void loadSSSFiles(HashMap<String,String> fileSource2) {
		
		info("loading some more SSSFiles:");		
		
		// iterate entries directly - avoids a second map lookup per key
		for (java.util.Map.Entry<String,String> entry : fileSource2.entrySet()) 
		{
			info("loading :"+entry.getKey());
			loadSSSFile(entry.getKey(), entry.getValue());
		}
		
	}

	/** Registers a source file for this set. The supplied index location is
	 * recorded alongside it, providing the default namespace if the file
	 * itself doesn't specify one. **/
	public void addFileSource(String fileSource, String indexlocation) {
		
		// put() hands back the previous mapping; null means this file is new to us
		String previousIndex = this.fileSource.put(fileSource, indexlocation);
		
		if (previousIndex == null) {
			// a brand-new source file means this set can no longer be fully loaded
			isLoaded = false;
			FilesLeftToLoadForThisSet.add(fileSource);
		}
		
	}

	/** Returns a printable listing of all files this set was sourced from
	 * (there can be more than one), or a placeholder string when no file
	 * source has been specified yet. **/
	public String getSourceFiles() {

		// isEmpty() is the idiomatic form of keySet().size()==0
		if (fileSource.isEmpty()){
			return "no file source specified";
		}

		return fileSource.keySet().toString();
	}

	/** Adds all the supplied entries to the "left to parse" list.
	 * Entries on that list are not yet considered ready to be queried.
	 * @param sssIndex the triplets (FileURL : Predicate : Value) awaiting parsing **/
	public static void addToLeftToLoadList(ArrayList<SSSTriplet> sssIndex) {
		
		// primitive boolean - no need for the boxed Boolean here
		boolean changed = CPLToParseList.addAll(sssIndex);
		
		Log.warning("adding to list:"+sssIndex.size()+" list changed:"+changed);		
		
	}
	/** Removes an entry from the "left to parse" list.
	 * Once off this list the entry is considered ready to be queried. **/
	public static void removeFromLeftToLoadList(SSSTriplet listDefinition) {

		// the return value (whether it was actually present) is not needed
		CPLToParseList.remove(listDefinition);

	}
	
	/** Convenience method that removes an entry from the "left to load" list
	 * by supplying the parts of its definition (file url, predicate, value).
	 * Predicate and value are matched by identity; the url by string equality. **/
		public static void removeFromLeftToLoadList(String url, SSSNode commonPredicate, SSSNode commonValue) {
			
			Iterator<SSSTriplet> candidates = CPLToParseList.iterator();
			while (candidates.hasNext()) {
				
				SSSTriplet candidate = candidates.next();
				
				// guard clauses: skip anything not matching all three parts
				if (candidate.precident != commonPredicate) {
					continue;
				}
				if (candidate.value != commonValue) {
					continue;
				}
				if (!candidate.subject.getPURI().equals(url)) {
					continue;
				}
				
				// remove through the iterator - removing directly from the
				// set we are looping over would blow up with a
				// ConcurrentModificationException
				candidates.remove();
			}
			
		}

	/** Supply a complete list of all known indexes (which are to be loaded) before
	 * loading them. This prevents the code thinking it's finished when it has loaded
	 * everything it knows about before being given the next index to load. **/
	public static void addToLeftToLoadList(HashSet<SSSIndex> allKnowenIndexs) {
		
		// Remember: an index file is just an arraylist of triplets, each being
		//   FileURL : Predicate : Value
		// where the FileURL contains nothing but a list of nodes with those properties.
		for (SSSIndex sssIndex : allKnowenIndexs) {
			addToLeftToLoadList(sssIndex);
		}
	}
	
	
	/** Logs the predicate = value pair of every property list currently held in
	 * the global per-predicate index. Debugging aid only. **/
	public static void logAllSets() {

		// enhanced for-loop: values() is already typed, so the manual
		// iterator and redundant cast of the old version are unnecessary
		for (SSSNodesWithCommonProperty cpl : globalNodesWithPropertyListByPredicate.values()) {
			
			Log.info(" :-"+cpl.getCommonPrec().getPURI()+" = "+cpl.getCommonValue());
			
		}
		
	}




}
