package edu.kit.aifb.evtcrawl.pachube;

import static edu.kit.aifb.evtcrawl.Configuration.REPODIR;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.locks.ReentrantLock;
import javax.xml.bind.DatatypeConverter;
import org.openrdf.model.BNode;
import org.openrdf.model.Resource;
import org.openrdf.model.Statement;
import org.openrdf.model.URI;
import org.openrdf.model.Value;
import org.openrdf.model.impl.BNodeImpl;
import org.openrdf.model.impl.LiteralImpl;
import org.openrdf.model.impl.StatementImpl;
import org.openrdf.model.impl.URIImpl;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;
import org.openrdf.repository.sail.SailRepository;
import org.openrdf.rio.RDFHandlerException;
import org.openrdf.rio.n3.N3Writer;
import org.openrdf.sail.nativerdf.NativeStore;

import edu.kit.aifb.evtcrawl.LoadingThread;
import edu.kit.aifb.evtcrawl.pachube.structure.PachubeDatastream;
import edu.kit.aifb.evtcrawl.pachube.structure.PachubeFeed;
import edu.kit.aifb.evtcrawl.pachube.structure.PachubeLocation;

/**
 * Polls a set of registered Pachube (Cosm) datastreams in a background
 * thread and persists one RDF event per new datastream value into a local
 * Sesame {@link NativeStore} repository.
 *
 * <p>Usage: construct, call {@link #init()}, then {@link #start()}; call
 * {@link #shutDown()} to stop polling and close the repository.</p>
 *
 * <p>Thread-safety: {@code lock} guards {@code feedNodes},
 * {@code datastreams} and all repository writes; it is held by the polling
 * loop and by every public mutator.</p>
 */
public class PachubeRepositoryHandler extends Thread {

	/** Namespace under which URIs for generated events are minted. */
	public static final String EVENTNS = "http://events.event-processing.org/ids/";
	/** Suffix for event ids (unused here, kept for external callers). */
	public static final String EVENT_ID_SUFFIX = "#event";
	/** Base URI of Pachube/Cosm feeds. */
	public static final String PACHUBEFEEDNS = "https://cosm.com/feeds/";

	// Vocabulary terms used when building statements. These are immutable
	// constants, so they are shared across instances (static final).
	public static final URI RDFTYPE = new URIImpl("http://www.w3.org/1999/02/22-rdf-syntax-ns#type");
	public static final URI RDFSCOMMENT = new URIImpl("http://www.w3.org/2000/01/rdf-schema#comment");
	public static final URI RDFSLABEL = new URIImpl("http://www.w3.org/2000/01/rdf-schema#label");
	public static final URI RDFSSEEALSO = new URIImpl("http://www.w3.org/2000/01/rdf-schema#seeAlso");
	public static final URI XSDSTRING = new URIImpl("http://www.w3.org/2001/XMLSchema#string");
	public static final URI XSDDATETIME = new URIImpl("http://www.w3.org/2001/XMLSchema#dateTime");
	public static final URI XSDDOUBLE = new URIImpl("http://www.w3.org/2001/XMLSchema#double");
	public static final URI GEOLAT = new URIImpl("http://www.w3.org/2003/01/geo/wgs84_pos#lat");
	public static final URI GEOLONG = new URIImpl("http://www.w3.org/2003/01/geo/wgs84_pos#long");
	public static final URI GEOLOCATION = new URIImpl("http://www.w3.org/2003/01/geo/wgs84_pos#location");
	public static final URI EVENTENDTIME = new URIImpl("http://events.event-processing.org/types/endTime");
	public static final URI EVENTMINVAL = new URIImpl("http://www.linkedopenservices.org/ns/temp-json#min_value");
	public static final URI EVENTMAXVAL = new URIImpl("http://www.linkedopenservices.org/ns/temp-json#max_value");
	public static final URI EVENTVALUE = new URIImpl("http://www.linkedopenservices.org/ns/temp-json#value");
	public static final URI EVENTFEED = new URIImpl("http://www.linkedopenservices.org/ns/temp-json#feed");
	public static final URI EVENTID = new URIImpl("http://www.linkedopenservices.org/ns/temp-json#id");
	public static final Value PACHUBE_TYPE = new URIImpl("http://events.event-processing.org/types/PachubeEvent");


	private static Random random = new Random();

	// Counter used to mint unique blank-node ids; only touched under 'lock'.
	private int uniqueId = 0;
	// Written by shutDown() from another thread, read in run(): volatile so
	// the polling loop sees the stop request promptly.
	private volatile boolean keepRunning = true;

	private Repository repository;
	private NativeStore nStore;
	// Guards feedNodes, datastreams and all repository write access.
	private ReentrantLock lock;
	private PachubeHandler phandler;

	// feed id -> blank node representing that feed in the repository
	private HashMap<String, BNode> feedNodes;
	// feed id -> (stream id -> timestamp of the newest event already stored;
	// null means no event has been stored for that stream yet)
	private HashMap<String, HashMap<String, Date>> datastreams;


	/**
	 * Stores the static description of a Pachube feed (id, description,
	 * title and geo location, where present) in the repository.
	 *
	 * @param feedId id of the feed to describe; must not already be registered
	 * @return the blank node representing the feed, or null if the
	 *         repository could not be written
	 * @throws IllegalArgumentException if the feed was already added
	 */
	public BNode createFeedStatements(String feedId){
		if(feedNodes.containsKey(feedId)) throw new IllegalArgumentException("Already exists!");

		PachubeFeed feed = phandler.getFeed(feedId);
		BNode feedResource = new BNodeImpl(""+getUniqueId());

		// Local connection instead of the old shared field: the field was
		// also written by createEventStatements() from the polling thread.
		RepositoryConnection connection = null;
		try{
			connection = repository.getConnection();

			//Create id statement (untyped literal, matching data written so far)
			String id = feed.getId();
			if(id != null && !id.equals("")){
				connection.add(new StatementImpl(feedResource, EVENTID, new LiteralImpl(id)));
			}

			//Create description (comment) statement
			String description = feed.getDescription();
			if(description != null && !description.equals("")){
				connection.add(new StatementImpl(feedResource, RDFSCOMMENT, stringLit(description)));
			}

			//Create label statement
			String title = feed.getTitle();
			if(title != null && !title.equals("")){
				connection.add(new StatementImpl(feedResource, RDFSLABEL, new LiteralImpl(title)));
			}

			//Create location statements: lat/long hang off their own blank node
			PachubeLocation location = feed.getLocation();
			if(location != null){
				String lat = location.getLat();
				String lon = location.getLon();
				if(lat != null && lon != null && !lat.equals("") && !lon.equals("")){
					BNode locationNode = new BNodeImpl(""+getUniqueId());
					connection.add(new StatementImpl(locationNode, GEOLAT, new LiteralImpl(lat)));
					connection.add(new StatementImpl(locationNode, GEOLONG, new LiteralImpl(lon)));
					connection.add(new StatementImpl(feedResource, GEOLOCATION, locationNode));
				}
			}
		}
		catch(RepositoryException e){
			e.printStackTrace();
			return null;
		}
		finally{
			// Always release the connection (it used to leak on every call).
			closeQuietly(connection);
		}

		return feedResource;
	}


	/**
	 * Persists one event for the given datastream snapshot under a freshly
	 * minted random event URI, linked back to its feed's blank node.
	 * Numeric values are typed as xsd:double when they parse; otherwise
	 * they are stored as plain literals.
	 *
	 * @param parentFeed blank node of the feed the stream belongs to
	 * @param stream     snapshot of the datastream to persist
	 */
	public void createEventStatements(BNode parentFeed, PachubeDatastream stream){
		String eventId = EVENTNS + "pachubeevent" + Math.abs(random.nextLong());
		URI eventURI = new URIImpl(eventId);

		RepositoryConnection connection = null;
		try {
			connection = repository.getConnection();

			connection.add(new StatementImpl(eventURI, RDFTYPE, PACHUBE_TYPE));

			//Add corresponding Feed statement
			connection.add(new StatementImpl(eventURI, EVENTFEED, parentFeed));

			//Create endTime statement
			String date = stream.getAt();
			if(date != null && !date.equals("")){
				connection.add(new StatementImpl(eventURI, EVENTENDTIME, dateLit(date)));
			}

			//min/max/current value all follow the same typing rule
			addNumericStatement(connection, eventURI, EVENTMINVAL, stream.getMin_value());
			addNumericStatement(connection, eventURI, EVENTMAXVAL, stream.getMax_value());
			addNumericStatement(connection, eventURI, EVENTVALUE, stream.getCurrent_value());

			//Create id statement
			String id = stream.getId();
			if(id != null && !id.equals("")){
				connection.add(new StatementImpl(eventURI, EVENTID, new LiteralImpl(id)));
			}
		} catch (RepositoryException e) {
			e.printStackTrace();
		}
		finally{
			// Close on all paths; the connection used to leak on exceptions.
			closeQuietly(connection);
		}
	}

	/**
	 * Adds (subject, predicate, value) with the value typed as xsd:double
	 * if it parses as one, and as an untyped literal otherwise. Null and
	 * empty values are skipped.
	 */
	private void addNumericStatement(RepositoryConnection connection, URI subject, URI predicate, String value)
			throws RepositoryException {
		if(value == null || value.equals("")) return;
		Value literal;
		try{
			Double.parseDouble(value);
			literal = doubleLit(value);
		}
		catch(NumberFormatException e){
			System.out.println(value + " is not a double, adding as untyped Literal");
			literal = new LiteralImpl(value);
		}
		connection.add(new StatementImpl(subject, predicate, literal));
	}


	/**
	 * Polling loop: every five seconds, fetch the current state of every
	 * registered datastream and persist an event for each one whose
	 * timestamp advanced since the last poll. Stops when keepRunning is
	 * cleared or the thread is interrupted (see shutDown()).
	 */
	public void run(){
		while(keepRunning){
			lock.lock();
			try{
				// Re-check after acquiring: shutDown() may have closed the
				// repository while we were waiting for the lock.
				if(keepRunning){
					pollDatastreams();
				}
			}
			finally{
				lock.unlock();
			}
			try {
				Thread.sleep(5000);
			} catch (InterruptedException e) {
				// Interrupts are used as a stop/wake-up signal; restore the
				// flag and exit the loop.
				Thread.currentThread().interrupt();
				return;
			}
		}
	}

	/** One polling pass over all registered feeds/streams. Caller holds 'lock'. */
	private void pollDatastreams(){
		System.out.println("vvvvvvvvvvvvvvvvvvvvvvvv");
		Iterator<String> feedIt = datastreams.keySet().iterator();
		while(feedIt.hasNext()){
			String feed = feedIt.next();
			BNode feedNode = feedNodes.get(feed);
			HashMap<String, Date> streamMap = datastreams.get(feed);
			Iterator<String> streamIt = streamMap.keySet().iterator();
			while(streamIt.hasNext()){
				String stream = streamIt.next();
				Date lastDate = streamMap.get(stream);
				PachubeDatastream streamObj = phandler.getData(feed, stream);
				if(streamObj == null){
					System.out.println("WARNING: No Data returned for Feed " + feed + " , Stream " + stream);
					continue;
				}
				Date newDate = DatatypeConverter.parseDateTime(streamObj.getAt()).getTime();
				if(lastDate == null || newDate.after(lastDate)){
					System.out.println("New data for feed " + feed + " stream " + stream + ", adding");
					streamMap.put(stream, newDate);
					createEventStatements(feedNode, streamObj);
				}
				else{
					System.out.println("No new data for feed " + feed + " stream " + stream + ", skipping");
				}
			}
		}
		System.out.println("^^^^^^^^^^^^^^^^^^^^^^^^");
	}

	/**
	 * Registers a datastream for polling, creating the feed's descriptive
	 * statements first if this is the first stream of that feed. Logs and
	 * returns without registering when the feed or stream does not exist
	 * on Pachube.
	 *
	 * @param feedId   id of the Pachube feed
	 * @param streamId id of the datastream within that feed
	 * @throws IllegalStateException if the internal bookkeeping maps are
	 *         inconsistent or the feed statements could not be created
	 */
	public void addDatastream(String feedId, String streamId){
		lock.lock();
		try{
			boolean hasNode = feedNodes.containsKey(feedId);
			boolean hasStreams = datastreams.containsKey(feedId);
			// A feed must be tracked in both maps or in neither.
			if(hasNode != hasStreams){
				throw new IllegalStateException("Feed must be either present in both or neither!");
			}

			if(phandler.getFeed(feedId) == null){
				System.out.println("Feed " + feedId + " doesnt exist!");
				return;
			}
			if(phandler.getData(feedId, streamId) == null){
				System.out.println("Stream " + streamId + " for feed " + feedId + " doesnt exist!");
				return;
			}

			if(!hasNode){
				//No stream for this feed yet, add feed first!
				System.out.println("Add new Feed with id: " + feedId);
				BNode feedNode = createFeedStatements(feedId);
				if(feedNode == null){
					throw new IllegalStateException("Something went wrong while creating feed!");
				}
				feedNodes.put(feedId, feedNode);
				HashMap<String, Date> map = new HashMap<String, Date>();
				map.put(streamId, null);   // null = no event stored yet
				datastreams.put(feedId, map);
			}
			else{
				//Feed already known, just register the additional stream
				System.out.println("Add Stream to existing feed with id: " + feedId);
				datastreams.get(feedId).put(streamId, null);
			}
		}
		finally{
			// Single unlock point instead of one per early-exit path.
			lock.unlock();
		}
	}

	/**
	 * Opens the native store under REPODIR, connects to Pachube and
	 * registers the initial set of datastreams. Must be called before
	 * start().
	 */
	public void init(){
		try {
			lock = new ReentrantLock();
			nStore = new NativeStore(new File(REPODIR));
			repository = new SailRepository(nStore);
			repository.initialize();
			// NOTE(review): hard-coded API key; should be moved to configuration.
			phandler = new PachubeHandler("uOHj7Y0CB5rS9ESRCR26ea50InaSAKxjdzJjOTZyd1RWRT0g");

			feedNodes = new HashMap<String, BNode>();
			datastreams = new HashMap<String, HashMap<String, Date>>();

			addDatastream("9349", "0");
			addDatastream("46308", "Sensor01");
			addDatastream("46308", "Sensor02");

			System.out.println("init complete");
		} catch (RepositoryException e) {
			e.printStackTrace();
		}
	}

	/** Wraps a string as an xsd:string typed literal. */
	public Value stringLit(String value){
		return new LiteralImpl(value, XSDSTRING);
	}

	/** Wraps a string as an xsd:dateTime typed literal (value not validated). */
	public Value dateLit(String value){
		return new LiteralImpl(value, XSDDATETIME);
	}

	/** Wraps a string as an xsd:double typed literal (value not validated). */
	public Value doubleLit(String val){
		return new LiteralImpl(val, XSDDOUBLE);
	}



	/**
	 * Exports the whole repository as N3 into "outputPachube.n3" in the
	 * working directory. Errors are logged and swallowed.
	 */
	public void serializeRepo(){
		lock.lock();
		FileOutputStream fout = null;
		RepositoryConnection connection = null;
		try {
			File output = new File("outputPachube.n3");
			output.createNewFile();
			fout = new FileOutputStream(output);
			connection = repository.getConnection();
			connection.export(new N3Writer(fout), (Resource)null);
		} catch (RepositoryException e) {
			e.printStackTrace();
		} catch (RDFHandlerException e) {
			e.printStackTrace();
		} catch (IOException e) {
			e.printStackTrace();
		}
		finally{
			// Release both resources even when export fails part-way.
			closeQuietly(connection);
			if(fout != null){
				try {
					fout.close();
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
			lock.unlock();
		}
	}

	/**
	 * Exports the repository as N3 to System.out when toScreen is true,
	 * otherwise delegates to {@link #serializeRepo()}.
	 */
	public void serializeRepo(boolean toScreen){
		if(!toScreen){
			serializeRepo();
			return;
		}
		System.out.println("===================THE REPO:");
		RepositoryConnection connection = null;
		try {
			connection = repository.getConnection();
			connection.export(new N3Writer(System.out), (Resource)null);
		} catch (RepositoryException e) {
			e.printStackTrace();
		} catch (RDFHandlerException e) {
			e.printStackTrace();
		}
		finally{
			closeQuietly(connection);
		}
	}

	/**
	 * @return the number of statements in the repository, or -999 if it
	 *         could not be determined
	 */
	public long getRepoSize(){
		RepositoryConnection connection = null;
		try {
			connection = repository.getConnection();
			// Reuse this connection for size(); a second connection was
			// previously opened here and never closed.
			return connection.size();
		} catch (RepositoryException e) {
			e.printStackTrace();
			return -999;
		}
		finally{
			closeQuietly(connection);
		}
	}

	/** Returns the next blank-node id. Caller must hold 'lock'. */
	private int getUniqueId(){
		return ++uniqueId;
	}

	/** Closes a connection if non-null, logging (not propagating) failures. */
	private void closeQuietly(RepositoryConnection connection){
		if(connection == null) return;
		try {
			connection.close();
		} catch (RepositoryException e) {
			e.printStackTrace();
		}
	}


	/**
	 * Stops the polling loop and shuts the repository down, then interrupts
	 * this thread so a pending Thread.sleep() in run() ends immediately
	 * instead of delaying shutdown by up to five seconds.
	 */
	public void shutDown() {
		lock.lock();
		keepRunning = false;
		try {
			LoadingThread t = new LoadingThread("Shutting down Repository");
			t.start();
			repository.shutDown();
			t.stopRun();
		} catch (Exception e) {
			e.printStackTrace();
		}
		finally{
			lock.unlock();
		}
		// Wake the polling thread out of its sleep so it exits promptly.
		this.interrupt();
	}
}
