package xw4g08.voiD;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.zip.GZIPInputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

import org.openrdf.model.BNode;
import org.openrdf.model.Literal;
import org.openrdf.model.Resource;
import org.openrdf.model.Statement;
import org.openrdf.model.URI;
import org.openrdf.model.Value;
import org.openrdf.model.ValueFactory;
import org.openrdf.model.impl.ValueFactoryImpl;
import org.openrdf.model.vocabulary.RDF;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFHandlerException;
import org.openrdf.rio.RDFParseException;
import org.openrdf.rio.RDFParser;
import org.openrdf.rio.RDFWriter;
import org.openrdf.rio.Rio;
import org.openrdf.rio.helpers.RDFHandlerBase;

import xw4g08.Configuration;


/**
 * Streams an RDF file through a Sesame parser and emits VoID statistics
 * (triple/entity counts, per-predicate and per-class partitions) as a
 * Turtle file in {@code Configuration.STA_OUT}.
 *
 * <p>Not thread-safe: one instance processes one input file.
 */
public class VoidGenerator extends RDFHandlerBase {

	/** Number of rdf:type occurrences per class URI. */
	private final Map<URI, Integer> typeCountMap = new HashMap<URI, Integer>();
	/** Triple count per predicate. */
	private final Map<URI,Long> predicates = new HashMap<URI,Long>();
	/** Distinct subjects seen per predicate. */
	private final Map<URI,Set<Resource>> distSubject = new HashMap<URI,Set<Resource>>();
	/** Distinct objects seen per predicate. */
	private final Map<URI,Set<Value>> distObject = new HashMap<URI,Set<Value>>();

	private long tripleCount;
	private long entityCount;

	private ValueFactory vf = ValueFactoryImpl.getInstance();
	private Resource dataset;
	private String endpoint;

	private RDFWriter writer;
	// kept so endRDF() can close the file: the RDFWriter only flushes it
	private OutputStream out;

	/** Orders values lexicographically by string form, for deterministic output. */
	private final Comparator<Value> VAL_COMP = new Comparator<Value>() {
		@Override public int compare(Value val1, Value val2) {
			return val1.stringValue().compareTo(val2.stringValue());
		}
	};

	// ------------------------------------------------------------------------

	/**
	 * Creates a generator writing to {@code Configuration.STA_OUT/<filename>.ttl}.
	 *
	 * @param filename name of the input file (used to derive the endpoint URI
	 *                 and the output file name)
	 * @throws IllegalStateException if the output file cannot be opened
	 */
	public VoidGenerator(String filename) {
		endpoint = FileToEndpoint.getURI(filename);
		dataset = vf.createBNode();

		try {
			out = new FileOutputStream(new File(Configuration.STA_OUT, filename+".ttl"));
			writer = new CompactBNodeTurtleWriter(out);
		} catch (FileNotFoundException e) {
			// fail fast: merely printing the exception would leave 'writer' null
			// and cause an NPE on the first handler callback
			throw new IllegalStateException("cannot open output file for " + filename, e);
		}
	}

	/** Increments the occurrence counter for the given class URI. */
	private void countType(URI type) {
		Integer count = typeCountMap.get(type);
		if (count == null) {
			typeCountMap.put(type, 1);
		} else {
			typeCountMap.put(type, 1 + count);
		}
	}

	/**
	 * Emits one void:propertyPartition blank node with triple count and
	 * distinct subject/object counts for a single predicate.
	 */
	private void writePredicateStatToVoid(URI predicate, long pCount, int distS, int distO) {
		BNode propPartition = vf.createBNode();
		Literal count = vf.createLiteral(String.valueOf(pCount));
		Literal distinctS  = vf.createLiteral(String.valueOf(distS));
		Literal distinctO  = vf.createLiteral(String.valueOf(distO));
		try {
			writer.handleStatement(vf.createStatement(dataset, vf.createURI(VOID.propertyPartition.toString()), propPartition));
			writer.handleStatement(vf.createStatement(propPartition, vf.createURI(VOID.property.toString()), predicate));
			writer.handleStatement(vf.createStatement(propPartition, vf.createURI(VOID.triples.toString()), count));
			writer.handleStatement(vf.createStatement(propPartition, vf.createURI(VOID.distinctSubjects.toString()), distinctS));
			writer.handleStatement(vf.createStatement(propPartition, vf.createURI(VOID.distinctObjects.toString()), distinctO));
		} catch (RDFHandlerException e) {
			e.printStackTrace();
		}
	}

	/**
	 * Emits one void:classPartition blank node with the entity count for a
	 * single class.
	 */
	private void writeTypeStatToVoid(Value type, long tCount) {
		BNode classPartition = vf.createBNode();
		Literal count = vf.createLiteral(String.valueOf(tCount));
		try {
			writer.handleStatement(vf.createStatement(dataset, vf.createURI(VOID.classPartition.toString()), classPartition));
			writer.handleStatement(vf.createStatement(classPartition, vf.createURI(VOID.clazz.toString()), type));
			writer.handleStatement(vf.createStatement(classPartition, vf.createURI(VOID.entities.toString()), count));
		} catch (RDFHandlerException e) {
			e.printStackTrace();
		}
	}

	/**
	 * Emits dataset-wide totals: triples, properties, classes, entities and
	 * the overall distinct subject/object counts (union over all predicates).
	 * Note: counts are written as plain string literals, matching the
	 * per-partition statements above.
	 */
	private void writeGeneralStats() {
		try {
			writer.handleStatement(vf.createStatement(dataset, vf.createURI(VOID.triples.toString()), vf.createLiteral(String.valueOf(tripleCount))));
			writer.handleStatement(vf.createStatement(dataset, vf.createURI(VOID.properties.toString()), vf.createLiteral(String.valueOf(predicates.size()))));
			writer.handleStatement(vf.createStatement(dataset, vf.createURI(VOID.classes.toString()), vf.createLiteral(String.valueOf(typeCountMap.size()))));
			writer.handleStatement(vf.createStatement(dataset, vf.createURI(VOID.entities.toString()), vf.createLiteral(String.valueOf(entityCount))));
			// distinct counts must be computed over the union of the per-predicate
			// sets, not summed, to avoid double-counting shared terms
			Set<Resource> totalDisSub = new HashSet<Resource>();
			for(Set<Resource> sub:distSubject.values()) {
				totalDisSub.addAll(sub);
			}
			Set<Value> totalDisObj = new HashSet<Value>();
			for(Set<Value> obj:distObject.values()) {
				totalDisObj.addAll(obj);
			}
			writer.handleStatement(vf.createStatement(dataset, vf.createURI(VOID.distinctSubjects.toString()), vf.createLiteral(String.valueOf(totalDisSub.size()))));
			writer.handleStatement(vf.createStatement(dataset, vf.createURI(VOID.distinctObjects.toString()), vf.createLiteral(String.valueOf(totalDisObj.size()))));
		} catch (RDFHandlerException e) {
			e.printStackTrace();
		}
	}

	// ------------------------------------------------------------------------

	/** Opens the output document and writes the dataset header statements. */
	@Override
	public void startRDF() throws RDFHandlerException {
		super.startRDF();

		writer.startRDF();

		// following namespaces which will be shortened automatically
		writer.handleNamespace("void", "http://rdfs.org/ns/void#");

		// general void information
		writer.handleStatement(vf.createStatement(dataset, RDF.TYPE, vf.createURI(VOID.Dataset.toString())));
		writer.handleStatement(vf.createStatement(dataset, vf.createURI(VOID.sparqlEndpoint.toString()), vf.createURI(endpoint)));
	}

	/**
	 * Aggregates one parsed triple into the per-predicate statistics and,
	 * for rdf:type triples, into the per-class entity counts.
	 */
	@Override
	public void handleStatement(Statement st) throws RDFHandlerException {

		tripleCount++;
		Resource subject = st.getSubject();
		URI predicate = st.getPredicate();
		Value object = st.getObject();
		//Initialise
		if(predicates.get(predicate) == null) {
			predicates.put(predicate, 0L);
			distSubject.put(predicate, new HashSet<Resource>());
			distObject.put(predicate, new HashSet<Value>());
		}

		//aggregate
		long pcount = predicates.get(predicate);
		pcount++;
		predicates.put(predicate, pcount);
		distSubject.get(predicate).add(subject);
		distObject.get(predicate).add(object);

		if(predicate.equals(RDF.TYPE)) {
			// the object of rdf:type should be a URI; guard against malformed
			// data (bnode/literal object) which would otherwise abort parsing
			// with a ClassCastException
			if (object instanceof URI) {
				countType((URI) object);
			}
			entityCount++;
		}
	}

	/**
	 * Writes all collected statistics (predicate partitions, class partitions,
	 * dataset totals), finishes the document and closes the output file.
	 */
	@Override
	public void endRDF() throws RDFHandlerException {
		super.endRDF();

		// write predicate statistics in deterministic (lexicographic) order
		List<URI> predts = new ArrayList<URI>(predicates.keySet());
		Collections.sort(predts,VAL_COMP);
		for(URI predicate:predts) {
			writePredicateStatToVoid(predicate,predicates.get(predicate),
					distSubject.get(predicate).size(),distObject.get(predicate).size());
		}

		// write type statistics
		List<URI> types = new ArrayList<URI>(typeCountMap.keySet());
		Collections.sort(types, VAL_COMP);
		for (URI uri : types) {
			writeTypeStatToVoid(uri, typeCountMap.get(uri));
		}

		writeGeneralStats();

		writer.endRDF();

		// close the underlying file: the Turtle writer only flushes it,
		// otherwise the handle leaks when many files are processed
		try {
			if (out != null) {
				out.close();
			}
		} catch (IOException e) {
			throw new RDFHandlerException(e);
		}
	}

	// ------------------------------------------------------------------------

	/**
	 * CLI entry point: processes each argument, recursing into directories.
	 * Exits with status 1 on the first fatal error.
	 */
	public static void main(String[] args) throws Exception{

		// check for file parameter
		if (args.length < 1) {
			String className = VoidGenerator.class.getName();
			System.err.println("USAGE: java " + className + " RDF.nt{.zip}");
			System.exit(1);
		}

		// process all files given as parameters
		for (String arg : args) {

			// check if file exists
			File file = new File(arg);
			if (!file.exists()) {
				System.err.println("file not found: " + file);
				System.exit(1);
			}

			// recurse into directories, process plain files directly
			if (file.isDirectory()) {
				processDirectory(file);
			}else {
				processFile(file);
			}
		}
	}

	/** Recursively processes every regular file below {@code dir}. */
	public static void processDirectory(File dir) throws IOException {
		File[] files = dir.listFiles();
		if (files == null) {
			// listFiles() returns null on I/O error (or if dir vanished)
			System.err.println("cannot list directory: " + dir);
			return;
		}
		for(int i=0;i<files.length;i++) {
			if(files[i].isDirectory()) {
				processDirectory(files[i]);
			}
			else if(files[i].isFile()) {
				processFile(files[i]);
			}
		}
	}

	/**
	 * Opens {@code file} — transparently unwrapping .gz and .zip archives —
	 * and feeds its content to {@link #processInputStream}.
	 */
	public static void processFile(File file) throws IOException {

		// Locale.ROOT keeps the extension check locale-independent;
		// endsWith (not contains) avoids misclassifying names like "data.gz.nt"
		String name = file.getName().toLowerCase(Locale.ROOT);

		// check for gzip file
		if (name.endsWith(".gz")) {
			processInputStream(new GZIPInputStream(new FileInputStream(file)), file.getName());
		}

		// check for zip file
		else if (name.endsWith(".zip")) {
			ZipFile zf = new ZipFile(file);
			try {
				if (zf.size() == 0) {
					System.err.println("empty archive: " + file);
					System.exit(1);
				}
				if (zf.size() > 1) {
					System.err.println("found multiple files in archive, processing only first one.");
				}
				ZipEntry entry = zf.entries().nextElement();
				if (entry.isDirectory()) {
					System.err.println("found directory instead of normal file in archive: " + entry.getName());
					System.exit(1);
				}

				processInputStream(zf.getInputStream(entry), entry.getName());
			} finally {
				// the ZipFile holds its own file handle, independent of the
				// entry stream closed in processInputStream
				zf.close();
			}
		}

		// process data stream of file
		else {
			processInputStream(new FileInputStream(file), file.getName());
		}
	}

	/**
	 * Parses the RDF data on {@code input} (format guessed from
	 * {@code filename}) through a fresh {@link VoidGenerator}.
	 * Always closes {@code input}; exits with status 1 on parse errors.
	 */
	public static void processInputStream(InputStream input, String filename) throws IOException {

		long start = System.currentTimeMillis();
		System.err.println("processing " + filename);

		// identify parser format
		RDFFormat format = Rio.getParserFormatForFileName(filename);
		if (format == null) {
			System.err.println("can not identify RDF format for: " + filename);
			System.exit(1);
		}
		// initalize parser
		VoidGenerator handler;
		handler = new VoidGenerator(filename);
		RDFParser parser = Rio.createParser(format);
		parser.setStopAtFirstError(false);
		parser.setRDFHandler(handler);

		try {
			parser.parse(input, "");
		} catch (RDFParseException e) {
			System.err.println("encountered error while parsing " + filename + ": " + e.getMessage());
			System.exit(1);
		} catch (RDFHandlerException e) {
			System.err.println("encountered error while processing " + filename + ": " + e.getMessage());
			System.exit(1);
		}
		finally {
			input.close();
		}

		System.err.println((System.currentTimeMillis() - start)/1000 + " seconds elapsed");
	}

}
