package kisti.lod.classification;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map.Entry;

import kisti.lod.classification.db.DBConnector;
import kisti.lod.classification.db.Domain;

import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ResIterator;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.StmtIterator;
import com.hp.hpl.jena.tdb.TDBFactory;
import com.j256.ormlite.dao.CloseableIterator;
import com.j256.ormlite.dao.Dao;
import com.j256.ormlite.dao.DaoManager;

/**
 * Creates an FCA (Formal Concept Analysis) table as a comma separated list:
 * one CSV file per domain, one row per non-anonymous subject, one 0/1 column
 * per known predicate (might be very slow..).
 *
 * @author Sebastian
 */
public class FCATable {

	protected static final String SEPARATOR = ",";
	protected static final String TEXT_QUALIFIER = "";

	/** Number of subjects buffered in memory before flushing a batch to disk. */
	protected static final int BATCH_SIZE = 100000;

	//TODO: general model has to be created
	//protected static String generalModel = FileFactory.getTdbFolder();
	protected static String generalModel = "tdb";

	/**
	 * Reads the known predicate URIs from {@code predicates.txt}, then iterates
	 * every domain's TDB store and appends a CSV incidence row per subject.
	 *
	 * @param args unused
	 * @throws IOException if the predicate file cannot be read or a table file written
	 * @throws SQLException on database access errors
	 */
	public static void main(String[] args) throws IOException, SQLException {

		// TODO: maybe a vector should suffice for the predicates.
		// as soon as the structure is given, the uris aren't needed any more
		LinkedHashMap<String, Integer> predicateMap = new LinkedHashMap<String, Integer>();
		HashMap<String, LinkedHashMap<String, Integer>> fcaTable = new HashMap<String, LinkedHashMap<String, Integer>>();

		System.out.println("Generating Matrix...");

		// Build the CSV header from the predicate list. Commas inside a
		// predicate URI are escaped as %2C so they cannot break the columns.
		StringBuilder header = new StringBuilder();
		File predFile = new File(FileFactory.getFcaTableFolder() + "/predicates.txt");
		BufferedReader br = new BufferedReader(new FileReader(predFile));
		try {
			String line;
			while ((line = br.readLine()) != null) {
				predicateMap.put(line, 0);
				header.append(SEPARATOR).append(TEXT_QUALIFIER)
						.append(line.replace(",", "%2C")).append(TEXT_QUALIFIER);
			}
		} finally {
			// close even if readLine() throws
			br.close();
		}

		// iterate over domains and their respective triplestores
		Dao<Domain, String> domainDao = DaoManager.createDao(
				DBConnector.getConnectionSource(), Domain.class);
		CloseableIterator<Domain> domainIterator = domainDao.iterator();
		System.out.println("Iterating Domains..");
		try {
			while (domainIterator.hasNext()) {
				String domain = domainIterator.next().getName();
				// NOTE(review): processing is currently restricted to the
				// "location" domain only — remove this guard to process all.
				if (!domain.equals("location")) {
					continue;
				}
				System.out.println("Iterating " + domain);
				Dataset ds = TDBFactory.createDataset(FileFactory.getTdbDomainFolder(domain));
				Model model = ds.getDefaultModel();

				File tableFile = FileFactory.getFcaTableFile(domain);
				BufferedWriter tableWriter = new BufferedWriter(new FileWriter(
						tableFile, true));
				try {
					tableWriter.append(header.toString()).append("\n");
					// iterate over all subjects in the selected model
					ResIterator subjects = model.listSubjects();
					int n = 1;
					int writes = 0;
					while (subjects.hasNext()) {
						Resource subject = subjects.next();

						if (!subject.isAnon()) {
							StmtIterator predicates = subject.listProperties();
							// copy constructor instead of clone(): type-safe,
							// no unchecked cast / @SuppressWarnings needed
							LinkedHashMap<String, Integer> predicateVector =
									new LinkedHashMap<String, Integer>(predicateMap);

							// mark every known predicate that occurs on this subject
							while (predicates.hasNext()) {
								String predicate = predicates.next().getPredicate().toString();

								if (predicateMap.containsKey(predicate)) {
									predicateVector.put(predicate, 1);
								}
							}

							fcaTable.put(subject.toString(), predicateVector);
						}

						// flush to file and start a new table every BATCH_SIZE subjects
						if (n >= BATCH_SIZE) {
							writeToFile(fcaTable, tableWriter);
							fcaTable = new HashMap<String, LinkedHashMap<String, Integer>>();
							writes++;
							System.out.println((writes * BATCH_SIZE) + " subjects processed ...");
							n = 0;
						}
						n++;
					}

					// flush the final partial batch, then reset so rows cannot
					// leak into the next domain's file
					writeToFile(fcaTable, tableWriter);
					fcaTable = new HashMap<String, LinkedHashMap<String, Integer>>();
				} finally {
					tableWriter.close();
					ds.close();
				}
			}
		} finally {
			// release the JDBC cursor backing the domain iterator
			domainIterator.close();
		}
	}

	/**
	 * Writes the contents of the Uri-Predicate-Map using the passed writer.
	 * Each entry becomes one CSV row: the (comma-escaped) subject URI followed
	 * by the 0/1 incidence value of every predicate column, in the insertion
	 * order of the per-subject {@link LinkedHashMap}.
	 *
	 * @param table  map from subject URI to its predicate incidence vector
	 * @param writer destination writer; flushed/closed by the caller, not here
	 * @throws IOException if writing fails
	 */
	public static void writeToFile(
			HashMap<String, LinkedHashMap<String, Integer>> table,
			BufferedWriter writer) throws IOException {
		StringBuilder sb = new StringBuilder();
		for (Entry<String, LinkedHashMap<String, Integer>> entry : table.entrySet()) {
			sb.append(TEXT_QUALIFIER)
					.append(entry.getKey().replace(",", "%2C"))
					.append(TEXT_QUALIFIER);
			for (Entry<String, Integer> predEntry : entry.getValue().entrySet()) {
				sb.append(SEPARATOR).append(predEntry.getValue());
			}
			sb.append("\n");
		}
		writer.append(sb.toString());
	}
}