package kisti.lod.classification;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map.Entry;

import kisti.lod.classification.db.DBConnector;
import kisti.lod.classification.db.Domain;
import kisti.lod.classification.group.Group;
import kisti.lod.classification.group.Groups;

import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ResIterator;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.StmtIterator;
import com.hp.hpl.jena.tdb.TDBFactory;
import com.j256.ormlite.dao.CloseableIterator;
import com.j256.ormlite.dao.Dao;
import com.j256.ormlite.dao.DaoManager;

/**
 * Creates one FCA (Formal Concept Analysis) table per domain as a
 * comma-separated file: one row per non-anonymous subject, one 0/1 column per
 * predicate group (may be very slow on large triple stores).
 * 
 * @author Sebastian
 * 
 */
public class FCATableGrouped {

	/** Column separator of the generated table. */
	protected static final String SEPARATOR = ",";
	/** Qualifier wrapped around textual cells (currently none). */
	protected static final String TEXT_QUALIFIER = "";

	//TODO: general model has to be created
	protected static String generalModel = "tdb";

	/**
	 * Builds the group-based FCA incidence table for every domain stored in the
	 * database: for each non-anonymous subject of the domain's TDB model, a row
	 * of 0/1 flags is produced marking which predicate groups the subject uses.
	 * Rows are flushed to the domain's table file in batches of 10.000.
	 *
	 * @param args unused
	 * @throws IOException  if the table file cannot be written
	 * @throws SQLException if the domain DAO cannot be created
	 */
	@SuppressWarnings("unchecked")
	public static void main(String[] args) throws IOException, SQLException {

		// group name -> 0/1 flag; serves as the all-zero template row that is
		// cloned once per subject
		LinkedHashMap<String, Integer> predicateMap = new LinkedHashMap<String, Integer>();
		// predicate URI -> name of the group (i.e. column) it belongs to
		LinkedHashMap<String, String> predicateList = new LinkedHashMap<String, String>();
		// subject URI -> its group incidence row; flushed to disk in batches
		HashMap<String, LinkedHashMap<String, Integer>> fcaTable = new HashMap<String, LinkedHashMap<String, Integer>>();

		System.out.println("Generating Matrix...");

		StringBuilder header = new StringBuilder();

		Groups groups = new Groups();
		HashMap<String, Group> groupsMap = groups.getGroups();
		for (String groupName : groupsMap.keySet()) {
			Group group = groupsMap.get(groupName);

			predicateMap.put(groupName, 0);
			header.append(SEPARATOR).append(TEXT_QUALIFIER).append(groupName)
					.append(TEXT_QUALIFIER);

			// remember for every predicate which group column it maps to
			for (String predicate : group.getPredicates()) {
				predicateList.put(predicate, groupName);
			}
		}

		// iterate over domains and their respective triplestores
		Dao<Domain, String> domainDao = DaoManager.createDao(
				DBConnector.getConnectionSource(), Domain.class);
		CloseableIterator<Domain> domainIterator = domainDao.iterator();
		System.out.println("Iterating Domains..");
		try {
			while (domainIterator.hasNext()) {
				String domain = domainIterator.next().getName();
				// TODO: debug restriction — only the "location" domain is processed
				if (!domain.equals("location")) {
					continue;
				}
				System.out.println("Iterating " + domain);
				Dataset ds = TDBFactory.createDataset(FileFactory.getTdbDomainFolder(domain));
				try {
					Model model = ds.getDefaultModel();

					File tableFile = FileFactory.getFcaTableFile(domain);
					BufferedWriter tableWriter = new BufferedWriter(new FileWriter(
							tableFile, true));
					try {
						tableWriter.append(header + "\n");
						// iterate over all subjects in the selected model
						ResIterator subjects = model.listSubjects();
						int n = 1;
						int writes = 0;
						while (subjects.hasNext()) {

							Resource subject = subjects.next();

							if (!subject.isAnon()) {
								// start from a fresh all-zero row for this subject
								LinkedHashMap<String, Integer> predicateVector = (LinkedHashMap<String, Integer>) predicateMap
										.clone();

								// flag every group one of the subject's predicates belongs to
								StmtIterator predicates = subject.listProperties();
								while (predicates.hasNext()) {
									String predicate = predicates.next().getPredicate().toString();
									if (predicateList.containsKey(predicate)) {
										predicateVector.put(predicateList.get(predicate), 1);
									}
								}

								fcaTable.put(subject.toString(), predicateVector);
							}

							// write to file and start a new batch after 10.000 subjects
							if (n >= 10000) {
								writeToFile(fcaTable, tableWriter);
								fcaTable = new HashMap<String, LinkedHashMap<String, Integer>>();
								writes++;
								System.out.println((writes * n) + " subjects processed...");
								n = 0;
							}
							n++;
						}

						// flush the remaining partial batch, then reset the table so
						// rows are not duplicated into the next domain's file
						writeToFile(fcaTable, tableWriter);
						fcaTable = new HashMap<String, LinkedHashMap<String, Integer>>();
					} finally {
						tableWriter.close();
					}
				} finally {
					ds.close();
				}
			}
		} finally {
			// release the underlying JDBC resources held by the iterator
			domainIterator.closeQuietly();
		}
	}

	/**
	 * Writes the contents of the Uri-Predicate-Map as CSV rows using the passed
	 * writer: one line per subject, the subject URI first, followed by its 0/1
	 * group flags in column order.
	 *
	 * @param table  subject URI mapped to its group incidence vector
	 * @param writer destination; flushed/closed by the caller, not here
	 * @throws IOException if appending to the writer fails
	 */
	public static void writeToFile(
			HashMap<String, LinkedHashMap<String, Integer>> table,
			BufferedWriter writer) throws IOException {
		StringBuilder sb = new StringBuilder();
		for (Entry<String, LinkedHashMap<String, Integer>> entry : table.entrySet()) {
			// commas inside the URI would break the CSV format, so strip them
			sb.append(TEXT_QUALIFIER).append(entry.getKey().replace(",", ""))
					.append(TEXT_QUALIFIER);
			for (Entry<String, Integer> predEntry : entry.getValue().entrySet()) {
				sb.append(SEPARATOR).append(predEntry.getValue());
			}
			sb.append("\n");
		}
		writer.append(sb.toString());
	}
}