package org.nlp2rdf.navigator.benchmark;

import com.google.common.base.Joiner;
import com.google.common.collect.*;
import com.hp.hpl.jena.rdf.model.Resource;
import org.aksw.commons.util.Files;
import org.aksw.commons.util.SerializationUtils;
import org.aksw.commons.util.experiments.*;
import org.aksw.commons.semweb.sparql.core.CachingSparqlEndpoint;
import org.aksw.commons.semweb.sparql.core.HttpSparqlEndpoint;
import org.aksw.commons.semweb.sparql.core.ISparqlEndpoint;
import org.aksw.commons.util.collections.*;
import org.aksw.commons.util.experiments.Table;
import org.aksw.commons.util.experiments.impl.GNUPlotRowFormatter;
import org.aksw.commons.util.experiments.impl.LatexRowFormatter;
import org.aksw.commons.util.random.RandomUtils;
import org.apache.commons.lang.ObjectUtils;
import org.dllearner.core.EvaluatedDescription;
import org.nlp2rdf.navigator.refactored.IClassifier;
import org.nlp2rdf.navigator.component.learn.ILearn;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.aksw.commons.util.strings.StringUtils;

import java.io.File;
import java.io.IOException;
import java.net.URLEncoder;
import java.util.*;


/********
 *
 * THIS FILE IS SUBJECT TO REMOVAL!
 * HOWEVER, THERE ARE A FEW REFERENCES TO IT LEFT
 *
 */


/**
 * Created by Claus Stadler
 * Date: Oct 6, 2010
 * Time: 6:22:10 PM
 */
public class ClausBenchmark<E, D>
{
    private static final Logger logger = LoggerFactory.getLogger(ClausBenchmark.class);

    /**
     * Formats a collection as its size followed by its sorted content,
     * e.g. {@code (3)[A, B, C]}, without any length cropping.
     *
     * @param collection the items to render
     * @return the formatted string, preceded by the collection's size
     */
    public static <T> String toMyString(Collection<T> collection)
    {
        // A limit of 0 disables cropping in the two-argument variant.
        return toMyString(collection, 0);
    }

    /**
     * Formats a collection as its size followed by its sorted items, e.g.
     * {@code (2)[Bar, Foo]}. The DBpedia resource namespace prefix is
     * stripped from each item to keep the output readable; null items
     * render as the empty string (via {@code ObjectUtils.toString}).
     *
     * @param collection the items to render
     * @param limit maximum length of the result; values &lt;= 0 disable cropping
     * @return the (possibly cropped) formatted string
     */
    public static <T> String toMyString(Collection<T> collection, int limit)
    {
        List<String> list = new ArrayList<String>();
        for(T item : collection) {
            // Use replace() rather than replaceAll(): the prefix is a literal
            // substring, and replaceAll() would interpret its dots as regex
            // wildcards.
            list.add(ObjectUtils.toString(item).replace("http://dbpedia.org/resource/", ""));
        }

        Collections.sort(list);

        String str = "(" + list.size() + ")" + list;
        if(limit > 0) {
            str = StringUtils.cropString(str, limit, 0);
        }
        return str;
    }

    /**
     * Static init stuff (bootstrapping and such)
     *
     */
    // Shared Spring context used for all bean lookups ("basePath",
    // "numPhases", "dbpedia.learn", ...). Created with an empty config file
    // list — NOTE(review): presumably the bean definitions are registered
    // elsewhere; confirm how this context actually gets populated.
    static ApplicationContext context = new ClassPathXmlApplicationContext(new String[] {});


    // Hard-coded pool of DBpedia resources (actors/directors) usable as a
    // fixed set of negative examples; referenced by commented-out code in
    // myBenchmark().
    static List<String> negativesFix = Arrays.asList(
            "http://dbpedia.org/resource/Alfonso_Arau",
            "http://dbpedia.org/resource/Alfonso_Cuar%C3%B3n",
            "http://dbpedia.org/resource/Arnold_Schwarzenegger__Arnold_StrongArnie",
            "http://dbpedia.org/resource/Amitabh_Bachchan",
            "http://dbpedia.org/resource/Ardal_O%27Hanlon",
            "http://dbpedia.org/resource/Anthony_Hopkins",
            "http://dbpedia.org/resource/Bille_August",
            "http://dbpedia.org/resource/Bill_Macy",
            "http://dbpedia.org/resource/Bill_Mumy",
            "http://dbpedia.org/resource/Bill_Bixby");


    /**
     * Renders a map as one {@code key: value} line per entry, each
     * terminated by a newline. Iteration order is the map's own order.
     *
     * @param map the map to render; must not be null
     * @return one line per entry, empty string for an empty map
     */
    public static String toString(Map<?, ?> map)
    {
        // StringBuilder instead of repeated String concatenation so that
        // rendering large maps stays linear rather than quadratic.
        StringBuilder result = new StringBuilder();
        for(Map.Entry<?, ?> entry : map.entrySet()) {
            result.append(entry.getKey()).append(": ").append(entry.getValue()).append("\n");
        }
        return result.toString();
    }

    /**
     * Command-line entry point. Only the navigation simulation is currently
     * enabled; findLearnableCategories() and myBenchmark() are the other,
     * manually toggled experiments in this class.
     *
     * @param args ignored
     * @throws Exception propagated from the selected experiment
     */
    public static void main(String[] args)
        throws Exception
    {
        //findLearnableCategories();
        simulateNavigation();
    }


    /**
     *
     * @param category
     * @return
     * /
    public Sample<Resource> createSimulatedNavigationPool(ISparqlEndpoint sparqlEndpoint, Resource category, Set<Resource> seed)
    {
        Sample<Resource> pool = Sample.create();
        //pool.getPositives().addAll(allTyped.getPositives());

        Set<Resource> remaining = new HashSet<Resource>(allTyped.getPositives());
        int numPoolIterations = 0;
        while(!remaining.isEmpty() && (pool.getPositives().size() < 100 && pool.getNegatives().size() < 100)) {
            ++numPoolIterations;

            // FIXME May result in endless loop
            logger.debug("Constructing pool: remaining/+/- = " + Joiner.on("/").join(remaining.size(), pool.getPositives().size(), pool.getNegatives().size()));
            Resource navigationSeed = RandomUtils.randomItem(remaining, random);

            Sample<Resource> tmpPool = MySparqlTasks.getNavigationExamples(con.getSparqlEndpoint(), con.getGraphNames(), navigationSeed, category, 1);

            // Just to make sure the pool is actually correct
            Sample<Resource> tmpPool2 = Sample.create(
                    Sets.intersection(allTyped.getPositives(), tmpPool.getPositives()),
                    tmpPool.getNegatives());

            remaining.remove(navigationSeed);
            //remaining.removeAll(tmpPool.getPositives());
            pool.addAll(tmpPool2);

        }
        logger.info("poolIterations: " + numPoolIterations);

    }*/

    /**
     * Runs the "simulated navigation" benchmark: for every sufficiently large
     * DBpedia category it builds an example pool via simulated user
     * navigation, performs a truncated 10-fold cross validation with
     * iterative learning phases, and writes the collected samples and
     * statistics below {@code basePath/experimentName}.
     *
     * <p>Configuration (base path, number of phases, endpoint, learner,
     * classifier) is taken from the static Spring {@link #context}.</p>
     *
     * @throws Exception on configuration, SPARQL or serialization failures
     */
    public static void simulateNavigation()
        throws Exception
    {
        // Actually here should be something like (BenchmarkConfig)context.get("benchmarkBean")
        BenchmarkContext con = new BenchmarkContext(context);
        String basePath = (String)context.getBean("basePath");
        int numPhases = (Integer)context.getBean("numPhases");
        logger.info("Max number of phases is " + numPhases);
        String experimentName = "Quick_Max3FoldsAndPhases_TenFoldCV_Navigation";

        // Fixed seed so runs are reproducible.
        Random random = new Random(0l);


        Multimap<Integer, Resource> sizeToCategory = MySparqlTasks.getCategoriesWithTypedMembersAndWithoutNumbers(con.getSparqlEndpoint(), con.getGraphNames());

        String experimentPath = basePath + "/" + experimentName;


        // Resume hack: every category before the hard-coded entry point below
        // is skipped; useful when restarting an aborted run. TODO make this
        // configurable instead of editing the source.
        boolean entryPointFound = false;
        for(Map.Entry<Integer, Resource> entry : sizeToCategory.entries()) {

            Resource category = entry.getValue();
            try {

                // Only categories with at least 100 typed members qualify.
                if(entry.getKey() < 100) {
                    logger.debug("Skipping " + category);
                    continue;
                }

                // FIXME
                if(category.toString().equals("http://dbpedia.org/resource/Category:Albion_Rovers_F.C._players")) {
                    entryPointFound = true;
                }

                if(!entryPointFound) {
                    logger.info("Skipping " + category);
                    continue;
                }

                logger.info("Entering category " + category);


                String categoryPath = experimentPath + "/" + URLEncoder.encode(category.toString(), "UTF-8");


                // All members of the category; negatives are unknown (null).
                Sample<Resource> all = Sample.create(
                        MySparqlTasks.getDBpediaCategoryMembers(con.getSparqlEndpoint(), con.getGraphNames(), category),
                        null);

                SerializationUtils.serializeXml(Sample.createCopy(all), new File(categoryPath + "/all.xml"), true);

                // The subset of members that also carry an rdf:type.
                Sample<Resource> allTyped = Sample.create(
                        MySparqlTasks.getTypedDBpediaCategoryMembers(con.getSparqlEndpoint(), con.getGraphNames(), category),
                        null);

                // Bug fix: this previously serialized 'all' a second time, so
                // allTyped.xml never contained the typed sample.
                SerializationUtils.serializeXml(Sample.createCopy(allTyped), new File(categoryPath + "/allTyped.xml"), true);

                logger.info("Typed Members: " + SampleStats.toStringWithSize(allTyped.getPositives()));


                // Fixme: It seems as if there not all positives come back again
                // Try to retrieve enough seed negatives
                Sample<Resource> pool = Sample.create();
                //pool.getPositives().addAll(allTyped.getPositives());

                // Grow the pool by navigating from random typed members until
                // 100 positives or 100 negatives were collected (or no seeds
                // remain).
                Set<Resource> remaining = new HashSet<Resource>(allTyped.getPositives());
                int numPoolIterations = 0;
                while(!remaining.isEmpty() && (pool.getPositives().size() < 100 && pool.getNegatives().size() < 100)) {
                    ++numPoolIterations;

                    // FIXME May result in endless loop
                    logger.debug("Constructing pool: remaining/+/- = " + Joiner.on("/").join(remaining.size(), pool.getPositives().size(), pool.getNegatives().size()));
                    Resource navigationSeed = RandomUtils.randomItem(remaining, random);

                    Sample<Resource> tmpPool = MySparqlTasks.getNavigationExamples(con.getSparqlEndpoint(), con.getGraphNames(), navigationSeed, category, 1);

                    // Just to make sure the pool is actually correct
                    Sample<Resource> tmpPool2 = Sample.create(
                            Sets.intersection(allTyped.getPositives(), tmpPool.getPositives()),
                            tmpPool.getNegatives());

                    remaining.remove(navigationSeed);
                    //remaining.removeAll(tmpPool.getPositives());
                    pool.addAll(tmpPool2);

                }
                logger.info("poolIterations: " + numPoolIterations);
                logger.info("poolPositives: " + SampleStats.toStringWithSize(pool.getPositives()));
                logger.info("poolNegatives: " + SampleStats.toStringWithSize(pool.getNegatives()));

                SerializationUtils.serializeXml(Sample.createCopy(pool), new File(categoryPath + "/navigationPool.xml"), true);


                ExperimentStatisticsCollector collector = new ExperimentStatisticsCollector(experimentName, numPhases);

                KFoldCollection<Resource> kFolds = KFoldCollection.create(10, pool, 0.5f, random);

                int foldId = 0;
                for(KFoldContext<Resource> kFold : kFolds) {
                    ++foldId;

                    // Despite the 10-fold split, only the first two folds are
                    // evaluated (quick run — see the experiment name).
                    if(foldId >= 3)
                        break;

                    logger.info("  Entering fold " + foldId);
                    logger.info("    testPositives:  " + SampleStats.toStringWithSize(kFold.getTestSample().getPositives()));
                    logger.info("    testNegatives:  " + SampleStats.toStringWithSize(kFold.getTestSample().getNegatives()));
                    logger.info("    foldPositives:  " + SampleStats.toStringWithSize(kFold.getTrainSample().getPositives()));
                    logger.info("    foldNegatives:  " + SampleStats.toStringWithSize(kFold.getTrainSample().getNegatives()));

                    // NOTE(review): no "/" separator — fold output files are
                    // written next to the category directory, prefixed with
                    // its name. Verify this is intended.
                    String foldPath = categoryPath + foldId;

                    // Initial training sample: 5 random positives/negatives.
                    Sample<Resource> init = new Sample<Resource>(
                            RandomUtils.randomSampleSet(kFold.getTrainSample().getPositives(), 5, random),
                            RandomUtils.randomSampleSet(kFold.getTrainSample().getNegatives(), 5, random));

                    PhaseIterator phaseIterator = new PhaseIterator(allTyped, kFold.getTrainSample(), init, random, con.getLearner(), con.getClassifier());

                    for(int i = 0; i < numPhases; ++i) {
                        logger.info("    Entering phase " + (i + 1));

                        String phasePath = foldPath + "-" + i + "-";

                        // NOTE(review): getNext() is called twice here —
                        // presumably a side-effect-free peek; confirm against
                        // PhaseIterator before relying on this.
                        logger.info("      trainPositives: " + SampleStats.toStringWithSize(phaseIterator.getNext().getPositives()));
                        logger.info("      trainNegatives: " + SampleStats.toStringWithSize(phaseIterator.getNext().getNegatives()));

                        PhaseContext<Resource, EvaluatedDescription> phaseResult = phaseIterator.next();

                        SampleStats<Resource> allStats = SampleStats.create(all, phaseResult.getExamples());
                        SampleStats<Resource> allTypedStats = SampleStats.create(allTyped, phaseResult.getExamples());

                        SampleStats<Resource> testAllStats = SampleStats.create(kFold.getTestSample(), phaseResult.getExamples(), all);
                        SampleStats<Resource> testAllTypedStats = SampleStats.create(kFold.getTestSample(), phaseResult.getExamples(), allTyped);


                        add(collector, i, "all", allStats);
                        add(collector, i, "allTyped", allTypedStats);
                        add(collector, i, "testAll", testAllStats);
                        // NOTE(review): label looks like a typo for
                        // "testAllTyped", but it is kept unchanged because it
                        // names the metric in the persisted statistics.
                        add(collector, i, "typedAllTyped", testAllTypedStats);

                        logger.info("      learned:        " + phaseResult.getLearnedDescription().getDescription().toKBSyntaxString());
                        logger.info("      #instances      " + con.getClassifier().countInstances(phaseResult.getLearnedDescription()));
                        logger.info("      accuracy:       " + phaseResult.getLearnedDescription().getAccuracy());
                        logger.info("      all:            " + allStats);
                        logger.info("      allTyped:       " + allTypedStats);
                        logger.info("      testAll:        " + testAllStats);
                        logger.info("      typedAllTyped:  " + testAllTypedStats);

                        SerializationUtils.serializeXml(phaseResult, new File(phasePath + "phaseResult.xml"), true);

                        // A phase counts as GOOD once the learned description
                        // generalizes well enough (F >= 0.5) with near-perfect
                        // training accuracy.
                        String label = "BAD";
                        if(allTypedStats.getFMeasure() >= 0.5 && phaseResult.getLearnedDescription().getAccuracy() > 0.99) {
                            label = "GOOD";
                        }

                        logger.info("      [" + label + "] " + category + " --- " + allTypedStats);



                        if(label.equals("GOOD")) {
                            // Sentinel value aborts the remaining folds via
                            // the foldId >= 3 check above.
                            foldId = 999;
                            break;
                        }


                    }

                    // NOTE(review): also no "/" before "table" — see foldPath.
                    writeOutStatistics(categoryPath + "table", collector.getTable());
                }
            } catch(Exception e) {
                logger.error("Error while processing " + category, e);
            }
        }
    }


    /**
     * Serializes a collection to XML as the sorted set of its elements'
     * string forms; null elements are rendered as the empty string.
     *
     * @param file destination file
     * @param collection the elements to serialize
     * @param force whether to overwrite an existing file
     * @throws IOException if writing fails
     */
    public static void serializeCollection(File file, Collection<?> collection, boolean force)
                throws IOException
    {
        // TreeSet gives a deterministic, sorted serialization order.
        Set<String> sortedNames = new TreeSet<String>();
        for(Object element : collection) {
            // coalesce() maps null elements to "" before stringification.
            sortedNames.add(StringUtils.coalesce(element, "").toString());
        }

        SerializationUtils.serializeXml(sortedNames, file, force);
    }


    /**
     * Records the learner's self-reported accuracy for the given phase as a
     * percentage metric named "learningAccuracy".
     */
    public static void add(ExperimentStatisticsCollector collector, int phaseId, EvaluatedDescription description)
    {
        collector
                .getMonitor("learningAccuracy", phaseId, Static.Units.PERCENTAGE)
                .add(description.getAccuracy());
    }

    /**
     * Records precision, recall and F-measure of {@code stats} for the given
     * phase. The prefix distinguishes which sample the statistics were
     * computed against (e.g. "all", "allTyped", "testAll").
     */
    public static void add(ExperimentStatisticsCollector collector, int phaseId, String prefix, SampleStats<?> stats)
    {
        // One percentage monitor per metric, keyed by prefix + metric name.
        collector.getMonitor(prefix + "Precision", phaseId, Static.Units.PERCENTAGE)
                .add(stats.getPrecision());
        collector.getMonitor(prefix + "Recall", phaseId, Static.Units.PERCENTAGE)
                .add(stats.getRecall());
        collector.getMonitor(prefix + "FMeasure", phaseId, Static.Units.PERCENTAGE)
                .add(stats.getFMeasure());
    }
    
    /**
     * Scans DBpedia categories for ones whose membership can be learned
     * well: for each sufficiently large category it builds an example pool
     * from typed members plus a shared negative-candidate pool, runs up to
     * three folds of up to three learning phases each, and logs categories
     * reaching an F-measure of at least 0.5 with near-perfect learner
     * accuracy as "GOOD CANDIDATE".
     *
     * @throws Exception on configuration, SPARQL or learning failures
     */
    public static void findLearnableCategories()
        throws Exception
    {
        HttpSparqlEndpoint coreSparqlEndpoint = new HttpSparqlEndpoint("http://hanne.aksw.org:8892/sparql", "http://dbpedia.org");
        ISparqlEndpoint sparqlEndpoint = new CachingSparqlEndpoint(coreSparqlEndpoint, "/tmp/sparqlCache");
        Set<String> graphNames = Collections.singleton("http://dbpedia.org");

        ILearn learnerBean = (ILearn)context.getBean("dbpedia.learn");
        IClassifier classifierBean = (IClassifier)context.getBean("dbpedia.classifier");

        // NOTE(review): these two beans are currently unused in this method
        // (numPhases only appears in commented-out code below); the lookups
        // are kept so a misconfigured context still fails early.
        String benchmarkResultsBasePath = (String)context.getBean("basePath");
        Integer numPhases = (Integer)context.getBean("numPhases");

        NkeGenericLearner learnerCore = new NkeGenericLearner(learnerBean);
        NkeGenericClassifier classifierCore = new NkeGenericClassifier(classifierBean);

        IGenericLearner<Resource, EvaluatedDescription> learner = new WrapperGenericLearner<Resource, String, EvaluatedDescription>(learnerCore, new StringTransformer<Resource>());
        IGenericClassifier<Resource, EvaluatedDescription> classifier = new WrapperGenericClassifier<Resource, String, EvaluatedDescription>(classifierCore, new StringToResourceTransformer());

        // Fixed seed so runs are reproducible.
        Random random = new Random(0l);



        Multimap<Integer, Resource> sizeToCategory = MySparqlTasks.getCategoriesWithTypedMembersAndWithoutNumbers(sparqlEndpoint, graphNames);

        //System.out.println(toString(sizeToCategory.asMap()));


        //if(true)
            //return;

        //countToCategory.keySet().
        logger.info("Retrieving a large amount of resources serving as a pool for negative examples");
        Set<Resource> allNegativeCandidates = MySparqlTasks.getDBpediaSubjects(sparqlEndpoint, graphNames, 1000);

        for(Map.Entry<Integer, Resource> entry : sizeToCategory.entries()) {
            Resource category = entry.getValue();
            // Only categories with at least 105 typed members qualify.
            if(entry.getKey() < 105) {
                logger.info("Skipping " + category);
                continue;
            }




            //logger.info("Retrieving all members of category " + category);
            //Set<Resource> allPositives = MySparqlTasks.getSubjectsByPredicateObject(sparqlEndpoint, graphNames, Skos.subject, category);
            Set<Resource> allPositives = MySparqlTasks.getTypedDBpediaCategoryMembers(sparqlEndpoint, graphNames, category);

            // Negatives: the shared candidate pool minus this category's positives.
            Sample<Resource> pool = Sample.createCopy(allPositives, Sets.difference(allNegativeCandidates, allPositives));


            logger.info("Category: " + category);
            logger.info("Members: " + entry.getKey());
            int numPositivesPerFold = Math.round(allPositives.size() / 10.0f);

            logger.info("allPositivesSize = " + allPositives.size());
            logger.info("numPositivesPerFold = " + numPositivesPerFold);

            KFoldCollection<Resource> kFolds = new KFoldCollection<Resource>(10, 2 * numPositivesPerFold, pool, random);

            // Fix: use the generic constructor instead of the raw type.
            // Negatives are unknown, hence null.
            Sample<Resource> all = new Sample<Resource>(allPositives, null);

            logger.info("-------------------------------------------------");
            logger.info("Processing category: " + category);

            int foldCount = 0;
            for(KFoldContext<Resource> kFold : kFolds) {
                ++foldCount;

                // At most three folds per category (quick scan).
                if(foldCount > 3)
                    break;

                // Initial training sample: 5 random positives/negatives.
                Sample<Resource> init = new Sample<Resource>(
                        RandomUtils.randomSampleSet(kFold.getTrainSample().getPositives(), 5, random),
                        RandomUtils.randomSampleSet(kFold.getTrainSample().getNegatives(), 5, random));

                PhaseIterator phaseIterator = new PhaseIterator(all, kFold.getTrainSample(), init, random, learner, classifier);

                boolean success = false;

                for(int i = 0; i < 3; ++i) {
                    PhaseContext<Resource, EvaluatedDescription> phaseResult = phaseIterator.next();

                    Set<Resource> examples = phaseResult.getExamples();
                    Set<Resource> totalTruePositives = Sets.intersection(all.getPositives(), examples);
                    Set<Resource> totalFalsePositives = Sets.difference(examples, all.getPositives());


                    SampleStats<Resource> totalStats = SampleStats.create(all, examples);
                    logger.info("VALIDATE precision/recall/fMeasure: " + Joiner.on("/").join(SebsKFoldCrossValidationExperiment.humanReadable(totalStats.getPrecision()), SebsKFoldCrossValidationExperiment.humanReadable(totalStats.getRecall()), SebsKFoldCrossValidationExperiment.humanReadable(totalStats.getFMeasure())));



                    // NOTE(review): yields NaN when 'examples' is empty, which
                    // simply fails the >= 0.5 check below.
                    double totalPrecision = totalTruePositives.size() / (double)examples.size();
                    double totalRecall =  totalTruePositives.size() / (double)all.getPositives().size();
                    double totalFMeasure = SebsKFoldCrossValidationExperiment.fMeasure(totalPrecision, totalRecall);

                    //if(totalFMeasure >= 0.5 && phaseResult.getLearnedDescription().getAccuracy() > 0.99) {
                        logger.info("Positives: " + ClausBenchmark.toMyString(phaseResult.getCurrent().getPositives()));
                        logger.info("Negatives: " + ClausBenchmark.toMyString(phaseResult.getCurrent().getNegatives()));

                        logger.info("Learned: " + phaseResult.getLearnedDescription().getDescription().toKBSyntaxString());
                        logger.info("Missed positives: " +  ClausBenchmark.toMyString(Sets.difference(all.getPositives(), totalTruePositives)));
                        logger.info("Missed test samples: " + ClausBenchmark.toMyString(Sets.difference(kFold.getTestSample().getPositives(), totalTruePositives)));

                        String stats = "Total precision/recall/fMeasure: " + Joiner.on("/").join(SebsKFoldCrossValidationExperiment.humanReadable(totalPrecision), SebsKFoldCrossValidationExperiment.humanReadable(totalRecall), SebsKFoldCrossValidationExperiment.humanReadable(totalFMeasure));
                        logger.info(stats);

                    //}

                    if(totalFMeasure >= 0.5 && phaseResult.getLearnedDescription().getAccuracy() > 0.99) {
                        logger.info("GOOD CANDIDATE: " + category + " --- " + stats);
                        success = true;
                        break;
                    }

                    //phaseResult.get
                }
                if(success)
                    break;

            }

            String experimentName = "FindCategoriesSuitableForLearning";

            //SebsKFoldCrossValidationExperiment<Resource, EvaluatedDescription> experiment =
                    //SebsKFoldCrossValidationExperiment.create(experimentName, all, kFolds, numPhases, random, learner, classifier);



        }

        //public static Table create


        /*

        System.out.println(toString(sizeToCategory.asMap()));


        long total = 0;
        for(Integer i : sizeToCategory.keySet()) {
            total +=i;
        }
        System.out.println("total: " + total);
        */
    }


    /**
     * Convenience overload: composes the output prefix
     * {@code basePath/experimentName/runName} and delegates to
     * {@link #writeOutStatistics(String, Table)}.
     */
    public static void writeOutStatistics(String basePath, String experimentName, String runName, Table table)
        throws Exception
    {
        String targetPrefix = basePath + "/" + experimentName + "/" + runName;
        writeOutStatistics(targetPrefix, table);
    }

    /**
     * Writes the experiment result table under the given path prefix in
     * three formats: XML ({@code .xml}), LaTeX ({@code .latex}) and GNUPlot
     * ({@code .gnuplot}). Parent directories are created as needed.
     *
     * @param basePath output file name prefix (extensions are appended)
     * @param table the experiment result table to write out
     * @throws Exception if serialization or file I/O fails
     */
    public static void writeOutStatistics(String basePath, Table table)
            throws Exception
    {
        // Guard against NPE when basePath has no parent component.
        File parent = new File(basePath).getParentFile();
        if(parent != null) {
            parent.mkdirs();
        }

        // Serialize object
        SerializationUtils.serializeXml(table, new File(basePath + ".xml"), true);

        // Serialize the two text renderings of the table.
        writeFormattedTable(table, new LatexRowFormatter(), new File(basePath + ".latex"));
        writeFormattedTable(table, new GNUPlotRowFormatter(), new File(basePath + ".gnuplot"));
    }

    /**
     * Formats {@code table} with the given formatter and writes the result
     * to {@code outFile} (not forcing an overwrite, matching the previous
     * inline behavior).
     */
    private static void writeFormattedTable(Table table, TableFormatter formatter, File outFile)
            throws Exception
    {
        String tableStr = formatter.format(table);
        Files.writeToFile(outFile, tableStr, false);
    }


    /**
     * Runs a ten-fold cross-validation benchmark over DBpedia categories of
     * exactly 100 instances, writing per-category LaTeX and GNUPlot result
     * tables below the configured base path.
     *
     * @throws Exception on configuration or SPARQL failures (per-category
     *         errors are caught and logged)
     */
    public static void myBenchmark()
            throws Exception
    {
        HttpSparqlEndpoint coreSparqlEndpoint = new HttpSparqlEndpoint("http://hanne.aksw.org:8892/sparql", "http://dbpedia.org");
        ISparqlEndpoint sparqlEndpoint = new CachingSparqlEndpoint(coreSparqlEndpoint, "/tmp/sparqlCache");
        Set<String> graphNames = Collections.singleton("http://dbpedia.org");

        ILearn learnerBean = (ILearn)context.getBean("dbpedia.learn");
        IClassifier classifierBean = (IClassifier)context.getBean("dbpedia.classifier");

        String benchmarkResultsBasePath = (String)context.getBean("basePath");
        Integer numPhases = (Integer)context.getBean("numPhases");

        NkeGenericLearner learnerCore = new NkeGenericLearner(learnerBean);
        NkeGenericClassifier classifierCore = new NkeGenericClassifier(classifierBean);

        IGenericLearner<Resource, EvaluatedDescription> learner = new WrapperGenericLearner<Resource, String, EvaluatedDescription>(learnerCore, new StringTransformer<Resource>());
        IGenericClassifier<Resource, EvaluatedDescription> classifier = new WrapperGenericClassifier<Resource, String, EvaluatedDescription>(classifierCore, new StringToResourceTransformer());

        // Fixed seed so runs are reproducible.
        Random random = new Random(0l);

        //ClausBenchmark<Resource, EvaluatedDescription> benchmark = new ClausBenchmark<Resource, EvaluatedDescription>(sparqlEndpoint, graphNames, learner, classifier, random);



        logger.info("Caching all categories");
        //Multimap<Integer, Resource> sizeToCategory = MySparqlTasks.getCategories(sparqlEndpoint, graphNames);
        Multimap<Integer, Resource> sizeToCategory = MySparqlTasks.getCategoriesContainingDBpediaInstances(sparqlEndpoint, graphNames);


        // Only categories with exactly 100 instances are benchmarked.
        Collection<Resource> categories = sizeToCategory.get(100);
        System.out.println("Category pool: " + categories.size() + " " + categories);

        // Retrieve FULL type hierarchy (so it gets cached once and for all)
        logger.info("Retrieving a large amount of resources serving as a pool for negative examples");
        Set<Resource> allNegativeCandidates = MySparqlTasks.getDBpediaSubjects(sparqlEndpoint, graphNames, 1000);
        logger.info("Retrieved " + allNegativeCandidates.size() + " negative Candidates");

        //logger.info("Picking 1000 negative candidates at random");
        //allNegativeCandidates = RandomUtils.randomSampleSet(allNegativeCandidates, 1000, random);



        /*
        allNegativeCandidates =
                new HashSet<Resource>(
                new TransformCollection<String, Resource>(negativesFix, new StringToResourceTransformer()));
        */



        //categories = RandomUtils.randomSampleSet(categories, 1, random);

        //Collection<Resource> categories = Collections.singleton(ResourceFactory.createResource("http://dbpedia.org/resource/Category:Television_networks"));

        logger.info("Processing " + categories.size() + " categories");
        for(Resource category : categories) {
            // Last path segment of the category URI, used as the output file
            // name. NOTE(review): the leading "/" is kept and later
            // percent-encoded; confirm that is intended.
            String categoryLabel = category.toString().substring(category.toString().lastIndexOf("/"));

            //if(!category.toString().equals("http://dbpedia.org/resource/Category:1975_novels"))
                    //continue;


            // Retrieve all members of this category
            logger.info("Retrieving all members of category " + category);
            //Set<Resource> allPositives = MySparqlTasks.getSubjectsByPredicateObject(sparqlEndpoint, graphNames, Skos.subject, category);
            Set<Resource> allPositives = MySparqlTasks.getTypedDBpediaCategoryMembers(sparqlEndpoint, graphNames, category);


            //Set<Resource> negativeCandidates = new HashSet<Resource>(Sets.difference(allNegativeCandidates, allPositives));

            Sample<Resource> pool = Sample.createCopy(allPositives, Sets.difference(allNegativeCandidates, allPositives));


            // All negatives set to null, as we don't know them
            // Fix: use the generic constructor instead of the raw type.
            Sample<Resource> all = new Sample<Resource>(allPositives, null);


            //Sample<Resource> all = createSample(subjects, instance);
            logger.info("All: " + pool.getPositives().size() + "/" + pool.getNegatives().size());
            //logger.info("Manually counted All: " + count(all.getPositives()) + "/" + count(all.getNegatives()));

            /*
            assert Sets.intersection(all.getPositives(), all.getNegatives()).isEmpty();
            assert Sets.union(all.getPositives(), all.getNegatives()).containsAll(subjects);
            assert subjects.size() == all.getPositives().size() + all.getNegatives().size();
*/

            //benchmark.performKFoldCrossValidation(10, 20, all, candidate, random, callback);


            //KFoldCrossValidationExperiment<Resource, EvaluatedDescription> experiment =
            //       KFoldCrossValidationExperiment.create()

            //ExperimentResult result = callback.getExperimentResult();
            //List<Sample<Resource>> kFolds = ClausBenchmark.createStratifiedKFolds(10, 20, all.getPositives(), candidate.getNegatives(), random);
            try {
                KFoldCollection<Resource> kFolds = new KFoldCollection<Resource>(10, 20, pool, random);


                String experimentName = "TenFold_TypedInstances_";

                SebsKFoldCrossValidationExperiment<Resource, EvaluatedDescription> experiment =
                        SebsKFoldCrossValidationExperiment.create(experimentName, all, kFolds, numPhases, random, learner, classifier);

                experiment.run();

                Table table = experiment.getResult();

                String basePath = benchmarkResultsBasePath;
                String baseName = basePath + "/" + experimentName + "/" + URLEncoder.encode(categoryLabel, "UTF-8");


                new File(baseName).getParentFile().mkdirs();

                // Serialize object
                File outFile = new File(baseName + ".xml");
                //SerializationUtils.serializeXML(table, outFile);

                // Serialize latex
                TableFormatter latexTableFormatter = new LatexRowFormatter();
                String tableStr = latexTableFormatter.format(table);
                outFile = new File(baseName + ".latex");
                Files.writeToFile(outFile, tableStr, false);

                // Serialize gnu plu
                TableFormatter gnuplotTableFormatter = new GNUPlotRowFormatter();
                tableStr = gnuplotTableFormatter.format(table);
                outFile = new File(baseName + ".gnuplot");
                Files.writeToFile(outFile, tableStr, false);



            }
            catch(Throwable t) {
                // Broad catch is deliberate: one failing category must not
                // abort the whole benchmark run.
                logger.error("Error while processing category " + category, t);
            }
        }

        //logger.info("Starting benchmark");
        //benchmark.run();
    }

/*
    private ISparqlEndpoint sparqlEndpoint;
    private Set<String> graphNames;
    //private ILearningClassifier<T> learningClassifier;
    private IGenericLearner<E, D> learner;
    private IGenericClassifier<E, D> classifier;

    private Random random;


    public ClausBenchmark(ISparqlEndpoint sparqlEndpoint, Set<String> graphNames, IGenericLearner<E, D> learner, IGenericClassifier<E, D> classifier, Random random)
    {
        this.sparqlEndpoint = sparqlEndpoint;
        this.graphNames = graphNames;
        this.learner = learner;
        this.classifier = classifier;
        this.random = random;
    }



    public static String toString(Map<?, ?> map)
    {
        String result = "";
        for(Map.Entry<?, ?> entry : map.entrySet()) {
            result += "" + entry.getKey() + ": " + entry.getValue() + "\n";
        }
        return result;
    }
*/



}
