/*
 * ClassificationEngine.java
 */
package org.osdtsystem.dataset;

import org.osdtsystem.hierarchy.HClass;
import org.osdtsystem.hierarchy.Hierarchy;
import org.osdtsystem.utils.Storage;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import org.osdtsystem.options.CoverOptions;

/**
 * Implementation of a classification.
 * @param <H> the type of the classes in the classification
 * @author Matthias Buch-Kromann and Martin Haulrich
 */
public class ClassificationEngine<H extends HClass> implements Classification<H> {
    // Storage and hierarchy
    private final Storage storage;
    private final Hierarchy<H> hierarchy;
    private final H rootClass;
    // Absolute depth of the root class; all depths in this classification are
    // stored relative to this value (see classDepth()).
    private final int rootClassDepth;

    // The classes in the classification, indexed by relative depth:
    // classes.get(d) maps each class at depth d to its classification entry.
    // Slots are created lazily and may be null.
    private final List<Map<H,ClassificationEntry<H>>> classes;
    // Union of all data removed so far; null until removeData() is first called.
    private Dataset removedData = null;
    // Classes whose datasets are already up to date w.r.t. removedData;
    // null until removeData() is first called.
    private Set<H> filteredClasses = null;

    // K-best list (partially sorted in increasing order: the last k entries are
    // guaranteed to contain the largest k entries in sorted order)
    private TreeSet<H> classesSorted;

    // Statistics
    private int maximalClassSize = 0;         // largest class size seen so far
    private double maximalClassWeight = 0;    // largest class weight seen so far
    private int cumulativeClassSize = 0;      // sum of sizes of current classes
    private double cumulativeClassWeight = 0; // sum of weights of current classes
    private CoverOptions parameters;          // parameters from the hierarchy

    /**
     * Creates a classification over the given hierarchy, rooted at rootClass.
     * @param storage the backing storage
     * @param hierarchy the class hierarchy being classified
     * @param rootClass the root class of the classification
     */
    public ClassificationEngine(Storage storage, Hierarchy<H> hierarchy, H rootClass) {
        this.storage = storage;
        this.hierarchy = hierarchy;
        this.rootClass = rootClass;
        this.rootClassDepth = rootClass.depth();
        this.parameters = hierarchy.classificationParameters();

        // Per-depth class maps and the size-ordered class set start out empty
        this.classes = new ArrayList<Map<H,ClassificationEntry<H>>>();
        this.classesSorted = new TreeSet<H>(SIZECOMPARATOR);
    }

    /** Returns the classification parameters supplied by the hierarchy. */
    @Override
    public CoverOptions parameters() {
        return this.parameters;
    }

    /**
     * Returns whether the given class should be pruned; classes that are not
     * in the classification are always considered prunable.
     */
    @Override
    public boolean isMarkedForPruning(H hclass) {
        ClassificationEntry<H> entry = classificationEntry(hclass);
        if (entry == null)
            return true;
        return isMarkedForPruning(hclass, entry.dataset());
    }

    /**
     * Returns whether a class with the given dataset should be pruned, i.e.
     * whether it falls below the depth threshold or the size threshold.
     */
    public boolean isMarkedForPruning(H hclass, Dataset dataset) {
        int relativeDepth = hclass.depth() - rootClassDepth;
        return isBelowDepthThreshold(relativeDepth) || isBelowSizeThreshold(dataset.size());
    }

    /**
     * Returns whether the given class size falls below the absolute minimal
     * class count, or below the minimal size relative to the largest class.
     */
    @Override
    public boolean isBelowSizeThreshold(int size) {
        if (size < parameters.classificationMinClassCount())
            return true;
        return size < parameters.classificationMinRelativeClassSize() * maximalClassSize();
    }

    /**
     * Returns whether the given relative depth lies beyond the maximal class
     * depth allowed by the parameters.
     */
    @Override
    public boolean isBelowDepthThreshold(int depth) {
        int maximalDepthAllowed = parameters.maximalClassDepth();
        return depth > maximalDepthAllowed;
    }


    /**
     * Removes every class that is currently marked for pruning. Iterates over
     * a snapshot, because removeClass() mutates the sorted class set.
     */
    @Override
    public void pruneAll() {
        for (H hclass : new ArrayList<H>(classesSorted)) {
            if (isMarkedForPruning(hclass)) {
                removeClass(hclass);
            }
        }
    }


    /** Returns the hierarchy this classification is defined over. */
    @Override
    public Hierarchy<H> hierarchy() {
        return this.hierarchy;
    }

    /** Returns the root class of this classification. */
    @Override
    public H rootClass() {
        return this.rootClass;
    }

    /** Returns the number of classes currently in the classification. */
    @Override
    public int size() {
        return this.classesSorted.size();
    }

    /**
     * Returns the classification entry for the given class, or null if the
     * class is not in the classification.
     */
    ClassificationEntry<H> classificationEntry(H hclass) {
        // Bounds guard: depth slots are created lazily in addClass(), so an
        // unguarded classes.get(depth) would throw IndexOutOfBoundsException
        // for depths without a slot yet (or for classes above the root)
        int depth = classDepth(hclass);
        if (depth < 0 || depth >= classes.size())
            return null;
        Map<H,ClassificationEntry<H>> map = classes.get(depth);
        return (map == null) ? null : map.get(hclass);
    }

    /** Returns the size of the given class, or 0 if it is not classified. */
    @Override
    public int classSize(H hclass) {
        ClassificationEntry<H> entry = classificationEntry(hclass);
        if (entry == null)
            return 0;
        return entry.size();
    }

    /** Returns the largest class size seen so far. */
    @Override
    public int maximalClassSize() {
        return this.maximalClassSize;
    }

    /** Returns the sum of the sizes of all current classes. */
    @Override
    public int cumulativeClassSize() {
        return this.cumulativeClassSize;
    }

    /** Returns the weight of the given class, or 0 if it is not classified. */
    @Override
    public double classWeight(H hclass) {
        ClassificationEntry<H> entry = classificationEntry(hclass);
        if (entry == null)
            return 0;
        return entry.weight();
    }

    /** Returns the largest class weight seen so far. */
    @Override
    public double maximalClassWeight() {
        return this.maximalClassWeight;
    }

    /** Returns the sum of the weights of all current classes. */
    @Override
    public double cumulativeClassWeight() {
        return this.cumulativeClassWeight;
    }

    /** Returns the depth of the given class relative to the root class. */
    @Override
    public int classDepth(H hclass) {
        int absoluteDepth = hclass.depth();
        return absoluteDepth - rootClassDepth;
    }

    /**
     * Returns the number of depth slots materialized so far, i.e. one past
     * the deepest relative depth at which a class has been added.
     */
    @Override
    public int maximalDepth() {
        return this.classes.size();
    }

    /**
     * Returns the classes at the given relative depth, or an empty list if no
     * class has been added at that depth.
     */
    @Override
    public List<H> classes(int depth) {
        // Bounds guard: slots are created lazily, so out-of-range depths
        // would otherwise throw IndexOutOfBoundsException rather than
        // reporting an empty depth level
        if (depth < 0 || depth >= classes.size())
            return Collections.emptyList();
        Map<H,ClassificationEntry<H>> map = classes.get(depth);
        if (map == null)
            return Collections.emptyList();
        return new ArrayList<H>(map.keySet());
    }

    /**
     * Returns the dataset of the given class, or null if the class is not in
     * the classification. If data have been removed since the class was last
     * filtered, the dataset is filtered on demand; this may prune the class,
     * in which case null is returned.
     */
    @Override
    public Dataset dataset(H hclass) {
        // Find classification entry
        ClassificationEntry<H> entry = classificationEntry(hclass);
        if (entry == null)
            return null;

        // Make dataset up-to-date by filtering it with the removed dataset.
        // BUG FIX: the original recursed into dataset(hclass) here, which
        // never terminates because the class is only marked as filtered
        // after the recursive call; use the entry's dataset directly.
        if (removedData != null && ! filteredClasses.contains(hclass)) {
            changeDataset(hclass, entry.dataset().difference(removedData));
            // changeDataset may have pruned the class, so look it up again
            entry = classificationEntry(hclass);
        }

        return entry == null ? null : entry.dataset();
    }

    /**
     * Adds a class with the given dataset to the classification. If the class
     * is already present, its dataset is replaced instead. Classes that fall
     * below the depth or size thresholds are not added.
     */
    @Override
    public void addClass(H hclass, Dataset dataset) {
        // Classes above the root cannot belong to this classification
        int depth = classDepth(hclass);
        if (depth < 0)
            return;

        // Check whether class exists already (without touching absent slots)
        Map<H,ClassificationEntry<H>> map =
                (depth < classes.size()) ? classes.get(depth) : null;
        if (map != null && map.get(hclass) != null) {
            changeDataset(hclass, dataset);
            return;
        }

        // Do not add class if it falls below the depth or size thresholds
        // (same condition as isMarkedForPruning, previously duplicated here)
        if (isMarkedForPruning(hclass, dataset))
            return;

        // BUG FIX: grow the per-depth list on demand; the original only
        // called classes.set(depth, ...) on a never-grown ArrayList, which
        // throws IndexOutOfBoundsException for every depth
        while (classes.size() <= depth)
            classes.add(null);
        if ((map = classes.get(depth)) == null)
            classes.set(depth, map = new HashMap<H,ClassificationEntry<H>>());

        // Create new classification entry and store it in map
        ClassificationEntry<H> entry = new ClassificationEntry<H>(hclass, dataset);
        map.put(hclass, entry);

        // Update cumulative and maximal statistics. BUG FIX: the maximal
        // statistics were never updated before, which left the relative
        // size threshold permanently inactive.
        cumulativeClassSize += entry.size();
        cumulativeClassWeight += entry.weight();
        maximalClassSize = Math.max(maximalClassSize, entry.size());
        maximalClassWeight = Math.max(maximalClassWeight, entry.weight());

        // Add to the sorted set AFTER the map entry exists, so the size
        // comparator sees the correct size
        classesSorted.add(hclass);

        // Mark the class as up to date w.r.t. removed data, if relevant
        if (removedData != null)
            filteredClasses.add(hclass);
    }

    /**
     * Replaces the dataset of the given class. The class is removed instead
     * if the new dataset falls below the pruning thresholds; if the class is
     * not in the classification, this is a no-op.
     */
    @Override
    public void changeDataset(H hclass, Dataset dataset) {
        // Remove class if it should be pruned with new dataset
        if (isMarkedForPruning(hclass, dataset)) {
            removeClass(hclass);
            return;
        }

        // BUG FIX: guard against unknown classes; the original dereferenced
        // a null entry here when called for a class that was never added
        ClassificationEntry<H> entry = classificationEntry(hclass);
        if (entry == null)
            return;

        // Subtract old dataset from the cumulative statistics
        Dataset oldDataset = entry.dataset();
        cumulativeClassSize -= oldDataset.size();
        cumulativeClassWeight -= oldDataset.weight();

        // Set new dataset: the class must leave the sorted set before the
        // entry is mutated and re-enter afterwards, because the comparator
        // reads the entry's current size
        classesSorted.remove(hclass);
        entry.setDataset(dataset);
        cumulativeClassSize += dataset.size();
        cumulativeClassWeight += dataset.weight();
        maximalClassSize = Math.max(maximalClassSize, dataset.size());
        maximalClassWeight = Math.max(maximalClassWeight, dataset.weight());
        classesSorted.add(hclass);

        // Mark class as up to date w.r.t. removed data, if relevant
        if (removedData != null)
            filteredClasses.add(hclass);
    }


    /**
     * Removes the given class from the classification; does nothing if the
     * class is not present.
     */
    @Override
    public void removeClass(H hclass) {
        // Find classification entry, return if it doesn't exist
        ClassificationEntry<H> entry = classificationEntry(hclass);
        if (entry == null)
            return;

        // BUG FIX: remove from the sorted set FIRST. Its comparator looks up
        // the class size through the entry map; removing the map entry first
        // made classSize() return 0, so the TreeSet could not locate the
        // element and it leaked in classesSorted.
        classesSorted.remove(hclass);
        classes.get(classDepth(hclass)).remove(hclass);
        if (filteredClasses != null)
            filteredClasses.remove(hclass);

        // Update class sizes and weights
        cumulativeClassSize -= entry.size();
        cumulativeClassWeight -= entry.weight();
    }

    /**
     * Removes the given dataset from all classes. Classes are filtered
     * lazily: only classes already filtered against earlier removals are
     * updated eagerly here; all others are filtered on demand in dataset(H)
     * against the accumulated removed dataset.
     */
    @Override
    public void removeData(Dataset dataset) {
        // Update removed dataset. While the previously filtered classes are
        // re-filtered below, removedData temporarily holds only the NEW
        // removals (those classes already had the old removals subtracted);
        // the full union is installed afterwards.
        Dataset newRemovedData;
        if (removedData == null) {
            newRemovedData = removedData = dataset;
            filteredClasses = new HashSet<H>();
        } else {
            newRemovedData = removedData.union(dataset);
            removedData = dataset;
        }

        // Update all previously filtered classes; iterate over a copy since
        // dataset(hclass) re-registers each class in filteredClasses
        List<H> filteredClassesCopy = new ArrayList<H>(filteredClasses);
        filteredClasses.clear();
        for (H hclass : filteredClassesCopy) {
            dataset(hclass);
        }

        // Install the accumulated removed dataset for lazy filtering
        removedData = newRemovedData;
    }

    /**
     * Eagerly filters every class against the removed data (dataset(H)
     * otherwise filters lazily on demand).
     */
    @Override
    public void filterAll() {
        for (Map<H,ClassificationEntry<H>> classesAtDepth : classes) {
            // Depth slots are created lazily and may be null
            if (classesAtDepth == null)
                continue;
            // BUG FIX: iterate over a snapshot of the keys. dataset() may
            // prune a class, and removing from the map while iterating its
            // keySet() throws ConcurrentModificationException.
            for (H hclass : new ArrayList<H>(classesAtDepth.keySet())) {
                dataset(hclass);
            }
        }
    }

    /**
     * Re-sorts the classes with the given comparator. The comparator should
     * never consider two distinct classes equal, since elements that compare
     * as equal collapse in the underlying TreeSet.
     */
    @Override
    public void setComparator(Comparator<? super H> comparator) {
        TreeSet<H> resorted = new TreeSet<H>(comparator);
        resorted.addAll(classesSorted);
        classesSorted = resorted;
    }

    /** Returns a comparator that orders classes by increasing class size. */
    @Override
    public Comparator<H> sizeComparator() {
        return this.SIZECOMPARATOR;
    }

    /** Returns a comparator that orders classes by increasing class weight. */
    @Override
    public Comparator<H> weightComparator() {
        return this.WEIGHTCOMPARATOR;
    }

    // Orders classes by increasing class size. NOTE(review): classes with
    // equal size compare as equal, so distinct classes can collapse in a
    // TreeSet using this comparator — consider adding a tie-breaker.
    private final Comparator<H> SIZECOMPARATOR = new Comparator<H>() {
        @Override
        public int compare(H hclass1, H hclass2) {
            int size1 = classSize(hclass1);
            int size2 = classSize(hclass2);
            // Explicit comparison avoids the overflow of size1 - size2
            return (size1 < size2) ? -1 : ((size1 == size2) ? 0 : 1);
        }
    };

    // Orders classes by increasing class weight; made final since it is never
    // reassigned. NOTE(review): equal weights compare as equal and may
    // collapse distinct classes in a TreeSet using this comparator.
    private final Comparator<H> WEIGHTCOMPARATOR = new Comparator<H>() {
        @Override
        public int compare(H hclass1, H hclass2) {
            return Double.compare(classWeight(hclass1), classWeight(hclass2));
        }
    };

    /**
     * Returns the k best classes with their datasets updated with respect to
     * the removed data. The list is recomputed until it is stable under
     * filtering, since filtering may shrink or prune a class and thereby
     * change the ordering.
     */
    @Override
    public List<H> kBestClasses(int k) {
        // Return current kbest list if no data have been removed
        if (removedData == null)
            return kbestWithoutFilteringUpdate(k);

        // TODO: It is somewhat inefficient to recompute the
        // kbest list in every iteration. We could perhaps do better by
        // carefully considering the 2k- or 3k-best list, and
        // building the k-best list from there, so that we do not have
        // to restart from position 1 every time.

        // Loop through sorted set until the first k entries are updated
        while (true) {
            // Do not update unless something changes
            boolean update = false;
            List<H> list = kbestWithoutFilteringUpdate(k);

            // Compute current kbest list and check whether the classes in it
            // have been filtered; dataset(hclass) filters the class, which
            // may prune it or change its position in the sorted set
            for (H hclass : list) {
                if (! filteredClasses.contains(hclass)) {
                    update = true;
                    dataset(hclass);
                }
            }

            // Return list if unchanged (every class in it is filtered)
            if (! update)
                return list;

        }
    }

    /**
     * Returns the k best (largest) classes without updating their datasets,
     * ordered best-first.
     *
     * <p>BUG FIX: the original never incremented its loop counter and so
     * returned ALL classes (and one class for k == 0). It also iterated in
     * ascending order, whereas per the classesSorted field comment the k
     * best classes are the LARGEST (last) entries of the sorted set.
     */
    List<H> kbestWithoutFilteringUpdate(int k) {
        if (k <= 0)
            return new ArrayList<H>();
        List<H> list = new ArrayList<H>(k);
        // The comparator sorts in increasing order, so take the largest
        // classes from the descending view of the set
        for (H hclass : classesSorted.descendingSet()) {
            if (list.size() == k)
                break;
            list.add(hclass);
        }
        return list;
    }

//  NOTE(review): the block below is dead commented-out code (it references
//  undefined names such as dataIDs and minCount) — delete it or reimplement.
//    @Override
//    public Classification classify(View view, Dataset dataset, AffixHClass root, int maxDepth) {
//        // Create classification engine
//        String rootAffix = root.affix();
//        Classification classification = new ClassificationEngine(storage, this, root);
//        classification.setMaximalClassDepth(maxDepth);
//
//        Map<HClass, List<Integer>> classification = new HashMap<HClass, List<Integer>>();
//        Classification result =
//                new ClassificationEngine(storage, minCount, cutFraction);
//
//        // Classify data
//        for (Integer dataID : dataIDs) {
//            AffixHClass dataClass = dataClass(view, dataID);
//            if (contains(root, dataClass)) {
//                int offset = root.depth() + 1;
//                for (AffixHClass superClass : superclasses(dataClass, offset, offset + maxDepth)) {
//                    // Retrieve data list
//                    List<Integer> list = classification.get(superClass);
//                    if (list == null) {
//                        classification.put(superClass, list = new ArrayList<Integer>());
//                    }
//
//                    // Add data to list
//                    list.add(dataID);
//                }
//            }
//        }
//
//        // Copy classifications to classified data set
//        for (Entry<HClass, List<Integer>> e : classification.entrySet()) {
//            classification.addClassificationWithData(e.getKey(), e.getKey().depth() - root.depth(), e.getValue());
//        }
//
//        // Return result
//        return classification;
//    }

    /**
     * Not implemented yet.
     * @throws UnsupportedOperationException always
     */
    @Override
    public void addDataPoint(H hclass, int identifier) {
        throw new UnsupportedOperationException("Not supported yet.");
    }

    /**
     * Not implemented yet.
     * @throws UnsupportedOperationException always
     */
    @Override
    public void removeDataPoint(H hclass, int identifier) {
        throw new UnsupportedOperationException("Not supported yet.");
    }

    /**
     * Not implemented yet.
     * @throws UnsupportedOperationException always
     */
    @Override
    public void trim() {
        throw new UnsupportedOperationException("Not supported yet.");
    }
}
