package OrderedCoverPackage;

import ClassAlgebra.Expression;
import ClassAlgebra.SimpleExpression;
import ClassifiedDataSetPackage.ClassifiedDataSet;
import DataSetPackage.View;
import EstimatorPackage.EstimatorExp;
import HierarchyPackage.HClass;
import HierarchyPackage.ProductHierarchy.ProductHierarchy;
import OptionsManager.DiscountOptions;
import OptionsManager.TrainOptions;

import org.osdtsystem.utils.TroveDoubleList;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

/**
 * Trainable ordered cover (HPM-style) built greedily over a product hierarchy,
 * using a prior estimator and discount-based back-off.
 *
 * @author Martin Haulrich
 */
public class BaseHPM extends AbstractTrainOC {

    /**
     * Counter used for assigning unique IDs to partitions.
     */
    private int nextID;

    /**
     * Flag telling whether the structure of the cover has changed since the last calculation of probabilities.
     */
    private boolean structureChanged;

    /**
     * Flag telling whether the number of data points has changed since the last calculation of dataSize.
     */
    private boolean datasizeChanged;

    /**
     * The current number of datapoints in the cover.
     * This is not necessarily updated so one should always call dataSize() instead of using this variable.
     */
    private int dataSize;

    /**
     * The log-likelihood of all the points in the cover.
     * This is not necessarily updated and should not be used directly.
     */
    private double logLikelihood;

    /**
     * Cached bump-posterior score of the cover (set by getBumpPostMDL and applyAddDelta).
     * This is not necessarily updated and should not be used directly. Use getBumpPostMDL() instead.
     */
    private double bumpPostWeigth;

    /**
     * The top partition of the cover (also stored in the inherited 'top' field).
     */
    protected OCPartitionTrain trTop;

    /**
     * Used during training. If this is set we are currently checking a partitioning that will
     * make the current partition (in method partition) a ghost-partition. This will decrease the
     * size of the current cover by one.
     */
    protected boolean tempGhost;

    /** Options holding the discount constants "d1", "d2" and "d3+". */
    DiscountOptions discountOptions;

    /**
     * Constructor for the ordered cover that is to be trained.
     * No training will be done until 'train' has been called.
     * @param hier Hierarchy to base the ordered cover on.
     * @param prior Estimator for prior-probabilities.
     * @param discountOptions Options with the discount masses used for back-off.
     */
    public BaseHPM(ProductHierarchy hier, EstimatorExp prior, DiscountOptions discountOptions) {
        super(hier, prior);
        this.discountOptions = discountOptions;
        partitionList = new LinkedList<OCPartition>();
        trTop = new OCPartitionTrain(hier.getTop(), 0);
        top = trTop;
        trTop.restDataPoints = new HashSet<Integer>();
        // All cached scores of the top partition start out-of-date.
        trTop.restLogEmpUTD = false;
        trTop.restLogLikeUTD = false;
        trTop.restLogPriorUTD = false;
        structureChanged = true;
        datasizeChanged = true;
        partitionList.add(trTop);

        // ID 0 was used for the top partition.
        nextID = 1;

        tempGhost = false;
    }

    /**
     * Train the cover with the given datapoints.
     * The cover is first partitioned greedily, then repeatedly sanitized and
     * re-partitioned for the configured number of rounds.
     * @param dids Data to train the cover with.
     * @param view View on the datapoints.
     * @param options Training options ("rounds", "mincount", "depth", "breadth", "cutfraction").
     */
    public void train(Collection<Integer> dids, View view, TrainOptions options) {

        dataSize = dids.size();
        datasizeChanged = false;

        // Initialise the cached scores of the top partition.
        Expression e = new SimpleExpression(trTop.mainClass);
        trTop.restLogPriorProbDenom = prior.getEstimate(e, hierarchy);
        trTop.restLogPriorUTD = true;

        // The top partition initially covers everything: empirical probability is 1.
        trTop.restLogEmpProb = Math.log10(1);
        trTop.restLogEmpUTD = true;

        trTop.resLogPseuEmpProb = trTop.restLogEmpProb;
        trTop.restLogPseuEmpUTD = true;

        // Top inherits no mass and starts with no back-off mass.
        trTop.inh = 0;
        trTop.inhUTD = true;

        trTop.back = 0;
        trTop.backUTD = true;

        trTop.ptree = prior.getEstimate(trTop.mainExp, hierarchy);

        trTop.resLogPseuEmpProb = trTop.restLogEmpProb;
        trTop.restLogPseuEmpUTD = true;

        trTop.setTotalWeight(trTop.restLogEmpProb - trTop.restLogPriorProbDenom);

        // Put all datapoints in the trTop-partition
        trTop.restDataPoints.addAll(dids);

        // Create a list for probabilities for all the data points
        System.err.println("Calculating priorProbs...");

//        // Java-collection solution
//        priorDataPointProbs = new ArrayList<Double>(Collections.<Double>nCopies(dids.size(), Double.NaN));

        // OSDT-Trove collection solution. More memory-efficient.
        double[] doubleArray = new double[dids.size()];
        Arrays.fill(doubleArray, Double.NaN);
        priorDataPointProbs = new TroveDoubleList(doubleArray);

        // Get and save all probabilities for points in dataset (trTop-partition)
        for (Integer did : trTop.restDataPoints) {
            priorDataPointProbs.set(did, prior.getEstimate(did, view));
        }

        // Start partitioning of trTop
        // The partition method is a recursive left-first-depth partitioning method. Therefore the result
        // of the following call is a cover where all the partitionings that results in improvements are made.
        //  partition(trTop, dids, view, depth);
        partition(trTop, view, options, true);

        // In all but the first iteration we only repartition the partitions that have changed
        // during the sanitize-run. This list keeps track of those.
        LinkedHashSet<OCPartition> changed = new LinkedHashSet<OCPartition>();

        int i = 1;
        for (i = 1; i < options.getIntOption("rounds"); i++) {

            sanitize(trTop, 0, changed, options.getIntOption("mincount"), view);
            structureChanged = true;

            adjustWeights(trTop, 0);

            System.err.println("-------------------------------------");
            System.err.println("    " + (i) + " round finished");
            System.err.println("    Score: " + getBumpPostMDL(view));
            System.err.println("-------------------------------------");
            Collection<OCPartition> toTest;
            // NOTE(review): i starts at 1, so this branch is never taken and
            // toTest is always the 'changed' set — confirm whether a full
            // re-partition pass was intended for the first round.
            if (i == 0) {
                List<OCPartition> copy = new ArrayList<OCPartition>(partitionList());
                toTest = copy;
            } else {
                toTest = changed;
            }

            // Re-partition every non-ghost partition that changed during sanitizing.
            for (OCPartition p : toTest) {

                if (!p.ghost) {
                    partition((OCPartitionTrain) p, view, options, false);
                }
            }
            System.err.println("Score after partition: " + getBumpPostMDL(view));
            changed.clear();
        }

        // Final sanitize/adjust pass after the last partitioning round.
        sanitize(trTop, 0, changed, options.getIntOption("mincount"), view);
        structureChanged = true;

        adjustWeights(trTop, 0);

        System.err.println("-------------------------------------");
        System.err.println("    " + (i) + " round finished");
        System.err.println("    Score: " + getBumpPostMDL(view));
        System.err.println("-------------------------------------");
    }

    /**
     * Sanitize the cover. This means ghosting all partitions that are either degenerate because their
     * weight is lower than that of their parent or because they have fewer than mincount datapoints
     * in them.
     * The sanitizing is done recursively bottom-up, left-first.
     * @param current The partition to consider sanitizing.
     * @param backPar The back-off mass of the parent partition.
     * @param changed Records partitions that receive data points during redistribution.
     * @param minCount Minimum number of data points a partition must hold to survive.
     * @param view View on the data points.
     * @return The data points from the partition if it has been ghosted.
     */
    private Collection<Integer> sanitize(OCPartitionTrain current, double backPar,
            LinkedHashSet<OCPartition> changed, int minCount, View view) {

        // Update scores for this partition
        update(current, backPar);

        // Data bubbled up from ghosted children when this partition is itself a ghost.
        Set<Integer> myUpData = new HashSet<Integer>();

        List<OCPartition> children = getChildren(current);

        int size = children.size();
System.err.println("CP: 1"); // debug checkpoint
        for (int i = 0; i < size; i++) {
            OCPartitionTrain cht = (OCPartitionTrain) children.get(i);
            // Recurse bottom-up: sanitize the child first.
            Collection<Integer> upData = sanitize(cht, current.back, changed, minCount, view);
            System.err.println("CP: 2");
            if (!upData.isEmpty()) {
                if (current.ghost) {
                    // A ghost holds no data; pass the points further up.
                    System.err.println("I am also ghost!!");
                    myUpData.addAll(upData);
                } else {
                    System.err.println("Adding: " + upData);
                    //current.restDataPoints.addAll(upData);


                    // Points released by the ghosted child may belong to a later
                    // (right-hand) sibling; otherwise they stay in this partition.
                    int indexOfChild = children.indexOf(cht);
                    if (indexOfChild == children.size() - 1) {
                        current.restDataPoints.addAll(upData);
                    } else {
                        distributeRec(current, children.subList(indexOfChild + 1, children.size()),
                                upData, view, changed);
                    }
                    System.err.println("CP: A");
                }
                System.err.println("CP: 3");

                // If subpartition has been ghosted update my scores
                update(current, backPar);
            }
            System.err.println("CP: 4");
        }
        System.err.println("CP: B");

        // Top cannot be ghosted
        if (current.equals(trTop)) {
            return Collections.<Integer>emptyList();
        }


        if (!current.ghost) {

            System.err.println("CP: C");

            // Degenerate if the weight dropped below that of the nearest
            // non-ghost parent, or if the partition holds too few points.
            OCPartitionTrain parent = findNoGhostParent(current);
            if (getLogWeight(current) < getLogWeight(parent) ||
                    current.restDataPoints.size() < minCount) {
                String op;

                    System.err.println("CP: D");
                /**
                 * @TODO For now we ghost everything - change this so that partitions with no children are instead deleted.
                 */
                if (getChildren(current).isEmpty()) {
                    parent.children.remove(current);
                    // current = null;
                    current.ghost = true;
                    op = "Merging";

                } else {
                    current.ghost = true;
                    op = "Ghosting";

                }

                System.err.print(op + ": " + current.mainClass.getName());
                System.err.print("(" + current.restDataPoints.size() + ") into ");
                System.err.println(parent.mainClass.getName());
                // Hand the released points up to the caller for redistribution.
                List<Integer> upData = new ArrayList<Integer>(current.restDataPoints);
                current.restDataPoints.clear();
                return upData;
            }
        }
        System.err.println("CP: E");
        return myUpData;

    }

    /**
     * Update scores (weight, empirical probability, prior likelihood and
     * log-likelihood) for a partition.
     * @param current Partition to update.
     * @param backPar The back-off mass of the parent partition.
     */
    private void update(OCPartitionTrain current, double backPar) {

        updateWeigth(current, backPar);

        // Cast to double before dividing: both operands are ints, so without
        // the cast the division truncates to 0 for every partition smaller
        // than the whole data set and log10 yields -Infinity.
        // calculateLogEmpProb() already performs this cast; this keeps the
        // two computations consistent.
        current.restLogEmpProb = Math.log10((double) current.restDataPoints.size() / dataSize());
        current.restLogEmpUTD = true;

        current.restPriorLogLikelihood = priorLogLik(current.restDataPoints);
        current.restPriorLogLikeUTD = true;

        // Likelihood of the rest-data: prior likelihood plus the
        // pseudo-empirical weight applied once per data point.
        double weight = current.resLogPseuEmpProb - current.restLogPriorProbDenom;
        double nWeight = current.restDataPoints.size() * weight;

        current.restLogLikelihood = current.restPriorLogLikelihood + nWeight;
        current.restLogLikeUTD = true;
    }

    /**
     * Get the discount mass for a partition.
     * For a regular partition the discount depends on the number of data
     * points it holds (options "d1", "d2" and "d3+"); for a ghost partition
     * it is the sum of the discounts of its children.
     * @param p Partition to get discount for.
     * @return Discount-mass for the partition.
     */
    private double getDisc(OCPartitionTrain p) {
        double myDisc = 0;
        if (!p.ghost) {
            // Size-based discount table.
            switch (p.restDataPoints.size()) {
                case 1:
                    myDisc = discountOptions.getDoubleOption("d1");
                    break;
                case 2:
                    myDisc = discountOptions.getDoubleOption("d2");
                    break;
                default:
                    myDisc = discountOptions.getDoubleOption("d3+");
            }
        } else {
            // A ghost has no data of its own; aggregate over its children.
            for (OCPartition ch : getChildren(p)) {
                myDisc = myDisc + getDisc((OCPartitionTrain) ch);
            }
        }
        return myDisc;
    }

    /**
     * Find the nearest transitive parent that is not a ghost.
     * Walks up the parent chain, skipping ghost partitions.
     * @param current Partition to find the non-ghost parent of.
     * @return Nearest non-ghost ancestor partition.
     */
    private OCPartitionTrain findNoGhostParent(OCPartitionTrain current) {
        OCPartitionTrain candidate = (OCPartitionTrain) getParent(current);
        while (candidate.ghost) {
            candidate = (OCPartitionTrain) getParent(candidate);
        }
        return candidate;
    }

    /**
     * Recompute the back-off related quantities (prior denominator, prior
     * tree estimate, inherited mass, back-off mass and pseudo-empirical
     * probability) of a partition.
     * @param current Partition to update.
     * @param backPar The back-off mass of the parent partition.
     */
    private void updateWeigth(OCPartitionTrain current, double backPar) {
        if (!current.ghost) {

            PartitionExpressionTuple pet = getPartitionExpression(current);

            current.restLogPriorProbDenom = prior.getEstimate(pet.partitionExpression, hierarchy);
            current.restLogPriorUTD = true;
            current.ptree = prior.getEstimate(pet.partitionTreeExpression, hierarchy);

            // Inherited mass: the top partition inherits nothing; every other
            // partition scales the parent's back-off mass by the prior-tree ratio.
            double inh;
            if (current.equals(trTop)) {
                inh = 0;
            } else {
                OCPartitionTrain par = findNoGhostParent(current);
                double f = Math.pow(10, current.ptree - par.ptree);
                inh = backPar * f;
            }
            current.inh = inh;

            // Own discount is determined by the partition size; the top
            // partition is never discounted. getDisc() implements exactly the
            // same size-based table, so reuse it instead of duplicating the
            // switch here (current is known to be non-ghost in this branch).
            double myDisc = current.equals(trTop) ? 0 : getDisc(current);

            // Discount mass collected from the children.
            double chDisc = 0;
            for (OCPartition ch : getChildren(current)) {
                chDisc = chDisc + getDisc((OCPartitionTrain) ch);
            }

            double back = inh - myDisc + chDisc;
            current.back = back;

            // Pseudo-empirical probability: real count plus back-off mass,
            // normalised by the total data size.
            double pseuCount = Math.log10(current.restDataPoints.size() + back * Math.pow(10, current.restLogPriorProbDenom - current.ptree));
            double restLogPseuEmp = pseuCount - Math.log10(dataSize());
            current.resLogPseuEmpProb = restLogPseuEmp;
            current.restLogPseuEmpUTD = true;
        } else {
            // A ghost simply passes the parent's back-off mass through.
            current.back = backPar;
        }

    }

    /**
     * Recursively refresh the weights of a partition and all of its
     * sub-partitions. During the partition phase the weights are not kept
     * up-to-date; this top-down walk recomputes them.
     * @param current The partition currently being updated.
     * @param backPar The back-off mass of the parent partition.
     */
    private void adjustWeights(OCPartitionTrain current, double backPar) {
        updateWeigth(current, backPar);
        Iterator<OCPartition> childIt = getChildren(current).iterator();
        while (childIt.hasNext()) {
            adjustWeights((OCPartitionTrain) childIt.next(), current.back);
        }
    }

    /**
     * Recursively distribute a set of data points over a list of partitions.
     * Points classified by a child's main class are pushed down into that
     * child (or its sub-partitions); unclaimed points are kept by
     * {@code current} unless it is a ghost.
     * @param current Partition that receives any points not claimed by a child.
     * @param children Candidate sub-partitions to distribute into.
     * @param toDist Data points to distribute; points placed in a child are removed from this collection.
     * @param view View on the data points.
     * @param changed Records every partition whose data-point set was modified.
     * @return The points that could not be placed (only non-empty when {@code current} is a ghost).
     */
    private Collection<Integer> distributeRec(OCPartition current, List<OCPartition> children,
            Collection<Integer> toDist, View view, LinkedHashSet<OCPartition> changed) {

        for (OCPartition child : children) {

            Set<Integer> classifiedByCurrentChild = new HashSet<Integer>();

            // Check to see which points are classified under the current child-partition.
            for (Integer did : toDist) {
                if (hierarchy.isClassifiedBy(did, view, child.mainClass)) {
                    classifiedByCurrentChild.add(did);
                }
            }

            // Move points to the child-partition.
            Collection<Integer> notDist = distributeRec(child, getChildren(child),
                    classifiedByCurrentChild, view, changed);

            classifiedByCurrentChild.removeAll(notDist);
            // And remove them from topData so they are not placed in current partition as well.
            toDist.removeAll(classifiedByCurrentChild);
        }

        // All of the myData-points that could not be distributed to subpartition are
        // placed in the current-partition and removed from the mergedData-set

        // A ghost cannot hold data: hand the leftovers back to the caller.
        if (current.ghost) {
            return toDist;
        }

        if (!toDist.isEmpty()) {
            ((OCPartitionTrain) current).restDataPoints.addAll(toDist);
            changed.add(current);
        }
        return Collections.<Integer>emptyList();
    }

    /**
     * Partition the partition recursively.
     * The method finds the best partitioning and then applies it. It then calls itself recursively
     * with the newly added partition. This leads to a depth-first left-first partitioning of the cover.
     * Note that after the method has been used the weights of the partitions are not necessarily correct
     * so the adjustWeights method should be called.
     * Also the cover can be degenerate.
     * @param current Partition to try and split into smaller ones.
     * @param view View on data in cover.
     * @param options Training options ("mincount", "depth", "breadth", "cutfraction").
     * @param minCountGhost Should partitions with fewer than mincount datapoints be ghosted.
     * @return If the current partition is ghosted return the datapoints from it.
     */
    private Set<Integer> partition(OCPartitionTrain current, View view,
            TrainOptions options, boolean minCountGhost) {

        int minCount = options.getIntOption("mincount");

        // If the current partition is made ghost any datapoints in it, should be put in the
        // parent partition of this partition. These datapoints are saved in upPoints
        // and returned from the method.
        Set<Integer> upPoints = new HashSet<Integer>();

        // Classify the data in this partition
        System.err.println("Classifying data...");
        ClassifiedDataSet cd = hierarchy.getClassifications(current.restDataPoints, view, current.mainClass,
                options.getIntOption("depth"), options.getIntOption("mincount"), options.getIntOption("cutfraction"), true);

        // We use an SortedClassifications-object to keep track on which classifications remain
        // and how many datapoints remain in them.
        System.err.println("Creating sorted-classifications...");
        // SortedClassifications sClasses = new SortedClassificationsBasic(cd, current.restDataPoints, minCount);
        SC sClasses = new SC(cd, minCount);

        // The current class is included in the classification. Remove this while it should not be added to itself
        sClasses.removeTrue(current.mainClass);
        // NOTE(review): 'done' is boxed (Boolean) and never assigned true, so
        // the loop only ends when sClasses runs empty or via break — confirm
        // whether this flag is a leftover or intended for future use.
        Boolean done = false;

        while (!sClasses.isEmpty() && !done) {

//            // How much data is left in the current partition if this partition is added
//            int dataLeftSize = sClasses.getPool().size() - classDataPoints.size();
//            if (dataLeftSize < minCount) {
//                tempGhost = true;
//            }

            // Candidate classes evaluated this iteration (beam width).
            int toTry = Math.min(cd.classCount() / 2, options.getIntOption("breadth"));
            System.err.println("Trying " + toTry + " classes");
            System.err.println("BumpPost: " + getBumpPostMDL(view));

            Map<HPMDeltaAdd, Double> tested = new HashMap<HPMDeltaAdd, Double>(toTry);
            boolean tryDone = false;

            List<HClass> bestN = sClasses.getNBest(toTry);
            System.err.println(toTry + "  " + bestN.size());
            // Candidates are ordered; if even the best one is too small, stop.
            if (sClasses.getData(bestN.get(0)).size() < minCount) {
                break;
            }

            double currentScore = getBumpPostMDL(view);
            for (int i = 0; i < bestN.size() && (!tryDone) && (!sClasses.isEmpty()); i++) {

                HClass c = bestN.get(i);

                Collection<Integer> classDataPoints = sClasses.getData(c);
                if (classDataPoints.size() < minCount) {
                    tryDone = true;
                } else {
                    // Evaluate adding a partition for class c as a tentative delta.
                    System.err.println("Trying: " + c.getName() + " (" + classDataPoints.size() + ")  to " + current.mainClass.getName());
                    HPMDeltaAdd delta = new HPMDeltaAdd(hierarchy, this, prior, view, current, c,
                            classDataPoints, minCount, priorDataPointProbs, discountOptions);
                    OCPartitionTrain addedPartition = delta.getAddedPartition();

                    Double score = delta.getBumpPostMDL(view);
                    System.err.println("Score: " + score);
                    // Keep only candidates that improve both the weight and the score.
                    if ((delta.getLogWeight(addedPartition) > getLogWeight(current)) && (score > currentScore)) {
                        System.err.println("A good class!!!!!!!!!!");
                        tested.put(delta, score);
                    }
                }
            }

            // Temporarily remove the evaluated candidates from the pool.
            List<HClass> copy = new ArrayList<HClass>(bestN);
            for (HClass c : copy) {
                sClasses.removeTemp(c);
            }

            if (!tested.isEmpty()) {

                sClasses.reset();

                // Pick the candidate delta with the highest score.
                HPMDeltaAdd best = null;
                Double bestScore = Double.NEGATIVE_INFINITY;
                for (Entry<HPMDeltaAdd, Double> e : tested.entrySet()) {
                    if (e.getValue() > bestScore) {
                        best = e.getKey();
                        bestScore = e.getValue();
                    }
                }
                HPMDeltaAdd delta = best;

                // Apply the delta

                OCPartitionTrain addedPartition = delta.getAddedPartition();

                System.err.println("Best was: " + addedPartition.mainClass.getName());
                addedPartition = applyAddDelta(delta);

                // And removeTemp added points from point-pool
                Set<Integer> classDataPoints = new HashSet<Integer>(sClasses.getData(addedPartition.mainClass));
                sClasses.removeTrue(addedPartition.mainClass);

                // Output stuff - does not do anything
                System.err.println("Old score: " + currentScore);
                double newScoreDelta = delta.getBumpPostMDL(view);
                double newScoreBase = getBumpPostMDL(view);
                System.err.println("New score: " + newScoreDelta + "  " + newScoreBase);

                // Partition the newly created class. If this is ghosted some datapoints from this may have to return to this class
                Set<Integer> ghostedPoints = partition(addedPartition, view, options, true);

                // The added partition was merged
                if (!ghostedPoints.isEmpty()) {
                    System.err.println("Subpartition has been ghosted...");
                    current.restDataPoints.addAll(ghostedPoints);

                    // These point has been returned and should not be removed from the 'pool'
                    // Therefore we remove them from the points that should be removed
                    classDataPoints.removeAll(ghostedPoints);
                    // Invalidate this partition's cached scores.
                    current.restLogEmpUTD = false;
                    current.restLogLikeUTD = false;
                    current.restLogPriorUTD = false;
                    current.restPriorLogLikeUTD = false;
                    /**
                     * @TODO This might be too inefficient because it will lead to that all partitions are run through and tested for changes
                     */
                    structureChanged = true;
                }
                sClasses.remove(classDataPoints);
                System.err.println("After return: " + getBumpPostMDL(view));
            }

        }

        // Now it is time to check whether or not the current partition should be ghosted.

        // For now we only do the simply mincount check
        if (!current.equals(trTop)) {

            if (minCountGhost && current.restDataPoints.size() < minCount) {
                System.err.println("Ghosting (MINCOUNT) : " + current.mainClass.getName() + "(" + current.restDataPoints.size() +
                        ") into " + current.parent.mainClass.getName());
                upPoints.addAll(current.restDataPoints);
                current.restDataPoints.clear();
                current.ghost = true;

            }
        }
        return upPoints;
    }

    /**
     * Apply the changes from a delta to the base cover.
     * Using this is equivalent to using addPartition but is meant to be faster because scores are not recalculated.
     * @param delta Add-delta to apply to the current cover.
     * @return Partition that has been added.
     */
    private OCPartitionTrain applyAddDelta(HPMDeltaAdd delta) {

        // Do not create new partition - instead used the one from the delta
        OCPartitionTrain addedPart = delta.getAddedPartition();
        OCPartitionTrain parent = delta.getOldParent();
        if (parent.children == null) {
            parent.children = new ArrayList<OCPartition>();
        }
        parent.children.add(addedPart);

        // The points that moved into the new partition leave the parent.
        parent.restDataPoints.removeAll(delta.getMovedDataPoints());

        // Copy the scores pre-computed on the delta's shadow parent onto the
        // real parent, marking each cached value up-to-date.
        parent.restLogEmpProb = delta.newParent.restLogEmpProb;
        parent.restLogEmpUTD = true;
        parent.restLogLikelihood = delta.newParent.restLogLikelihood;
        parent.restLogLikeUTD = true;
        parent.restPriorLogLikelihood = delta.newParent.restPriorLogLikelihood;
        parent.restPriorLogLikeUTD = true;
        parent.restLogPriorProbDenom = delta.newParent.restLogPriorProbDenom;
        parent.restLogPriorUTD = true;

        parent.back = delta.newParent.back;
        parent.backUTD = true;
        parent.inh = delta.newParent.inh;
        parent.inhUTD = true;
        parent.resLogPseuEmpProb = delta.newParent.resLogPseuEmpProb;
        parent.restLogPseuEmpUTD = true;

        // Insert the new partition just before its parent in the ordered list,
        // and cache the score the delta already computed.
        partitionList.add(partitionList.indexOf(parent), addedPart);
        bumpPostWeigth = delta.BumpPostMDL;

        return addedPart;
    }

    /**
     * Returns the total number of data points in the cover.
     * The value is cached and only recomputed when the data has changed.
     * @return Number of data points across all partitions.
     */
    @Override
    public int dataSize() {
        if (datasizeChanged) {
            // Re-count by summing the rest-data of every partition.
            int total = 0;
            for (OCPartition p : partitionList) {
                total += ((OCPartitionTrain) p).restDataPoints.size();
            }
            dataSize = total;
            datasizeChanged = false;
        }
        return dataSize;
    }

    /**
     * Returns the log-weight of a partition: its pseudo-empirical log
     * probability minus its prior log probability denominator.
     * Stale cached values are recomputed first.
     * @param p Partition to get the weight of (must not be a ghost).
     * @return Log-weight of the partition.
     */
    @Override
    protected double getLogWeight(OCPartition p) {
        // Weights are undefined for ghost partitions.
        if (p.ghost) {
            System.err.println("BaseXHPM.getLogWeight - ghost - should not happen!!!!!!!!!!!!");
        }
        OCPartitionTrain partition = (OCPartitionTrain) p;
        // Order matters here: calculatePseuEmpProb reads restLogPriorProbDenom,
        // so the prior denominator must be refreshed before the pseudo-empirical
        // probability.
        if (!partition.restLogEmpUTD) {
            calculateLogEmpProb(partition);
        }
        if (!partition.restLogPriorUTD) {
            calculateLogPriorProbDenom(partition);
        }
        if (!partition.restLogPseuEmpUTD) {
            calculatePseuEmpProb(partition);
        }
//        return partition.restLogEmpProb - partition.restLogPriorProbDenom;
        return partition.resLogPseuEmpProb - partition.restLogPriorProbDenom;
    }

    /**
     * Calculates the logarithmic empirical probability of a partition and saves the value in the partition.
     * The cast to double is essential: without it the int/int division would truncate to zero.
     * @param partition Partition to calculate empirical probability for.
     */
    private void calculateLogEmpProb(OCPartitionTrain partition) {
        System.err.println("CalculateLogEmpProb");
        partition.restLogEmpProb = Math.log10((double) partition.restDataPoints.size() / dataSize());
        partition.restLogEmpUTD = true;
    }

    /**
     * Calculates the denominator of the logarithmic prior probability of a partition
     * (estimated from the partition's expression) and saves the value in the partition.
     * @param partition Partition to calculate probability for.
     */
    private void calculateLogPriorProbDenom(OCPartitionTrain partition) {

        partition.restLogPriorProbDenom = prior.getEstimate(getPartitionExpression(partition).partitionExpression, hierarchy);
        partition.restLogPriorUTD = true;
    }

    /**
     * Returns the next unique partition ID and advances the counter.
     * @return A fresh, previously unused ID.
     */
    @Override
    protected int nextID() {
        // Hand out the current counter value, then step it for the next call.
        return nextID++;
    }

    /**
     * Recompute the inherited mass, back-off mass and pseudo-empirical log
     * probability of a partition, caching the results on the partition.
     * @param p Partition to update.
     */
    protected void calculatePseuEmpProb(OCPartitionTrain p) {
        // Back-off mass of the parent; the top partition has no parent.
        double backPar;
        if (p.equals(trTop)) {
            backPar = 0;
        } else {
            backPar = findNoGhostParent(p).back;
        }
        // Inherited mass: parent's back-off scaled by the prior-tree ratio.
        // NOTE(review): uses p.parent directly for the ptree ratio, whereas
        // updateWeigth() uses findNoGhostParent() — confirm ghost parents are
        // handled as intended here.
        double inh;
        if (p.equals(trTop)) {
            inh = 0;
        } else {
            double f = Math.pow(10, p.ptree - ((OCPartitionTrain) p.parent).ptree);
            inh = backPar * f;
        }
        p.inh = inh;

        // Size-based discount table (options "d1", "d2", "d3+").
        // NOTE(review): unlike updateWeigth(), the discount is NOT forced to 0
        // for the top partition — confirm whether that is intentional.
        double myDisc;
        switch (p.restDataPoints.size()) {
            case 1:
                myDisc = discountOptions.getDoubleOption("d1");
                break;
            case 2:
                myDisc = discountOptions.getDoubleOption("d2");
                break;
            default:
                myDisc = discountOptions.getDoubleOption("d3+");
        }

        // Discount mass collected from the children.
        double chDisc = 0;
        for (OCPartition ch : getChildren(p)) {

            OCPartitionTrain chT = (OCPartitionTrain) ch;
            double chD = getDisc(chT);
            chDisc = chDisc + chD;

        }


        p.back = inh - myDisc + chDisc;

        // NOTE(review): updateWeigth() subtracts Math.log10(dataSize()) from
        // this quantity but this method does not — confirm whether the two
        // computations are meant to differ in normalisation.
        double restLogPseuEmp = Math.log10(p.restDataPoints.size() +
                p.back * Math.pow(10, p.restLogPriorProbDenom - p.ptree));
        p.resLogPseuEmpProb = restLogPseuEmp;
        p.restLogPseuEmpUTD = true;
    }

    /**
     * Returns the bumpPriorMDL prior term of the cover:
     * (1 - |cover|) / 2 * log10(dataSize).
     * @return BumpPriorMDL for the cover.
     */
    private double getBumpPriorMDL() {
        double halfComplexity = (1 - OCsize()) / 2.0;
        return halfComplexity * Math.log10(dataSize());
    }

    /**
     * Returns the cached prior log-likelihood of a partition's data points,
     * recomputing it lazily when the cached value is out of date.
     * @param p Partition to get the prior log-likelihood of (must not be a ghost).
     * @return Prior log-likelihood of the partition's rest-data.
     */
    @Override
    protected double getPriorLogLik(OCPartitionTrain p) {
        // Ghost partitions should never be asked for a prior likelihood.
        if (p.ghost) {
            System.err.println("BaseXHPM.getPriorLogLik - ghost - should not happen!!!!!!!!!!!!");
        }
        // Lazily refresh the cache when it is stale.
        if (!p.restPriorLogLikeUTD) {
            p.restPriorLogLikelihood = priorLogLik(p.restDataPoints);
            p.restPriorLogLikeUTD = true;
        }
        return p.restPriorLogLikelihood;
    }

    /**
     * Returns the log-likelihood contribution of a single partition:
     * its prior log-likelihood plus its log-weight applied once per data point.
     * Ghost partitions contribute zero. The result is cached on the partition.
     * @param p Partition to score.
     * @param v View on the data points.
     * @return Log-likelihood contribution of the partition.
     */
    @Override
    public double getLogLikelihood(OCPartition p, View v) {
        // Ghosts hold no data and contribute nothing.
        if (p.ghost) {
            return 0;
        }

        OCPartitionTrain pt = (OCPartitionTrain) p;

        // Refresh stale cached probabilities before they are read below.
        if (!pt.restLogEmpUTD) {
            calculateLogEmpProb(pt);
        }
        if (!pt.restLogPriorUTD) {
            calculateLogPriorProbDenom(pt);
        }

        double logWeight = getLogWeight(p);
        double totalWeight = logWeight * pt.restDataPoints.size();

        double result = getPriorLogLik(pt) + totalWeight;

        // Cache the value on the partition.
        pt.restLogLikelihood = result;
        pt.restLogLikeUTD = true;
        return result;
    }

    /**
     * Returns the log-likelihood of all data points in the cover.
     * The value is cached and only recomputed when the structure or the
     * amount of data has changed.
     * @param view View on data.
     * @return Log-likelihood for all data points in cover.
     */
    private double getLogLikelihood(View view) {
        if (structureChanged || datasizeChanged) {
            // Recompute by summing every partition's contribution.
            double total = 0;
            for (OCPartition part : partitionList) {
                total += getLogLikelihood(part, view);
            }
            logLikelihood = total;
        }
        structureChanged = false;
        datasizeChanged = false;
        return logLikelihood;
    }

    /**
     * Returns the bump-posterior score of the cover, recomputing it only when
     * the structure or the amount of data has changed since the last call.
     * @param view View on the data points.
     * @return Cached bump-posterior score.
     */
    @Override
    public double getBumpPostMDL(View view) {
        if (structureChanged || datasizeChanged) {
            // The score is currently the plain log-likelihood of the cover.
            bumpPostWeigth = getLogLikelihood(view);
            structureChanged = false;
            datasizeChanged = false;
        }
        return bumpPostWeigth;
    }

    /**
     * Returns the data points currently held directly by a partition.
     * @param partition Partition to get data from (must not be a ghost).
     * @return The partition's rest-data points.
     */
    @Override
    protected Collection<Integer> getData(OCPartitionTrain partition) {
        // Ghost partitions hold no data of their own.
        if (partition.ghost) {
            System.err.println("BaseXHPM.getData - ghost - should not happen!!!!!!!!!!!!");
        }

        return partition.restDataPoints;
    }
}
