package OrderedCoverPackage;

import DataSetPackage.ProductView;
import EstimatorPackage.Expressions.ComplexExpression;
import EstimatorPackage.Expressions.Expression;
import EstimatorPackage.Expressions.ListExpression;
import DataSetPackage.View;
import EstimatorPackage.Consts;
import EstimatorPackage.EstimatorExp;
import EstimatorPackage.Expressions.SimpleExpression;
import HierarchyPackage.HClass;
import HierarchyPackage.Hierarchy;
import HierarchyPackage.ProductHierarchy.ProductHClass;
import HierarchyPackage.ProductHierarchy.ProductHierarchy;
import OptionsManager.DiscountOptions;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;

/**
 * Delta to represent the addition of a partition to an existing OC. This delta is in itself an Ordered Cover and
 * implements all the methods for this.
 *
 * @author Martin Haulrich
 */
public class HPMDeltaAdd extends AbstractHPMDelta {

    /**
     * The partition that the new partition is added to.
     */
    protected final OCPartitionTrain parent;

    /**
     * The classification that the new partition should be based on.
     */
    private final HClass newPartition;

    /**
     * The data points that are moved from the parent-partition to the added partition.
     */
    private final Collection<Integer> movedDataPoints;

    /**
     * The new partition that has been added to the cover.
     */
    private OCPartitionTrain addedPartition;

    /**
     * Minimum partition size for this cover.
     * NOTE(review): stored but never read within this class - confirm whether it is still needed.
     */
    private int minCount;

    /**
     * Pseudo partition to hold information about the parent class in the new cover.
     */
    protected OCPartitionTrain newParent;

    /**
     * Is the cover degenerate.
     * NOTE(review): never assigned in this class, so {@link #isDegenerate()} always returns
     * false here - confirm whether that is intended.
     */
    private boolean degenerate;

    /**
     * Base-10 logarithm of the "MDL" discount option; -Infinity when that option is zero.
     * (Field name keeps its historical spelling because subclasses may reference it.)
     */
    protected double logMDLweigth;

    /**
     * Constructor for add-cover.
     *
     * @param hier The hierarchy the cover is based on.
     * @param parentCover The cover that this is a 'delta' of.
     * @param prior Estimator for prior-probabilities.
     * @param view View on datapoints.
     * @param parent Partition to add the new partition to.
     * @param newPartitionClass The class of the partition to be added.
     * @param dataPoints Datapoints in the partition to be added.
     * @param minCount Minimum number of datapoints allowed in partitions.
     * @param dataPointProbs Probabilities for all datapoints in the cover.
     * @param discountOptions Options holding the discount parameters ("d1", "d2", "d3+", "MDL").
     */
    public HPMDeltaAdd(Hierarchy hier, AbstractTrainOC parentCover, EstimatorExp prior, View view,
            OCPartitionTrain parent, HClass newPartitionClass, Collection<Integer> dataPoints, int minCount,
            List<Double> dataPointProbs, DiscountOptions discountOptions) {
        super(parentCover, hier, view, prior, dataPointProbs);

        this.parent = parent;
        this.newPartition = newPartitionClass;
        this.minCount = minCount;
        this.movedDataPoints = dataPoints;
        // NOTE(review): createCover is overridable and invoked from the constructor; a subclass
        // override would run before the subclass's own fields are initialised.
        createCover(discountOptions);

        logMDLweigth = Math.log10(discountOptions.getDoubleOption("MDL"));

        BumpPostMDL = calculateBumpPostMDL();
    }

    /**
     * Constructs the Add-delta-cover: builds the added partition, builds a pseudo partition
     * describing the parent after the datapoints have been moved out, and recomputes the
     * probability/likelihood statistics for both.
     *
     * @param discountOptions Options holding the back-off discounts ("d1", "d2", "d3+").
     */
    protected void createCover(DiscountOptions discountOptions) {

        // Make new partition based on the given class
        addedPartition = new OCPartitionTrain(newPartition, parentCover.nextID());
        addedPartition.parent = parent;
        addedPartition.restDataPoints = movedDataPoints;

        // Insert directly before the parent so the partition ordering is preserved.
        partitionList.add(partitionList.indexOf(parent), addedPartition);

        // The following partition is only used to keep relevant information about the
        // parent partition, and is not in itself a valid partition (note the negative ID).
        newParent = new OCPartitionTrain(parent.mainClass, 0 - parent.ID - 1);
        newParent.parent = parentCover.getParent(parent);
        newParent.children = new ArrayList<OCPartition>(parentCover.getChildren(parent));
        newParent.children.add(addedPartition);
        newParent.restDataPoints = new HashSet<Integer>(parentCover.getData(parent));
        newParent.restDataPoints.removeAll(movedDataPoints);

        // Pick the back-off discount according to how many datapoints were moved.
        double disc;
        switch (movedDataPoints.size()) {
            case 1:
                disc = discountOptions.getDoubleOption("d1");
                break;
            case 2:
                disc = discountOptions.getDoubleOption("d2");
                break;
            default:
                disc = discountOptions.getDoubleOption("d3+");
        }

        // We do not use getPartitionExpression(newParent) here because it does not work.
        // This is because of some problems with which parent is returned by the getParent
        // method - both returning 'parent' and 'newParent' causes trouble.
        Expression newParMain = newParent.mainExp;
        Collection<Expression> newParSubs = parentCover.subtractsExpression(parent);
        newParSubs.add(addedPartition.mainExp);
        Expression newParSubsE = new ListExpression(ListExpression.UNION, newParSubs);
        Expression newParE = new ComplexExpression(ComplexExpression.DIFFERENCE, newParMain, newParSubsE);
        double newParPriorProb = prior.getEstimate(newParE, hierarchy);

        newParent.inh = parent.inh;
        newParent.inhUTD = true;

        newParent.back = newParent.back + disc;

        newParent.backUTD = true;

        // Back-off mass scaled by how much prior probability the parent keeps
        // (both quantities are log10, hence the pow to leave log-space).
        double backOffFraction = newParPriorProb - parent.ptree;
        double b = newParent.back * Math.pow(10, backOffFraction);

        double logPseuCount = Math.log10(newParent.restDataPoints.size() + b);

        newParent.resLogPseuEmpProb = logPseuCount - Math.log10(parentCover.dataSize());
        newParent.restLogPseuEmpUTD = true;

        newParent.restLogPriorProbDenom = newParPriorProb;
        newParent.restLogPriorUTD = true;

        // The parent loses the prior log-likelihood of the points that were moved out.
        double removedDataPriorLogLik = priorLogLik(movedDataPoints);
        newParent.restPriorLogLikelihood = parentCover.getPriorLogLik(parent) - removedDataPriorLogLik;
        newParent.restPriorLogLikeUTD = true;

        newParent.restLogEmpProb = Math.log10((double) newParent.restDataPoints.size() / parentCover.dataSize());
        newParent.restLogEmpUTD = true;

        double weight = newParent.resLogPseuEmpProb - newParent.restLogPriorProbDenom;
        weight = weight * newParent.restDataPoints.size();

        newParent.restLogLikelihood = newParent.restPriorLogLikelihood + weight;
        newParent.restLogLikeUTD = true;

        changed.put(parent, newParent);

        // Calculate weight for the added partition.

        // First calculate empirical probability.
        addedPartition.restLogEmpProb = Math.log10((double) addedPartition.restDataPoints.size() / parentCover.dataSize());
        addedPartition.restLogEmpUTD = true;

        // Then calculate denominator of prior probability.
        PartitionExpressionTuple pet = getPartitionExpression(addedPartition);

        addedPartition.restLogPriorProbDenom = prior.getEstimate(pet.partitionExpression, hierarchy);

        addedPartition.restLogPriorUTD = true;

        // The tree prob is the same as the normal prob without children,
        // and since addedPartition has just been added it has no children.
        addedPartition.ptree = addedPartition.restLogPriorProbDenom;

        addedPartition.inh = newParent.back * Math.pow(10, addedPartition.ptree - newParent.ptree);
        addedPartition.inhUTD = true;

        addedPartition.back = addedPartition.inh - disc;
        addedPartition.backUTD = true;

        b = addedPartition.back;

        logPseuCount = Math.log10(addedPartition.restDataPoints.size() + b);

        addedPartition.resLogPseuEmpProb = logPseuCount - Math.log10(parentCover.dataSize());
        addedPartition.restLogPseuEmpUTD = true;

        double newPartLogWeight = addedPartition.resLogPseuEmpProb - addedPartition.restLogPriorProbDenom;

        // Set the weight for the total partition - this should never change after this.
        addedPartition.setTotalWeight(newPartLogWeight);

        // Calculate logLikelihood for points in the new partition.
        newPartLogWeight = newPartLogWeight * addedPartition.restDataPoints.size();
        addedPartition.restPriorLogLikelihood = priorLogLik(movedDataPoints);
        addedPartition.restPriorLogLikeUTD = true;

        addedPartition.restLogLikelihood = addedPartition.restPriorLogLikelihood + newPartLogWeight;

        addedPartition.restLogLikeUTD = true;

        // Add the partition to the map of partitions altered in this delta.
        changed.put(addedPartition, addedPartition);
    }

    /**
     * Note that the partitions returned by this method serve only as a list of all partitions.
     * The partitions themselves are NOT the partitions in the cover, so information from these
     * should always be accessed via the parentCover.
     */
    @Override
    protected List<OCPartition> partitionList() {
        return partitionList;
    }

    /**
     * Calculate the BumpPostMDL for this Delta-Cover by subtracting the old contributions
     * of the changed partitions and adding the recomputed ones.
     * @return The BumpPostMDL for the cover.
     */
    protected double calculateBumpPostMDL() {

        // Start from the BumpPostMDL of the previous cover.
        double altBumpPost = parentCover.getBumpPostMDL(view);

        for (OCPartition chi : changed.keySet()) {
            OCPartitionTrain ch = (OCPartitionTrain) chi;
            if (!ch.equals(addedPartition)) {
                // Replace the old log-likelihood contribution with the updated one.
                altBumpPost = altBumpPost - parentCover.getLogLikelihood(ch, view);
                altBumpPost = altBumpPost + ((OCPartitionTrain) changed.get(ch)).restLogLikelihood;
            }
        }

        // Add score for the new partition.
        altBumpPost = altBumpPost + ((OCPartitionTrain) changed.get(addedPartition)).restLogLikelihood;

        // The size of the cover has increased by one, so apply the MDL penalty -
        // unless the "MDL" option was zero, in which case logMDLweigth is -Infinity
        // and the penalty is skipped.
        double res = altBumpPost;
        if (!Double.isInfinite(logMDLweigth)) {
            res = res - (Math.log10(Math.sqrt(dataSize())) + logMDLweigth);
        }
        return res;
    }

    /**
     * Return partition that has been created and added in this delta.
     * @return Partition that has been added.
     */
    public OCPartitionTrain getAddedPartition() {
        return addedPartition;
    }

    /**
     * Return the parent as it was before a new partition was added.
     * @return The parent to which the partition was added, as it was before the addition.
     */
    OCPartitionTrain getOldParent() {
        return parent;
    }

    /**
     * Return datapoints that were moved into the new partition.
     * @return Datapoints that were moved into the new partition.
     */
    Collection<Integer> getMovedDataPoints() {
        return movedDataPoints;
    }

    /**
     * Check if the cover is degenerate.
     * @return True if the cover is degenerate, false if not.
     */
    boolean isDegenerate() {
        return degenerate;
    }

    /**
     * Return the log-likelihood of a partition: the recomputed value for partitions changed
     * by this delta, otherwise the value from the parent cover.
     */
    @Override
    protected double getLogLikelihood(OCPartition partition, View view) {
        OCPartitionTrain changedPart = (OCPartitionTrain) changed.get(partition);
        if (changedPart != null) {
            return changedPart.restLogLikelihood;
        }

        return parentCover.getLogLikelihood(partition, view);
    }

    /** This delta contains exactly one partition more than its parent cover. */
    @Override
    public int OCsize() {
        return parentCover.OCsize() + 1;
    }

    /**
     * Return the parent of a partition, taking the structural changes of this delta
     * into account before delegating to the parent cover.
     */
    @Override
    protected OCPartition getParent(OCPartition partition) {

        if (partition.equals(addedPartition)) {
            return parent;
        }
        if (partition.equals(newParent)) {
            return newParent.parent;
        }

        return parentCover.getParent(partition);
    }

    /** The top partition is unchanged by an add-delta. */
    @Override
    public OCPartition getTop() {

        return parentCover.getTop();
    }

    /**
     * Return the children of a partition: the parent's children now include the added
     * partition, and the added partition itself has none.
     */
    @Override
    protected List<OCPartition> getChildren(OCPartition partition) {
        if (partition.equals(parent)) {
            return newParent.children;
        }

        if (partition.equals(addedPartition)) {
            return Collections.<OCPartition>emptyList();
        }
        return parentCover.getChildren(partition);
    }

    /**
     * Return the datapoints of a partition: the parent keeps its points minus the moved
     * ones, the added partition holds the moved points.
     */
    @Override
    protected Collection<Integer> getData(OCPartitionTrain partition) {
        if (partition.equals(parent)) {
            return newParent.restDataPoints;
        }

        if (partition.equals(addedPartition)) {
            return movedDataPoints;
        }

        return parentCover.getData(partition);
    }

    /** Returns the value precomputed in the constructor; the view argument is ignored. */
    @Override
    public double getBumpPostMDL(View view) {
        return BumpPostMDL;
    }

    /**
     * Return the log-weight of a partition, using the recomputed statistics for partitions
     * changed by this delta.
     */
    @Override
    protected double getLogWeight(OCPartition partition) {
        OCPartition part = changed.get(partition);
        if (part != null) {
            return part.resLogPseuEmpProb - part.restLogPriorProbDenom;
        }

        return parentCover.getLogWeight(partition);
    }

    /**
     * Return the prior log-likelihood of a partition, using the recomputed statistics for
     * partitions changed by this delta.
     */
    @Override
    protected double getPriorLogLik(OCPartitionTrain partition) {
        OCPartitionTrain part = (OCPartitionTrain) changed.get(partition);
        if (part != null) {
            return part.restPriorLogLikelihood;
        }

        return parentCover.getPriorLogLik(partition);
    }

    /**
     * Compute the summed conditional log-probability of the added partition's datapoints,
     * conditioning each point on its class with the last sub-hierarchy replaced by that
     * hierarchy's top class. (Method name keeps its historical misspelling for existing
     * callers.)
     *
     * @param view View on datapoints; must be a ProductView matching the ProductHierarchy.
     * @return Sum of conditional log10 probabilities over the added partition's datapoints.
     */
    public double getConditonalProb(View view) {

        // Cache one estimate per conditioning class, since many datapoints share one.
        Map<HClass, Double> condClassProbs = new HashMap<HClass, Double>();

        ProductHierarchy phier = (ProductHierarchy) hierarchy;
        View[] views = ((ProductView) view).getViews();
        Hierarchy[] hiers = phier.getHierarchies();
        HClass lastTop = hiers[hiers.length - 1].getTop();

        double condLogProb = 0;
        double weight = getLogWeight(addedPartition);
        for (Integer did : addedPartition.restDataPoints) {
            double priorProb = priorDataPointProbs.get(did);
            double uncond = priorProb + weight;
            ProductHClass condClass = createCondClass(did, phier, hiers, views, lastTop);
            Double condProb = condClassProbs.get(condClass);
            if (condProb == null) {
                condProb = getEstimate(condClass);
                condClassProbs.put(condClass, condProb);
            }
            // Conditional probability in log-space: log P(x) - log P(cond).
            condLogProb += uncond - condProb;
        }

        return condLogProb;
    }

    /**
     * Conditional log-probability of the added partition's main class, conditioned on the
     * same product class with its last component replaced by the top class of the last
     * sub-hierarchy.
     * @return Log10 of P(mainClass) / P(conditioning class).
     */
    public double getCond() {
        double uncond = getEstimate(addedPartition.mainClass);

        ProductHClass pc = (ProductHClass) addedPartition.mainClass;
        HClass[] classes = new HClass[pc.getID().length];
        System.arraycopy(pc.getID(), 0, classes, 0, pc.getID().length);
        classes[classes.length - 1] = ((ProductHierarchy) hierarchy).getHierarchies()[classes.length - 1].getTop();
        ProductHClass newTopClass = new ProductHClass((ProductHierarchy) hierarchy, classes);
        double conditioning = getEstimate(newTopClass);
        return uncond - conditioning;
    }

    /**
     * Build the conditioning class for a datapoint: the point's class in every
     * sub-hierarchy except the last, which is replaced by that hierarchy's top class.
     */
    private ProductHClass createCondClass(Integer did, ProductHierarchy phier,
            Hierarchy[] hiers, View[] views, HClass lastTop) {

        HClass[] classes = new HClass[hiers.length];
        for (int i = 0; i < hiers.length - 1; i++) {
            classes[i] = hiers[i].newClass(did, views[i]);
        }
        classes[hiers.length - 1] = lastTop;
        return new ProductHClass(phier, classes);
    }

    /**
     * Estimate the log10-probability of class {@code c} under this cover by summing, in
     * log-space, the weighted prior probability of the intersection of {@code c} with each
     * collected (non-ghost) partition.
     */
    private double getEstimate(HClass c) {

        Expression cexp = new SimpleExpression(c);

        double sum = Consts.logZero;
        List<OCPartition> parts = new ArrayList<OCPartition>();
        OCPartition top = parentCover.top;
        findPartitionsRec(c, top, parts);
        for (OCPartition p : parts) {
            if (!p.ghost) {
                double w = getLogWeight(p);
                Expression pmexp = p.mainExp;

                // The partition's effective region: its main class minus the union
                // of the classes it subtracts.
                Expression psexp = new ListExpression(ListExpression.UNION, subtractsExpression(p));
                Expression pexp = new ComplexExpression(ComplexExpression.DIFFERENCE, pmexp, psexp);
                Expression intersect = new ComplexExpression(ComplexExpression.INTERSECTION, pexp, cexp);

                double priorProb = prior.getEstimate(intersect, hierarchy);

                // NaN is treated as an empty intersection; logZero contributions are skipped.
                if (!Double.isNaN(priorProb) && priorProb > Consts.logZero) {
                    // Numerically stable log-space addition: sum = log10(10^sum + 10^pr).
                    double pr = priorProb + w;
                    double l = Math.max(sum, pr);
                    sum = l + Math.log10(Math.pow(10, sum - l) + Math.pow(10, pr - l));
                }
            }
        }

        return sum;
    }

    /**
     * Recursively collect the partitions whose main class may intersect {@code c}, starting
     * from {@code current}. Leaf partitions terminate the recursion; internal partitions are
     * also added at the end so their rest-sets are covered.
     */
    private void findPartitionsRec(HClass c, OCPartition current, List<OCPartition> res) {

        if (getChildren(current).isEmpty()) {
            res.add(current);
            return;
        }
        for (OCPartition ch : getChildren(current)) {

            // NOTE(review): the test uses current.mainClass, not ch.mainClass, so the same
            // condition is re-evaluated for every child - confirm whether that is intended.
            if (!hierarchy.getIntersection(c, current.mainClass).isEmpty()) {
                findPartitionsRec(c, ch, res);
            }
        }

        // For now we just add the rest-partition, hoping that it will be filtered out later.
        // When the ClassAlgebra improves we could check if the intersection is empty first.
        res.add(current);
    }
}
