package org.yagnus.stats.sampler.disrete;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;

/**
 *
 * @author hc.busy
 *
 * TODO: implement and compare performance
 * TODO: implement auto selection
 * TODO: implement cacheline sensitive autoselection(detect the sizes of various levels of cache)
 *
 * This class uses an array to store the sample and weights. Each call to draw draws a random number between zero and
 * the sum of the weights. Draw then traverses the list, adding numbers from left to right until it reaches the range in
 * which the generated random number falls. It returns the item corresponding to that segment on the real line.
 * 
 * This class is memory efficient: Linear in sample size instead of nlogn in the case of TreeArraySampler
 * However, the performance of the draw function is nondeterministic, but could be efficient after amortization
 * 
 * In addition, this implementation allows you to add to the sample space in constant time, whereas TreeArraySampler
 * uses O(nlogn) to add an element.
 * 
 * @param <BASETYPE> the type that the sampler will return.
 * 
 */
public class ListArraySampler<BASETYPE> extends ArraySampler<BASETYPE> {

    /**
     * A node in the doubly linked list of (sample, weight) pairs. Nodes sort
     * in descending weight order so heavier (more likely) samples sit near
     * the head, shortening the expected traversal per draw.
     */
    protected class Node implements Comparable<Node> {

        Node l, r; // left/right neighbors in the linked list
        double weight;
        BASETYPE samp;

        public Node(BASETYPE samp, double weight) {
            this.weight = weight;
            this.samp = samp;
        }

        /**
         * Unlinks this node from the linked list. If this node is the head,
         * the head advances to the right neighbor. (Previously the head was
         * set to null, which orphaned the remainder of the list and caused a
         * NullPointerException on the next draw.)
         */
        public void delete() {
            if (l != null) {
                l.r = r;
            }
            if (r != null) {
                r.l = l;
            }
            if (this == root) {
                root = r; // advance the head rather than dropping the list
            }
            l = null;
            r = null;
        }

        /**
         * Links {@code n} immediately to the right of this node.
         *
         * @param n the node to append
         */
        public void append(Node n) {
            this.r = n;
            n.l = this;
        }

        /**
         * Inserts {@code n} at the head of the list in constant time.
         * (Previously this method ignored its argument and linked the current
         * head to itself, corrupting the list into a self-cycle.)
         *
         * @param n the node that becomes the new head
         */
        public void prepend(Node n) {
            n.r = root;
            n.l = null;
            if (root != null) {
                root.l = n;
            }
            root = n;
        }

        /**
         * Orders nodes by descending weight.
         */
        public int compareTo(Node n) {
            return -Double.compare(weight, n.weight);
        }
    }
    /**stuff for each instance of the sampler**/
    Node root = null;                // head of the linked list of live nodes
    ArrayList<Node> rawData = null;  // backing store; used to rebuild on reset
    int len = 0;                     // number of nodes currently in the sample space
    double curTotal;                 // sum of the weights of all live nodes
    int numberSampled;               // draws taken since the last reset (WOR mode)
    boolean replacement;             // dispatch flag for addSample/removeSample

    /**
     * Indicate if this class is being used with replacement or without replacement.
     *
     * @param b true for with-replacement semantics
     */
    public void setReplacement(boolean b) {
        replacement = b;
    }

    public ListArraySampler(BASETYPE[] t, double[] weights) {
        super(t, weights);
    }

    public ListArraySampler(BASETYPE[] t, long[] weights) {
        super(t, weights);
    }

    public ListArraySampler(BASETYPE[] t, int[] weights) {
        super(t, weights);
    }

    public ListArraySampler(BASETYPE[] t) {
        super(t);
    }

    /**
     * Builds the sampler state from parallel arrays of samples and weights.
     *
     * @param t       the sample space
     * @param weights the weight of each sample; must be finite and non-negative
     * @throws IllegalArgumentException if the arrays differ in length, a weight
     *         is NaN/infinite/negative, or a sample is null
     */
    @Override
    protected void _init(BASETYPE[] t, double[] weights) {

        len = t.length;

        // Always allocate rawData so later synchronized(rawData) calls cannot
        // NPE on an empty sampler.
        rawData = new ArrayList<Node>(len);

        //check the parameter
        if (len == 0) {
            return;
        }
        if (len != weights.length) {
            throw new IllegalArgumentException("sample and weight size are different.");
        }

        // Validate and wrap every element, including index 0. (Previously t[0]
        // went straight into root without being added to rawData — so it was
        // lost on the first reset — and weights[0]/t[0] were never validated.)
        for (int i = 0; i < len; ++i) {
            if (Double.isNaN(weights[i]) || Double.isInfinite(weights[i]) || weights[i] < 0) {
                throw new IllegalArgumentException("The weights must be finite positive real numbers");
            }
            if (t[i] == null) {
                throw new IllegalArgumentException("The sample space cannot contain null objects, please use wrapper if this is necessary");
            }
            rawData.add(new Node(t[i], weights[i]));
        }

        //sort the array by probability (descending) so draws terminate early on average
        Collections.sort(rawData);

        _reset();
    }

    /**
     * Resets the sampler: restores every sample to the pool and relinks the
     * list in sorted order.
     */
    public void reset() {
        synchronized (rawData) {
            _reset();
        }
    }

    /**
     * Rebuilds the linked list from rawData and recomputes the running total.
     * Caller must hold the lock on rawData.
     */
    protected void _reset() {

        len = rawData.size();
        numberSampled = 0;
        curTotal = 0d;
        root = null;

        if (len == 0) {
            return;
        }

        root = rawData.get(0);
        // Clear stale links left over from earlier deletes/prepends.
        root.l = null;
        root.r = null;
        // Include the head's weight (previously omitted, which skewed every draw).
        curTotal = root.weight;

        Node trav = root;
        for (int i = 1; i < len; ++i) {
            Node nodei = rawData.get(i);
            nodei.r = null;
            trav.append(nodei);
            trav = nodei;
            curTotal += nodei.weight;
        }
    }

    /**
     * Draws one node with probability proportional to its weight: picks a
     * uniform point in [0, curTotal) and walks the list accumulating weights
     * until the running sum passes it.
     *
     * @return the drawn node, or null if the pool is empty or no positive
     *         weight remains
     */
    public Node _findSample() {
        if (len == 0 || root == null || curTotal <= 0) {
            // Without the curTotal guard an all-zero total would spin forever below.
            return null;
        }

        Node ret = null;

        do {
            double curSamp = rng().nextDouble() * curTotal;
            double curSum = 0;

            //loop traverses list, adding weights until they exceed the random number;
            //the node that pushed the sum past curSamp is the one returned.
            //trav != null also protects against floating-point rounding making
            //curSum fall slightly short of curTotal at the tail of the list.
            for (Node trav = root;
                    trav != null && curSum < curSamp;
                    trav = trav.r) {
                curSum += trav.weight;
                ret = trav;
            }
            // curSamp == 0 leaves ret null; redraw in that (rare) case.
        } while (ret == null);

        return ret;
    }

    /**
     * Takes something from the sample pool without replacement; the drawn
     * item is removed from the pool.
     *
     * @return the sample, or null if the sample space has been exhausted.
     */
    public BASETYPE sample_wor() {
        synchronized (rawData) {
            if (numberSampled == len) {
                return null;
            }
            Node n = _findSample();
            if (n == null) {
                return null;
            }
            n.delete();
            curTotal -= n.weight;
            numberSampled += 1;
            return n.samp;
        }
    }

    /**
     * Takes something from the sample pool with replacement; the pool is
     * unchanged.
     *
     * @return the sample, or null if the sample space is empty.
     */
    public BASETYPE sample_wr() {
        synchronized (rawData) {
            Node n = _findSample();
            if (n == null) {
                return null;
            }
            return n.samp;
        }
    }

    /**
     * Adds a sample for with-replacement use; rebuilds (and re-sorts) the list.
     *
     * @param t the sample to add
     * @param w its weight
     */
    public void addSample_wr(BASETYPE t, double w) {
        synchronized (rawData) {
            rawData.add(new Node(t, w));
            _reset();
        }
    }

    /**
     * Adds a sample for without-replacement use in constant time. A single
     * node is shared between rawData and the linked list. (Previously two
     * distinct nodes were created — so delete() could never reach the linked
     * copy — and len was not incremented, breaking the exhaustion check.)
     *
     * @param t the sample to add
     * @param w its weight
     */
    public void addSample_wor(BASETYPE t, double w) {
        synchronized (rawData) {
            Node n = new Node(t, w);
            rawData.add(n);//after we finish, the next call to _reset will sort and optimize
            if (root == null) {
                root = n;
            } else {
                root.prepend(n);//but for now, just add it to the head in constant time.
            }
            curTotal += w;
            len += 1;
        }
    }

    /**
     * Removes every occurrence of {@code t} from the without-replacement pool.
     * (The previous version looped on a condition that never changed — an
     * infinite loop — and compared each sample against its own Node, which is
     * always false, so nothing was ever removed.)
     *
     * @param t the sample to remove
     */
    public void removeSample_wor(BASETYPE t) {
        synchronized (rawData) {
            Node trav = root;
            while (trav != null) {
                Node next = trav.r; // capture before delete() clears links
                if (trav.samp.equals(t)) {
                    curTotal -= trav.weight;
                    len -= 1;
                    trav.delete(); // delete() advances root when needed
                    rawData.remove(trav);
                }
                trav = next;
            }
        }
    }

    /**
     * Removes every occurrence of {@code t} and rebuilds the list. rawData
     * holds Node wrappers, so we must match on Node.samp — the previous
     * rawData.remove(t) compared a BASETYPE against Nodes and never matched.
     *
     * @param t the sample to remove
     */
    public void removeSample_wr(BASETYPE t) {
        synchronized (rawData) {
            for (int i = rawData.size() - 1; i >= 0; --i) {
                if (rawData.get(i).samp.equals(t)) {
                    rawData.remove(i);
                }
            }
            _reset();
        }
    }

    /**
     * Removes every sample contained in {@code t} and rebuilds the list.
     * Matches on Node.samp for the same reason as removeSample_wr.
     *
     * @param t the samples to remove
     */
    public void removeSamples_wr(Collection<BASETYPE> t) {
        synchronized (rawData) {
            for (int i = rawData.size() - 1; i >= 0; --i) {
                if (t.contains(rawData.get(i).samp)) {
                    rawData.remove(i);
                }
            }
            _reset();
        }
    }

    /**
     * Adds a sample, dispatching on the replacement flag.
     */
    @Override
    public void addSample(BASETYPE t, double w) {
        if (replacement) {
            addSample_wr(t, w);
        } else {
            addSample_wor(t, w);
        }
    }

    /**
     * Removes a sample, dispatching on the replacement flag.
     */
    @Override
    public void removeSample(BASETYPE t) {
        if (replacement) {
            removeSample_wr(t);
        } else {
            removeSample_wor(t);
        }
    }

    /**
     * This function computes the average depth into the linked list we traverse per WR sample operation.
     *
     * Of course the amount of time is actually more dependent on the cache sizes at various levels, and the
     * condition of the distribution. (low entropy ==> faster sampling for this implementation but not for
     * the Tree sampler)
     *
     * @return the theoretic amortized operations per draw, or 0 for an
     *         empty/exhausted pool (avoids dividing by a zero total)
     */
    public double computeCurrentAmortizedCostPerDraw() {
        if (curTotal <= 0) {
            return 0d;
        }
        double avgOp = 0d;
        int depth = 1;
        for (Node trav = root; trav != null; trav = trav.r, depth += 1) {
            avgOp += depth * trav.weight / curTotal;
        }
        return avgOp;
    }
}
