package LibDL.eval.classification;

import LibDL.core.*;
import LibDL.eval.SupervisedEvaluator;

import java.util.*;
import java.util.stream.Collectors;

/**
 * Base class for classification metrics.
 *
 * <p>Targets can be supplied in two forms:
 * <ul>
 *   <li><b>raw</b> — user-domain labels ({@code List<T>} for single-label,
 *       {@code List<List<T>>} for multilabel), lazily encoded to tensors using the
 *       index of each label in {@link #getRawLabels()};</li>
 *   <li><b>direct</b> — pre-encoded {@link Tensor}s set via {@link #setTruth}/{@link #setPred}.</li>
 * </ul>
 * Subclasses implement {@link #core(Tensor, Tensor)} to compute the actual metric.
 *
 * <p>NOTE(review): this class is not thread-safe — getters such as {@link #getTruth()}
 * mutate cached fields.
 */
public abstract class AbstractClassificationEvaluator<T> implements SupervisedEvaluator {
    // Raw (user-domain) single-label targets; encoded on demand by convertSingleLabel().
    private List<T> rawTruth;
    private List<T> rawPred;
    // Raw multilabel targets: one list of labels per sample; encoded by convertMultilabel().
    private List<? extends List<T>> rawMultilabelTruth;
    private List<? extends List<T>> rawMultilabelPred;
    // Label vocabulary; a raw label's index in this list is its numeric encoding.
    private List<T> rawLabels;
    // Encoded targets (either directly assigned or converted from the raw form).
    private Tensor truth;
    private Tensor pred;
    private Tensor sampleWeight;
    // Raw label treated as the positive class for binary classification.
    private T rawPosValue;
    // Value a metric should substitute when a division by zero would occur.
    private double zeroDivValue = 0;
    /**
     * direction used when calculating the confusion matrix
     * (true = reduce per sample, false = reduce per label)
     */
    private boolean sampleWise = false;
    /**
     * raw == false means truth and pred are directly assigned as tensors
     */
    private boolean raw = false;
    /**
     * controls whether truth and pred are converted from raw- or rawMultilabel-
     * truth and pred.
     * When the user sets a rawMultilabel- target this flag becomes true,
     * otherwise false.
     */
    private boolean multilabel = false;

    /**
     * Convenience overload: assigns the given encoded tensors (switching off raw
     * mode) and evaluates.
     *
     * @param truth ground-truth tensor
     * @param pred  prediction tensor
     * @return metric value computed by {@link #core(Tensor, Tensor)}
     */
    @Override
    public double evaluate(Tensor truth, Tensor pred){
        setPred(pred);
        setTruth(truth);
        return evaluate();
    }

    /**
     * Evaluates using the currently configured targets (raw or direct).
     */
    @Override
    public double evaluate(){
        return core(getTruth(),getPred());
    }

    /**
     * Computes the metric from encoded targets. Implemented by subclasses.
     *
     * @param truth encoded ground truth
     * @param pred  encoded predictions
     * @return metric value
     */
    protected abstract double core(Tensor truth, Tensor pred);

    /*
     ********** tool methods **********
     */
    /**
     * Encodes a raw single-label target list as a 1-d integer tensor of label indexes.
     * Precondition: labels must be specified via {@link #setRawLabels(List)}.
     * Labels absent from the vocabulary encode as -1; this is only reported on
     * stderr, not rejected (kept as a soft warning — callers may rely on it).
     *
     * @param target raw labels, one per sample
     * @return 1-d tensor of label indexes
     */
    private Tensor convertSingleLabel(List<T> target){
        StdVectorLong targetList = new StdVectorLong(target.stream()
                .map(t -> rawLabels.indexOf(t)).collect(Collectors.toList()));
        if (targetList.contains(-1)) {
            System.err.println("There contains unspecified labels in raw truth");
        }
        return functional.as_tensor(targetList);
    }

    /**
     * Encodes a raw multilabel target as a (samples x labels) 0/1 indicator matrix.
     * Precondition: labels must be specified via {@link #setRawLabels(List)}.
     *
     * @param target raw labels, one list per sample
     * @return FLOAT64 indicator tensor of shape (target.size(), rawLabels.size())
     */
    private Tensor convertMultilabel(List<? extends List<T>> target){
        Tensor indicator = functional.zeros(new StdVectorLong(new int[]{target.size(), getRawLabels().size()})).to(Dtype.FLOAT64);
        for (int dim1 = 0;dim1<target.size();dim1++){
            for (T label:target.get(dim1)){
                int dim2 = getRawLabels().indexOf(label);
                StdVectorTensor indexes = new StdVectorTensor(Arrays.asList(
                        functional.as_tensor(dim1),
                        functional.as_tensor(dim2)
                ));
                indicator.index_put_(indexes,functional.as_tensor(1).to(Dtype.FLOAT64));
            }
        }
        return indicator;
    }

    // NOTE(review): MULTILABLE_INDICATOR is a typo for MULTILABEL_INDICATOR, but the
    // enum is protected API referenced by subclasses, so the name is kept.
    protected enum TargetType{
        BINARY, MULTICLASS, MULTILABLE_INDICATOR, NONE;
    }
    /**
     * the type of classification, which is
     *  - 'binary': `y` contains <= 2 unique values and is 1d Tensor
     *  - 'multiclass': `y` contains more than two unique values,
     *              and is 1d Tensor.
     *  - 'multilabel-indicator': `y` is a label indicator matrix, a Tensor
     *              of two dimensions with at least two columns, and at most 2 unique
     *              values.
     *   - 'none': `y` is array-like but none of the above. NONE means this target
     *              is checked, and if the type of a target is null, that means
     *              it has never been checked.
     * @param y target
     * @return type of target
     */
    protected TargetType typeOfTarget(Tensor y){
        Tensor unique = functional.unique(y,false).get(0);
        if (y.sizes().size() == 1 && unique.size(0)<=2){
            return TargetType.BINARY;
        } else if (y.sizes().size() == 1) {
            return TargetType.MULTICLASS;
        } else if (y.sizes().size() == 2 && unique.size(0) <= 2 && y.size(1) >= 2){
            return TargetType.MULTILABLE_INDICATOR;
        }
        return TargetType.NONE;
    }

    /**
     * check the type of classification and ensure the type is valid
     * @param truth ground truth
     * @param pred predicted list
     * @return type of classification decided with the types of truth and pred
     * @throws IllegalArgumentException if the combined type resolves to NONE
     */
    protected TargetType checkTarget(Tensor truth, Tensor pred){
        // todo check the shape
        // belong to the same type of classification
        // have the same length
        TargetType truthType = typeOfTarget(truth), predType = typeOfTarget(pred);
        TargetType type = null;

        // get type value
        if (truthType.equals(predType)){
            type = truthType;
        }else if (truthType.equals(TargetType.MULTILABLE_INDICATOR)
                || truthType.equals(TargetType.NONE)
                || predType.equals(TargetType.MULTILABLE_INDICATOR)
                || predType.equals(TargetType.NONE)){
            // indicator matrices and NONE cannot mix with 1-d targets
            type = TargetType.NONE;
        }else {
            // mix of BINARY and MULTICLASS widens to MULTICLASS
            type = TargetType.MULTICLASS;
        }

        // verify type: indicator matrices must agree on the number of columns
        if (type.equals(TargetType.MULTILABLE_INDICATOR)
                && (truth.size(1)!=pred.size(1))){
            type = TargetType.NONE;
        }
        // BINARY / indicator targets may hold at most two distinct values overall
        if (type.equals(TargetType.BINARY)
                || type.equals(TargetType.MULTILABLE_INDICATOR)) {
            Set<Double> uniqueClass = new HashSet<>(functional.unique(truth, false).get(0).tolist_double());
            uniqueClass.addAll(functional.unique(pred, false).get(0).tolist_double());
            if (uniqueClass.size() > 2) {
                if (type == TargetType.BINARY) {
                    type = TargetType.MULTICLASS;
                } else {
                    type = TargetType.NONE;
                }
            }
        }

        if (type.equals(TargetType.NONE)){
            throw new IllegalArgumentException("Only BINARY, MULTICLASS and MULTILABLE_INDICATOR are supported types of classification");
        }
        return type;
    }

    /**
     * Weighted sum (or weighted average) of per-sample scores along an axis.
     *
     * @param sampleScore 1- or 2-dim tensor of scores
     * @param sampleWeight 1-dim tensor of weights
     * @param normalize whether to return the weighted average (true) or the
     *                  weighted sum (false) of the scores
     * @param axis axis to reduce over
     * @return reduced tensor (scalar for 1-dim input)
     */
    protected Tensor weightedSum(Tensor sampleScore, Tensor sampleWeight, boolean normalize, int axis){
        if (normalize){
            // dividing the weights by their sum turns the sum into an average
            sampleWeight = sampleWeight.div(new Scalar(sampleWeight.sum().item().to_double()));
        }
        if (sampleScore.sizes().size() == 1){
            return sampleScore.mul(sampleWeight).sum(axis);
        }else{
            // broadcast the weight vector across the score matrix's columns
            return sampleScore.mul(sampleWeight.unsqueeze(1)).sum(axis);
        }
    }

    /**
     * check whether the inputs have consistent length (same number of samples
     * along dim 0)
     * @param truth ground truth
     * @param pred predictions
     * @param sampleWeight per-sample weights
     * @throws IllegalArgumentException if the sample counts differ
     */
    protected void checkConsistentLength(Tensor truth, Tensor pred, Tensor sampleWeight){
        if (truth.size(0) != pred.size(0) || truth.size(0) != sampleWeight.size(0)){
            throw new IllegalArgumentException("The inputs have inconsistent number of samples");
        }
    }

    /**
     * generate unique labels
     * tensors need to:
     *  - have the same type, or mix of BINARY & MULTICLASS, and the type is not NONE
     *  - if they are MULTILABEL_INDICATOR or BINARY, all of their values can have only 2 unique values
     *  - if they are MULTILABEL_INDICATOR, lengths of all samples are expected to be the same.
     *
     * @return sorted list of distinct label values (column indexes for indicator targets)
     * @throws IllegalArgumentException if the type is not one of the three supported types
     */
    protected List<Double> uniqueLabels (Tensor truth, Tensor pred, TargetType type){
        List<Double> ans;
        if (type.equals(TargetType.BINARY) || type.equals(TargetType.MULTICLASS)){
            Set<Double> labelSet = new HashSet<>(functional.unique(truth, false).get(0).tolist_double());
            labelSet.addAll(functional.unique(pred,false).get(0).tolist_double());
            ans = new ArrayList<>(labelSet);
        }else if (type.equals(TargetType.MULTILABLE_INDICATOR)){
            // for indicator matrices the "labels" are simply the column indexes
            ans = functional.arange(0,truth.size(1)).to(Dtype.FLOAT64).tolist_double();
        } else {
            throw new IllegalArgumentException(type+" is not supported");
        }
        ans.sort(Double::compareTo);
        return ans;
    }

    /**
     * Computes a per-label (or, for multilabel targets with {@link #isSampleWise()},
     * per-sample) confusion matrix.
     *
     * @return tensor of shape (labels, 2, 2), each 2x2 slice laid out as
     *         [[tn, fp], [fn, tp]] (from the final transpose of the
     *         tp/fn/fp/tn stacking below)
     * @throws IllegalArgumentException on unsupported target types, inconsistent
     *         lengths, out-of-range multilabel labels, or samplewise mode with
     *         non-multilabel targets
     */
    public Tensor getMultilabelConfusionMatrix(){
        Tensor truth = getTruth();
        Tensor pred = getPred();
        Tensor sampleWeight = getSampleWeight();
        List<Double> labels = getLabels();

        TargetType type = checkTarget(truth, pred);
        if (!Arrays.asList(TargetType.BINARY, TargetType.MULTICLASS, TargetType.MULTILABLE_INDICATOR).contains(type)){
            throw new IllegalArgumentException(type + " is not supported.");
        }
        List<Double> presentLabel = uniqueLabels(truth,pred,type);
        if (labels == null) {
            labels = presentLabel;
        } else {
            // extend the user-specified label list with any labels actually present
            for (double l : presentLabel){
                if (!labels.contains(l)){
                    labels.add(l);
                }
            }
        }
        if (type.equals(TargetType.BINARY) && labels.size() > 2){
            type = TargetType.MULTICLASS;
        }
        if (sampleWeight == null){
            // default: every sample weighs 1
            sampleWeight = functional.ones(truth.size(0)).to(Dtype.FLOAT64);
        }
        checkConsistentLength(truth, pred, sampleWeight);

        // compute tp, fp, tn, fn
        StdVectorDouble union = new StdVectorDouble();
        if (type.equals(TargetType.MULTILABLE_INDICATOR)){
            // check the labels: they are column indexes and must lie in [0, n labels)
            labels = labels.stream().map(n->(double)n.intValue()).collect(Collectors.toList());
            if (Collections.max(labels)>Collections.max(presentLabel) || Collections.min(labels)<0){
                throw new IllegalArgumentException("All labels must be in [0, n labels) for multilabel targets.");
            }
            Tensor index = functional.as_tensor(new StdVectorDouble(labels)).to(Dtype.INT64);
            truth = truth.index_select(1,index);
            pred = pred.index_select(1,index);

            // convert values of truth & pred to 0-1
            // (equal_indices appears to yield an element-wise equality mask — TODO confirm)
            truth = functional.equal_indices(truth, new Scalar(getPosValue())).to(Dtype.FLOAT64);
            pred = functional.equal_indices(pred, new Scalar(getPosValue())).to(Dtype.FLOAT64);

            // calculate: samplewise reduces across labels (axis 1) with unit weights,
            // labelwise reduces across samples (axis 0) with the sample weights
            int sumAxis = isSampleWise()?1:0;
            Tensor weight = isSampleWise()?functional.ones(truth.size(1)):sampleWeight;

            Tensor tntpTensor = weightedSum(functional.equal_indices(truth,pred).to(Dtype.FLOAT64),
                    weight, false, sumAxis);
            // truth + pred == 2 exactly where both are positive
            Tensor tpTensor = weightedSum(functional.equal_indices(truth.add(pred),new Scalar((double) 2)).to(Dtype.FLOAT64),
                    weight, false, sumAxis);
            Tensor tpfnTensor = weightedSum(truth,weight,false, sumAxis);
            Tensor tpfpTensor = weightedSum(pred,weight,false,sumAxis);

            union.addAll(tpTensor.tolist_double());                         // tp
            union.addAll(tpfnTensor.add(tpTensor.mul(-1)).tolist_double()); // fn
            union.addAll(tpfpTensor.add(tpTensor.mul(-1)).tolist_double()); // fp
            union.addAll(tntpTensor.add(tpTensor.mul(-1)).tolist_double()); // tn
        }
        else {
            if (isSampleWise()){
                throw new IllegalArgumentException("Samplewise multilabel confusion matrix " +
                        "is not supported in " + type + " classification.");
            }
            StdVectorDouble tpList = new StdVectorDouble();
            StdVectorDouble fpList = new StdVectorDouble();
            StdVectorDouble tnList = new StdVectorDouble();
            StdVectorDouble fnList = new StdVectorDouble();

            // BINARY: a single 2x2 matrix for the positive class;
            // MULTICLASS: one-vs-rest matrix per label
            List<Double> posLabels = type.equals(TargetType.BINARY)? Collections.singletonList(getPosValue()):labels;
            for (double label:posLabels){
                Tensor tpfnTensor = functional.equal_indices(truth,new Scalar(label)).to(Dtype.FLOAT64);
                Tensor tpfpTensor = functional.equal_indices(pred,new Scalar(label)).to(Dtype.FLOAT64);
                double tp_fn = weightedSum(tpfnTensor,sampleWeight,false,-1).item().to_double();
                double tp_fp = weightedSum(tpfpTensor,sampleWeight,false,-1).item().to_double();
                // mask sum == 2 where both truth and pred hit the label (true positive)
                double tp = weightedSum(functional.equal_indices(tpfnTensor.add(tpfpTensor),
                        new Scalar(2)).to(Dtype.FLOAT64),sampleWeight,false,-1).item().to_double();
                double fn = tp_fn - tp;
                double fp = tp_fp - tp;
                // mask sum == 0 where neither hits the label (true negative)
                double tn = weightedSum(functional.equal_indices(tpfnTensor.add(tpfpTensor),
                        new Scalar(0)).to(Dtype.FLOAT64),sampleWeight,false,-1).item().to_double();
                tpList.add(tp);
                tnList.add(tn);
                fnList.add(fn);
                fpList.add(fp);
            }
            union.addAll(tpList);
            union.addAll(fnList);
            union.addAll(fpList);
            union.addAll(tnList);
        }
        // stack (tp, fn, fp, tn) as rows, transpose so each label owns one row,
        // then reshape each row into a 2x2 matrix
        return functional.as_tensor(union)
                .reshape(new StdVectorLong(new int[]{4,union.size() / 4})).t()
                .reshape(new StdVectorLong(new int[]{union.size() / 4,2,2}));
    }

    /**
     * Computes a standard (labels x labels) confusion matrix; rows are truth,
     * columns are predictions, entries are summed sample weights. Only BINARY
     * and MULTICLASS targets are supported.
     *
     * @return FLOAT-valued tensor of shape (labels, labels)
     * @throws IllegalArgumentException on unsupported target types, inconsistent
     *         lengths, or when none of the specified labels occurs in the truth
     */
    public Tensor getConfusionMatrix(){
        Tensor truth = getTruth();
        Tensor pred = getPred();
        List<Double> labels = getLabels();

        TargetType type = checkTarget(truth, pred);
        if (!Arrays.asList(TargetType.BINARY,TargetType.MULTICLASS).contains(type)){
            throw new IllegalArgumentException(type+" is not supported.");
        }

        // prepare labels: default to those present; dedupe (and sort) user-specified lists
        if (labels == null){
            labels = uniqueLabels(truth,pred,type);
        } else if (new HashSet<>(labels).size() < labels.size()){
            labels = new ArrayList<>(new HashSet<>(labels));
            labels.sort(Double::compareTo);
        }
        int cntr = 0;
        Set<Double> uniqueTruth = new HashSet<>(functional.unique(truth, false)
                .get(0).tolist_double());
        for (double s:labels) {
            if (uniqueTruth.contains(s)){
                cntr ++;
            }
        }
        if (cntr == 0)
            throw new IllegalArgumentException("At least one label specified must be in y_true");
        // prepare sampleWeight
        if (getSampleWeight() == null) {
            setSampleWeight(functional.ones(truth.size(0)).to(Dtype.FLOAT64));
        }
        checkConsistentLength(truth, pred, getSampleWeight());

        // convert values to indexes and exclude those not existing in label list
        int truthLength = truth.size(0);
        double[] cmArray = new double[labels.size()*labels.size()];
        for (int i = 0;i<truthLength;i++) {
            int truthIdx = labels.indexOf(truth.get(i).item().to_double());
            int predIdx = labels.indexOf(pred.get(i).item().to_double());
            if (truthIdx >= 0 && predIdx >= 0) {
                double weight = getSampleWeight().get(i).item().to_double();
                cmArray[truthIdx*labels.size()+predIdx] += weight;
            }
        }
        return functional.as_tensor(new StdVectorDouble(cmArray))
                .reshape(new StdVectorLong(new int[]{labels.size(),labels.size()}));
    }

    /*
     ********** getter & setter **********
     */
    public boolean isRaw() {
        return raw;
    }

    public boolean isSampleWise() {
        return sampleWise;
    }

    public AbstractClassificationEvaluator<T> setSampleWise(boolean sampleWise) {
        this.sampleWise = sampleWise;
        return this;
    }

    /**
     * Numeric encoding of the positive class: 1 for direct or multilabel
     * targets, otherwise the index of {@link #getRawPosValue()} in the label
     * vocabulary.
     * NOTE(review): returns -1 when raw single-label mode is active but no
     * positive value has been set — callers should set one first.
     */
    public double getPosValue(){
        if (!isRaw()||isMultilabel())
            return 1;
        return getRawLabels().indexOf(getRawPosValue());
    }

    public T getRawPosValue() {
        return rawPosValue;
    }

    /**
     * @throws IllegalArgumentException if the value is not in the label vocabulary
     */
    public AbstractClassificationEvaluator<T> setRawPosValue(T rawPosValue) {
        if (!getRawLabels().contains(rawPosValue)){
            throw new IllegalArgumentException("There exists no label named as " + rawPosValue + ".");
        }
        this.rawPosValue = rawPosValue;
        return this;
    }

    public List<T> getRawTruth() {
        return rawTruth;
    }

    /** Switches the evaluator into raw single-label mode for the truth side. */
    public AbstractClassificationEvaluator<T> setRawTruth(List<T> rawTruth) {
        this.multilabel = false;
        this.raw = true;
        this.rawTruth = rawTruth;
        return this;
    }

    public List<T> getRawPred() {
        return rawPred;
    }

    /** Switches the evaluator into raw single-label mode for the prediction side. */
    public AbstractClassificationEvaluator<T> setRawPred(List<T> rawPred) {
        this.multilabel = false;
        this.raw = true;
        this.rawPred = rawPred;
        return this;
    }

    public List<? extends List<T>> getRawMultilabelTruth() {
        return rawMultilabelTruth;
    }

    /** Switches the evaluator into raw multilabel mode for the truth side. */
    public AbstractClassificationEvaluator<T> setRawMultilabelTruth(List<? extends List<T>> rawMultilabelTruth) {
        this.multilabel = true;
        this.raw = true;
        this.rawMultilabelTruth = rawMultilabelTruth;
        return this;
    }

    public List<? extends List<T>> getRawMultilabelPred() {
        return rawMultilabelPred;
    }

    /** Switches the evaluator into raw multilabel mode for the prediction side. */
    public AbstractClassificationEvaluator<T> setRawMultilabelPred(List<? extends List<T>> rawMultilabelPred) {
        this.multilabel = true;
        this.raw = true;
        this.rawMultilabelPred = rawMultilabelPred;
        return this;
    }

    /**
     * Returns the encoded truth tensor, converting from the raw form first when
     * raw mode is active.
     */
    public Tensor getTruth() {
        if (this.raw && this.multilabel && getRawMultilabelTruth() != null){
            truth = convertMultilabel(getRawMultilabelTruth());
        }else if (this.raw && !this.multilabel && getRawTruth() != null){
            truth = convertSingleLabel(getRawTruth());
        }
        return truth;
    }

    public AbstractClassificationEvaluator<T> setTruth(Tensor truth) {
        this.truth = truth;
        this.raw = false;
        return this;
    }

    /**
     * Returns the encoded prediction tensor, converting from the raw form first
     * when raw mode is active.
     * Fix: previously ignored the {@code raw} flag (unlike {@link #getTruth()}),
     * so a stale raw prediction could shadow a tensor set via {@link #setPred}.
     */
    public Tensor getPred() {
        if (this.raw && this.multilabel && getRawMultilabelPred() != null){
            pred = convertMultilabel(getRawMultilabelPred());
        }else if (this.raw && !this.multilabel && getRawPred() != null){
            pred = convertSingleLabel(getRawPred());
        }
        return pred;
    }

    public AbstractClassificationEvaluator<T> setPred(Tensor pred) {
        this.pred = pred;
        this.raw = false;
        return this;
    }

    public Tensor getSampleWeight() {
        return sampleWeight;
    }

    public AbstractClassificationEvaluator<T> setSampleWeight(Tensor sampleWeight) {
        this.sampleWeight = sampleWeight;
        return this;
    }

    public boolean isMultilabel() {
        return multilabel;
    }

    public List<T> getRawLabels() {
        return rawLabels;
    }

    /**
     * Sets the label vocabulary.
     * Fix: validation now happens BEFORE assignment, so a rejected duplicate
     * list no longer leaves the evaluator in an invalid state.
     *
     * @throws IllegalArgumentException if the list contains duplicates
     */
    public AbstractClassificationEvaluator<T> setRawLabels(List<T> rawLabels) {
        if (new HashSet<>(rawLabels).size() != rawLabels.size()){
            throw new IllegalArgumentException("Labels must be different with every other.");
        }
        this.rawLabels = rawLabels;
        return this;
    }

    /**
     * Numeric encodings of the vocabulary labels, or null when not in raw mode.
     * Since the vocabulary is guaranteed duplicate-free, each label's encoding
     * is simply its position, i.e. 0..n-1 (replaces the former O(n^2) indexOf scan).
     */
    public List<Double> getLabels(){
        if (!isRaw())
            return null;
        List<Double> labels = new ArrayList<>(getRawLabels().size());
        for (int i = 0; i < getRawLabels().size(); i++) {
            labels.add((double) i);
        }
        return labels;
    }

    public double getZeroDivValue() {
        return zeroDivValue;
    }

    public AbstractClassificationEvaluator<T> setZeroDivValue(double zeroDivValue) {
        this.zeroDivValue = zeroDivValue;
        return this;
    }
}
