/*
   Copyright [2013] [szhu1@umbc.edu]

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*/
package szhu.hcc.umbc.crowdsourcing.quality.core;

import org.apache.commons.math3.stat.regression.OLSMultipleLinearRegression;

import java.util.ArrayList;
import java.util.List;

/**
 * Refinement #1: Empirical-Decreasing based crowdsourcing quality judging refinement method.
 * Please note JDK7 is used.
 *
 * <p>The raw logs are assumed to be grouped so that each task occupies
 * {@code MAX_WORKER_PER_TASK} consecutive entries (see how task ids are derived below).
 *
 * @author szhu1@umbc.edu
 */
public class QualityJudgingCoreImpl implements QualityJudgingCore {
    // Use Apache Math OLS regression helper.
    // NOTE(review): OLSMultipleLinearRegression is stateful (newSampleData mutates it), so this
    // shared static instance is NOT thread-safe — confirm the pipeline is single-threaded.
    protected static final OLSMultipleLinearRegression OSL_REGRESSION_HELPER = new OLSMultipleLinearRegression();

    // Raw worker logs; entries for the same task are contiguous, MAX_WORKER_PER_TASK per task.
    protected List<RawData> rawLogs = null;
    protected List<Double> userEfforts = null;
    // Optional per-metric weights consumed by OptimizeType.CUSTOMIZE in getModelFitness.
    protected double[] weights = null;

    // Metrics populated by evaluate(); exposed as public fields for callers/tests.
    public double precision = 0;
    public double recall = 0;
    public double fValue = 0;
    public double fValue2 = 0;
    public double accuracy = 0;
    public double optimalX = 0;

    /**
     * Creates a judging core over the given raw logs.
     *
     * @param Logs the raw worker logs, grouped by task
     */
    public QualityJudgingCoreImpl(List<RawData> Logs) {
        rawLogs = Logs;
        userEfforts = new ArrayList<>();
    }

    /**
     * Unit test only.
     */
    public QualityJudgingCoreImpl() {
        userEfforts = new ArrayList<>();
    }

    /**
     * Linear scan for {@code id} in {@code ids}.
     *
     * @return the index of the first match, or -1 if absent
     */
    private int findIndex(List<String> ids, String id) {
        for (int i = 0; i < ids.size(); i++) {
            if (ids.get(i).equals(id))//found!
                return i;
        }
        return -1;
    }

    /**
     * Extracts the training data set: for every chosen training task (skipping any task that
     * involves a malicious worker), records the task natures and per-effort upper/lower limits.
     *
     * <p>The upper limit is the lowest effort among qualified workers; the lower limit is the
     * highest effort among unqualified workers. Missing bounds are approximated from the other
     * bound (+/- 1).
     *
     * <p>NOTE(review): the output arrays ({@code allChosenTaskNatures}, {@code effortUpperLimits},
     * {@code effortLowerLimits}) MUST be pre-allocated by the caller. The lazy re-allocation
     * below only rebinds the local parameter (Java is pass-by-value), so if a caller passes
     * null, all collected results are silently lost.
     *
     * @param trainingTaskIds      task ids selected for training
     * @param rawDataList          full raw log list, grouped by task
     * @param allChosenTaskNatures out: per-nature lists of observed task-nature values
     * @param effortUpperLimits    out: per-effort lists of upper limits
     * @param effortLowerLimits    out: per-effort lists of lower limits
     * @param user_per_task        stride between consecutive tasks in rawDataList
     * @param maliciousWorkers     worker ids whose tasks are excluded entirely
     */
    protected void extractTrainingDataSet(List<Integer> trainingTaskIds, List<RawData> rawDataList,
                                          ArrayList[] allChosenTaskNatures, ArrayList[] effortUpperLimits,
                                          ArrayList[] effortLowerLimits, int user_per_task,
                                          List<String> maliciousWorkers) {
        // Sentinels marking "no bound observed yet".
        double not_set_upper = 10000000.0;
        double not_set_lower = -1;
        for (int i = 0; i < rawDataList.size(); i += user_per_task) {
            // TODO: allow another filter to choose a subset from full user list per task, e.g. select 3 out of 5
            RawData rawData = rawDataList.get(i);
            // NOTE(review): the loop steps by user_per_task but the task id divides by
            // MAX_WORKER_PER_TASK; these agree only when user_per_task == MAX_WORKER_PER_TASK
            // (true for the caller in evaluate()) — confirm before passing other strides.
            int taskId = i / MAX_WORKER_PER_TASK;
            if (!trainingTaskIds.contains(taskId)) {
                continue;
            }
            // Drop the whole task if any of its workers is known-malicious.
            boolean skipMalicious = false;
            for (int k = 0; k < MAX_WORKER_PER_TASK; k++) {
                if (maliciousWorkers.contains(rawDataList.get(i + k).workerId)) {
                    skipMalicious = true;
                    break;
                }
            }
            if (skipMalicious) {
                continue;
            }
            List<Double> taskNatures = rawData.taskNatures;
            for (int j = 0; j < taskNatures.size(); j++) {
                if (allChosenTaskNatures == null) {
                    allChosenTaskNatures = new ArrayList[taskNatures.size()];
                }
                if (allChosenTaskNatures[j] == null) {
                    allChosenTaskNatures[j] = new ArrayList<Double>();
                }
                allChosenTaskNatures[j].add(taskNatures.get(j));
            }
            // per task nature
            for (int j = 0; j < rawData.userEfforts.size(); j++) {
                // The upper is lowest qualified
                double upper = not_set_upper;
                // The lower bound is most unqualified
                double lower = not_set_lower;
                // within the group, all work on same task, get upper/lower limits
                for (int m = 0; m < user_per_task; m++) {
                    double userMeasure = rawDataList.get(i + m).userEfforts.get(j);
                    if (rawDataList.get(i + m).quality >= 0) {
                        upper = (upper > userMeasure) ? userMeasure : upper;
                    } else {
                        lower = (lower < userMeasure) ? userMeasure : lower;
                    }
                }
                if (upper == not_set_upper) {
                    upper = lower + 1;// if no upper is set, none is qualified, use lower + 1 as the best guess
                } else if (lower == not_set_lower) {
                    lower = upper - 1;// if no lower is set, all qualified, so use upper-1 as best guess
                }
                // Make sure lower is less than upper, if not, then use upper-1 as best guess
                lower = (lower > upper) ? upper - 1 : lower;
                if (effortUpperLimits == null) {
                    effortUpperLimits = new ArrayList[rawData.userEfforts.size()];
                }
                if (effortUpperLimits[j] == null) {
                    effortUpperLimits[j] = new ArrayList<Double>();
                }
                effortUpperLimits[j].add(upper);
                if (effortLowerLimits == null) {
                    effortLowerLimits = new ArrayList[rawData.userEfforts.size()];
                }
                if (effortLowerLimits[j] == null) {
                    effortLowerLimits[j] = new ArrayList<Double>();
                }
                effortLowerLimits[j].add(lower);
            }
        }
    }

    /**
     * Converts an array of (numeric) lists into a 2-D double array.
     *
     * <p>All inner lists are assumed to have the same length as {@code arrayOfList[0]}
     * (true for arrays produced by {@link #extractTrainingDataSet}).
     *
     * @return the converted matrix, or null if the input (or its first row) is null
     */
    protected double[][] convert(ArrayList[] arrayOfList) {
        if (arrayOfList == null) {
            System.out.println("Array of list is null");
            return null;
        }
        if (arrayOfList[0] == null) {
            System.out.println("Array[0] of list is null");
            return null;
        }
        double[][] result = new double[arrayOfList.length][arrayOfList[0].size()];
        for (int i = 0; i < arrayOfList.length; i++) {
            for (int j = 0; j < arrayOfList[0].size(); j++) {
                // BUG FIX: previously read arrayOfList[0].get(j), duplicating row 0 into every row.
                result[i][j] = this.castToDoubleValue(arrayOfList[i].get(j));
            }
        }
        return result;
    }

    /**
     * Trains on the chosen task ids, picks the best interval parameters, then evaluates the model
     * on the remaining (non-training) tasks.
     *
     * <p>Side effects: updates the public {@code precision}, {@code recall}, {@code fValue2}
     * and {@code accuracy} fields.
     *
     * <p>NOTE(review): the metric divisions below can yield NaN when a denominator is 0
     * (e.g. no positives recognized at all).
     *
     * @return {precision, recall, fValue2, accuracy}
     */
    @Override
    public double[] evaluate(List<Integer> trainingIds, List<Integer> withinTaskWorkerIds, OptimizeType type,
                             String logPrefix, boolean isStrict, double... weights) {
        this.weights = weights;
        ArrayList[] allChosenTaskNatures = new ArrayList[rawLogs.get(0).taskNatures.size()];
        ArrayList[] effortUpperLimits = new ArrayList[rawLogs.get(0).userEfforts.size()];
        ArrayList[] effortLowerLimits = new ArrayList[rawLogs.get(0).userEfforts.size()];
        List<String> maliciousWorkers = new ArrayList<>();
        extractTrainingDataSet(trainingIds, rawLogs, allChosenTaskNatures, effortUpperLimits, effortLowerLimits,
                MAX_WORKER_PER_TASK, maliciousWorkers);
        double[][] convertedEffortUpperLimits = convert(effortUpperLimits);
        double[][] convertedEffortLowerLimits = convert(effortLowerLimits);
        double[][] bestModelFitParams = findBestIntervalParams(allChosenTaskNatures, convertedEffortUpperLimits,
                convertedEffortLowerLimits, trainingIds, type, isStrict);
        // Now chosenIds are training and should be excluded, so pass "chosenIds, false" to evaluate method
        int[] modelFits = evaluateModelFitCounts(trainingIds, false, bestModelFitParams, isStrict);
        precision = (double) modelFits[0] / (modelFits[0] + modelFits[1]);
        recall = (double) modelFits[0] / (modelFits[0] + modelFits[2]);
        // Harmonic mean of precision and recall (F1).
        fValue2 = precision * recall / (precision + recall) * 2;
        accuracy = (double) (modelFits[0] + modelFits[3]) / (modelFits[0] + modelFits[1] + modelFits[2] + modelFits[3]);
        return new double[]{precision, recall, fValue2, accuracy};
    }

    /**
     * Not implemented for this refinement; always returns an empty array.
     */
    @Override
    public double[] evaluate(OptimizeType optimizeType, RefinementType refinementType, List<Integer> trainingTaskIDs, List<Integer> withinTaskWorkerids, String logPrefix, boolean isStrict) {
        return new double[0];  //To change body of implemented methods use File | Settings | File Templates.
    }

    /**
     * @deprecated use {@link #getModelFitness(int[], OptimizeType)} with an explicit type.
     */
    @Deprecated
    private double getModelFitness(int[] modelFits, boolean unusedLegacyFlag) {
        return getModelFitness(modelFits, OptimizeType.F_VALUE);
    }

    /**
     * Computes the fitness metric selected by {@code type} from the recognition counts.
     *
     * <p>Here the positive class is index 3 (contrast with {@link #evaluate}, which treats
     * index 0 as positive). NaN is possible when a denominator is 0.
     *
     * @param modelFits recognition counts, length >= 4
     * @param type      which metric to return; CUSTOMIZE uses {@code this.weights} when set
     *                  (length >= 4), otherwise the plain average of all four metrics
     */
    @Override
    public double getModelFitness(int[] modelFits, OptimizeType type) {
        assert (modelFits.length >= 4);
        double precision = (double) modelFits[3] / (modelFits[1] + modelFits[3]);
        double recall = (double) modelFits[3] / (modelFits[2] + modelFits[3]);
        double fValue = precision * recall / (precision + recall) * 2;
        double accuracy = (double) (modelFits[0] + modelFits[3]) /
                (double) (modelFits[0] + modelFits[1] + modelFits[2] + modelFits[3]);
        switch (type) {
            case PRECISION:
                return precision;
            case RECALL:
                return recall;
            case F_VALUE:
                return fValue;
            case ACCURACY:
                return accuracy;
            case CUSTOMIZE:
                if (this.weights != null && weights.length >= 4) {
                    return weights[0] * precision + weights[1] * recall + weights[2] * fValue + weights[3] * accuracy;
                }
                //Otherwise get the average
                return (precision + recall + fValue + accuracy) / 4;
        }
        return 0;
    }

    /**
     * Collects the distinct worker ids at the head of each task group.
     *
     * <p>NOTE(review): only deduplicates consecutive ids (and only samples the first worker of
     * each group) — a worker reappearing later is added again. Confirm the logs are ordered
     * such that this yields the intended id list.
     */
    @Override
    public List<String> getAllUserIds(List<RawData> rawLogs) {
        List<String> userIds = new ArrayList<>();
        String lastID = null;
        for (int i = 0; i < rawLogs.size(); i += MAX_WORKER_PER_TASK) {
            if (!rawLogs.get(i).workerId.equals(lastID)) {
                lastID = rawLogs.get(i).workerId;
                userIds.add(lastID);
            }
        }
        return userIds;
    }

    /**
     * This is the core method to evaluate the model fitness based on "Empirical Decreasing"
     * refinement algorithm.
     *
     * <p>TODO: not yet ported to Java — always returns 0. A C# reference implementation
     * existed here previously; recover it from version control when porting.
     *
     * @param interval          The key value to define the impact factor N
     * @param quality           quality level selector
     * @param type              The optimize type for the model (e.g. f-value or precision first)
     * @param chosenTrainingIDs The chosen task ids for the training
     * @param chosenInternalIds The picked user ids
     * @param isTraining        if it is training or evaluating
     * @return the model fitness (currently always 0)
     */
    public double getModelFitness(double interval, int quality, OptimizeType type, List<Integer> chosenTrainingIDs,
                                  List<Integer> chosenInternalIds, boolean isTraining) {
        return 0;
    }

    /**
     * A support method to get the best percentile for regression based refinement method (#2).
     *
     * <p>Sweeps the interpolation percentage p between the lower and upper effort limits,
     * fits a regression at each p, and keeps the parameters with the best training fitness.
     *
     * @return the regression parameters with the highest fitness, or null if no p improved on 0
     */
    protected double[][] findBestIntervalParams(ArrayList[] taskNatures, double[][] uppers, double[][] lowers,
                                                List<Integer> chosenIDs, OptimizeType type, boolean isStrict) {
        // Based on each interval (in the loop), get the computed Ys, then apply linear regression and check fitness
        double bestFitRate = 0;
        double bestPercentage = 0;
        double[][] bestParams = null;
        for (double p = LINEAR_REGRESSION_BEST_PERCENT_INTERVAL; p < 1; p += LINEAR_REGRESSION_BEST_PERCENT_INTERVAL) {
            // Interpolate the target efforts: p of the lower bound, (1-p) of the upper bound.
            double[][] tempEfforts = new double[uppers.length][uppers[0].length];
            for (int i = 0; i < uppers.length; i++) {
                for (int j = 0; j < uppers[0].length; j++) {
                    tempEfforts[i][j] = lowers[i][j] * p + uppers[i][j] * (1 - p);
                }
            }
            double[][] params = getLinearRegressionParams(taskNatures, tempEfforts);
            int[] modelFits = evaluateModelFitCounts(chosenIDs, true, params, isStrict);
            double currentFits = getModelFitness(modelFits, type);
            if (currentFits > bestFitRate) {
                bestFitRate = currentFits;
                bestPercentage = p;
                bestParams = params;
            }
        }
        return bestParams;
    }

    /**
     * Applies the regression parameters to a task-nature vector to predict the expected
     * effort threshold for each effort dimension.
     *
     * <p>Expects each parameter row to hold the intercept first, then one coefficient per
     * task nature (i.e. {@code regressionParams[i].length == taskNatures.size() + 1}).
     * Thresholds are clamped to a minimum of 1.
     *
     * @return the per-dimension thresholds, or null when inputs are null/mis-shaped
     */
    protected double[] computeExpectedValueWithRegressionParams(List<Double> taskNatures, double[][] regressionParams) {
        if (taskNatures == null) {
            System.out.println("Task nature is null");
            return null;
        }
        if (regressionParams == null) {
            System.out.println("regressionParams is null");
            return null;
        }
        if (regressionParams[0] == null) {
            System.out.println("regressionParams[0] is null");
            return null;
        }
        if (taskNatures.size() - regressionParams[0].length == -1) {
            double[] results = new double[regressionParams.length];
            for (int i = 0; i < results.length; i++) {
                // constant first
                results[i] = regressionParams[i][0];
                for (int j = 0; j < taskNatures.size(); j++) {
                    results[i] += taskNatures.get(j) * regressionParams[i][j + 1];
                }
                // Make the minimal threshold as "1"
                results[i] = results[i] < 1 ? 1 : results[i];
            }
            return results;
        }
        return null;
    }

    /**
     * Scores every worker of every relevant task against the regression-predicted thresholds.
     *
     * <p>Index semantics of the returned counts:
     * [0] = actually qualified (quality >= 0) and passing,
     * [1] = actually qualified but failing,
     * [2] = actually unqualified (quality < 0) but passing,
     * [3] = actually unqualified and failing.
     *
     * @param trainingTaskids the training task ids
     * @param isTraining      true: score only training tasks; false: score only non-training tasks
     * @param regressionParams regression coefficients (intercept first) per effort dimension
     * @param isStrict        true: worker must pass every threshold; false: any one threshold
     * @return the four recognition counts
     */
    protected int[] evaluateModelFitCounts(List<Integer> trainingTaskids, boolean isTraining,
                                           double[][] regressionParams, boolean isStrict) {
        int recognitionResultSize = 4;
        int[] recognitionResults = new int[recognitionResultSize];
        for (int i = 0; i < rawLogs.size(); i += MAX_WORKER_PER_TASK) {
            int taskId = i / MAX_WORKER_PER_TASK;
            // Task filter is per-task, so it is hoisted out of the per-worker loop:
            // training pass keeps only training tasks; evaluation pass excludes them.
            if (isTraining != trainingTaskids.contains(taskId)) {
                continue;
            }
            RawData rawData = rawLogs.get(i);
            List<Double> taskNatures = rawData.taskNatures;
            // All tasks in same group have the same natures, so we can get thresholds without knowing user efforts
            double[] userEffortThresholds = this.computeExpectedValueWithRegressionParams(taskNatures, regressionParams);
            for (int j = 0; j < MAX_WORKER_PER_TASK; j++) {
                RawData data = rawLogs.get(i + j);
                // TODO: extend to multiple dimensions of quality classification by enlarging this result matrix
                if (data.quality >= 0) //actually positive
                {
                    if (!isTraining && inWorkerBlackList(data.workerId)) {
                        // Blacklisted worker is treated as rejected regardless of efforts.
                        recognitionResults[1]++;
                    } else if (isPassing(isStrict, data.userEfforts, userEffortThresholds)) {
                        recognitionResults[0]++;
                    } else {
                        recognitionResults[1]++;
                    }
                } else { // actually negative
                    if (!isTraining && inWorkerBlackList(data.workerId)) {
                        recognitionResults[3]++;
                    // BUG FIX: was a bare "if", so blacklisted negatives were double-counted
                    // (incremented [3] and then [2] or [3] again); now mirrors the positive branch.
                    } else if (isPassing(isStrict, data.userEfforts, userEffortThresholds)) {
                        recognitionResults[2]++;
                    } else {
                        recognitionResults[3]++;
                    }
                }
            }
        }
        return recognitionResults;
    }

    /**
     * Dispatches to the strict (all thresholds) or lenient (any threshold) pass check.
     */
    protected boolean isPassing(boolean isStrict, List<Double> userEfforts, double[] computedMinThresholds) {
        if (isStrict) {
            return isAllPassThreshold(userEfforts, computedMinThresholds);
        }
        return isPassAnyThreshold(userEfforts, computedMinThresholds);
    }

    /**
     * Return true if all user measurements are bigger than the computed threshold minimals.
     *
     * <p>Thresholds are truncated to int before comparing: most effort measures are integers,
     * so this allows some decimal slack.
     */
    protected boolean isAllPassThreshold(List<Double> userEfforts, double[] computedMinThresholds) {
        // Diagnostics hoisted above the loop: previously they sat inside it, after the loop
        // condition had already dereferenced userEfforts, so they could never fire for null
        // input and printed once per element otherwise.
        if (userEfforts == null) System.out.println("userEfforts is null");
        if (computedMinThresholds == null) System.out.println("computedMinThresholds is null");
        for (int i = 0; i < userEfforts.size(); i++) {
            if (userEfforts.get(i) < (int) computedMinThresholds[i]) {
                return false;
            }
        }
        return true;
    }

    /**
     * Return true if there is any one user measurement which is larger than the corresponding threshold minimal.
     */
    protected boolean isPassAnyThreshold(List<Double> userEfforts, double[] computedMinThresholds) {
        for (int i = 0; i < userEfforts.size(); i++) {
            if (userEfforts.get(i) >= (int) computedMinThresholds[i]) {
                return true;
            }
        }
        return false;
    }

    /**
     * Gets the coefficients based on linear regression support method.
     *
     * <p>Task natures are the regressors (one column each), user efforts the observations
     * (one regression per effort dimension). Uses the shared static OLS helper — see the
     * thread-safety note on {@code OSL_REGRESSION_HELPER}.
     *
     * @return one coefficient row per effort dimension, intercept first
     */
    @Override
    public double[][] getLinearRegressionParams(ArrayList[] taskNatures, ArrayList[] userEfforts) {
        double[][] xs = new double[taskNatures[0].size()][taskNatures.length];
        for (int i = 0; i < taskNatures.length; i++) {
            for (int j = 0; j < taskNatures[0].size(); j++) {
                xs[j][i] = castToDoubleValue(taskNatures[i].get(j));
            }
        }
        double[][] results = new double[userEfforts.length][];
        for (int i = 0; i < userEfforts.length; i++) {
            double[] ys = new double[userEfforts[i].size()];
            for (int m = 0; m < userEfforts[i].size(); m++) {
                ys[m] = castToDoubleValue(userEfforts[i].get(m));
            }
            // Use Apache Math library to get the regression
            OSL_REGRESSION_HELPER.newSampleData(ys, xs);
            results[i] = OSL_REGRESSION_HELPER.estimateRegressionParameters();
        }
        return results;
    }

    /**
     * Same as {@link #getLinearRegressionParams(ArrayList[], ArrayList[])} but with the
     * observations already converted to a primitive matrix.
     */
    private double[][] getLinearRegressionParams(ArrayList[] taskNatures, double[][] userEfforts) {
        double[][] xs = new double[taskNatures[0].size()][taskNatures.length];
        for (int i = 0; i < taskNatures.length; i++) {
            for (int j = 0; j < taskNatures[0].size(); j++) {
                xs[j][i] = castToDoubleValue(taskNatures[i].get(j));
            }
        }
        double[][] results = new double[userEfforts.length][];
        for (int i = 0; i < userEfforts.length; i++) {
            double[] ys = new double[userEfforts[i].length];
            for (int m = 0; m < userEfforts[i].length; m++) {
                ys[m] = castToDoubleValue(userEfforts[i][m]);
            }
            // Use Apache Math library to get the regression
            OSL_REGRESSION_HELPER.newSampleData(ys, xs);
            results[i] = OSL_REGRESSION_HELPER.estimateRegressionParameters();
        }
        return results;
    }

    /**
     * A support method to cast a value to double.
     *
     * <p>Generalized from the original Integer/Double string-parsing round-trip to accept
     * any {@link Number} directly (backward compatible for Integer and Double inputs).
     *
     * @return the numeric value, or {@code Double.MIN_NORMAL} for non-numeric input
     */
    private double castToDoubleValue(Object value) {
        if (value instanceof Number) {
            return ((Number) value).doubleValue();
        }
        return Double.MIN_NORMAL;
    }

    /**
     * A special method used by gold standard impl. No-op here.
     */
    protected void saveWorkerBlackList(
            List<Integer> trainingTaskids, boolean isTraining, double[][] params, boolean isStrict) {}

    /**
     * Another special method used by gold standard impl to check if a worker is within the black list.
     * Always false in this base implementation.
     */
    protected boolean inWorkerBlackList(String workerId) {
        return false;
    }
}
