#ifndef __REGRESSOR_WRAPPER_H__
#define __REGRESSOR_WRAPPER_H__
/*********************************************************************
 * Software License Agreement (Modified BSD License)
 *
 * Copyright (c) 2009-2010, Willow Garage, Daniel Munoz
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 *   * Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *   * Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in the
 *     documentation and/or other materials provided with the distribution.
 *   * Neither the name of the copyright holders' organizations nor the
 *     names of its contributors may be used to endorse or promote products
 *     derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * HOLDERS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *********************************************************************/

#include <string>
#include <vector>
#include <map>
#include <set>
#include <iostream>
#include <cmath>
#include <algorithm>

// --------------------------------------------------------------
/*!
 * \file regressor_wrapper.h
 *
 * \brief Generic wrapper around various regressor/classification algorithms.
 */
// --------------------------------------------------------------

// --------------------------------------------------------------
/*!
 * \brief Generic wrapper around various regressor/classification algorithms.
 *
 * Multi-class regression/classification can be handled by creating a "stacked"
 * sparse feature vector representation. \n
 * \n
 * Example: 3-class (A,B,C) 1-vs-all classification.
 * Assume the feature x has dimension 1 with ground truth label B.
 * Instead of training 3 classifiers f_A, f_B, f_C, where
 * the training samples for each classifier are \n
 *   (x,-1) -> f_A \n
 *   (x,+1) -> f_B \n
 *   (x,-1) -> f_C \n
 * we will use 1 classifier g with "stacked" feature vectors
 * that encapsulates this idea.  In this example, we create a
 * 3-dimensional vector (dim(x)=1) for each label, i.e.
 * non-zero in the dimension of the desired label.  So in the
 * above example, we train g with the features \n
 *   ([x,0,0], -1) \n
 *   ([0,x,0], +1) \n
 *   ([0,0,x], -1) \n
 * When predicting the class of a new feature y, we would evaluate \n
 *   g([y,0,0])  (score for class A) \n
 *   g([0,y,0])  (score for class B) \n
 *   g([0,0,y])  (score for class C) \n
 * and pick the class that had the biggest value. \n
 * \n
 * The stacked feature dimension is defined when instantiating
 * the inherited class. \n
 * This stacked feature representation is OPTIONAL.  Set the stacked feature
 * dimension via setStackedFeatureDim() and pass the parameters to
 * addTrainingSample() and predict() accordingly.
 */
// --------------------------------------------------------------
class RegressorWrapper
{
  public:
    /** \brief The minimum |target| magnitude a sample needs in order to be used for regression */
    static const double SMALLEST_ABS_TARGET;

    /** \brief Identifiers for the supported underlying regression/classification algorithms */
    typedef enum algorithm
    {
      // Default flag (no algorithm selected)
      UNKNOWN = 0,
      // OpenCV's regression tree
      OPENCV_RTREE = 1,
      // Linear regression
      LINEAR_REGRESSION = 3,
    } algorithm_t;

    // --------------------------------------------------------------
    /**
     * \brief Instantiate with the specified algorithm type
     *
     * \param algorithm_type The algorithm the inheriting implementation represents
     */
    // --------------------------------------------------------------
    RegressorWrapper(RegressorWrapper::algorithm_t algorithm_type);

    // --------------------------------------------------------------
    /**
     * \brief Creates a new instance of this RegressorWrapper with the
     *        same training arguments, but the actual model information
     *        is not copied
     *
     * The new instance is untrained
     *
     * \return Pointer to the new instance (presumably owned by the caller
     *         and to be freed with delete -- confirm with implementations)
     */
    // --------------------------------------------------------------
    virtual RegressorWrapper* paramClone() const = 0;

    // --------------------------------------------------------------
    /**
     * \brief Clears the state of the regressor so it can be retrained
     *
     * \warning The parameters for training are never reset
     */
    // --------------------------------------------------------------
    void clear();

    // --------------------------------------------------------------
    /**
     * \brief Saves the trained regressor to file format
     *
     * \param directory The path to the directory with the regressor and related files
     * \param basename The basename of the file to save the regressor and related files
     *
     * \return 0 on success, otherwise negative value on error
     */
    // --------------------------------------------------------------
    virtual int saveToFile(const std::string directory,
                           const std::string basename) = 0;

    // --------------------------------------------------------------
    /**
     * \brief Loads a previously saved, trained regressor from file
     *
     * \param directory The path to the directory with the regressor and related files
     * \param basename The basename of the file of the saved regressor and related files
     *
     * \return 0 on success, otherwise negative value on error
     */
    // --------------------------------------------------------------
    virtual int loadFromFile(const std::string directory,
                             const std::string basename) = 0;

    // --------------------------------------------------------------
    /**
     * \brief Adds a (feature,start_idx) -> target data sample to train on
     *
     * This function can be called multiple times with the same
     * (feature_vals, start_idx) values and differing target values;
     * on subsequent calls, the new target value will be added to the
     * target value from the previous call.
     *
     * \param feature_vals Array of features to train on
     * \param start_idx The dimension to place the features in the "stacked"
     *                  feature representation (should be 0 if NOT using
     *                  "stacked" representation)
     * \param target The target response of the features
     *
     * \warning The samples appear to be tracked by pointer to feature_vals
     *          (see getTargets()), so the caller presumably must keep the
     *          vector alive until training -- confirm in the implementation
     *
     * \return 0 on success, otherwise negative value on error
     */
    // --------------------------------------------------------------
    int addTrainingSample(const std::vector<double>& feature_vals,
                          const unsigned int start_idx,
                          const double target);

    // --------------------------------------------------------------
    /**
     * \brief Trains the regressor using all the added training samples
     *
     * \return 0 on success, otherwise negative value on error.  If error,
     *         all previously added training samples are lost and must be
     *         re-added.
     */
    // --------------------------------------------------------------
    int train();

    // --------------------------------------------------------------
    /**
     * \brief Predicts the value of the feature with this trained regressor/classifier
     *
     * \param feature_vals Array of features to predict on
     * \param start_idx The dimension to place the features in the "stacked"
     *                  feature representation (should be 0 if NOT using
     *                  "stacked" representation)
     * \param predicted_val The predicted/response value (output parameter)
     *
     * \warning No check is done to ensure start_idx + feature_vals.size() <= m_stacked_feature_dim
     *
     * \return 0 on success, otherwise negative value on error
     */
    // --------------------------------------------------------------
    int predict(const std::vector<double>& feature_vals,
                const unsigned int start_idx,
                double& predicted_val) const;

    // --------------------------------------------------------------
    /**
     * \brief Copies the (features, start index)-->target value pairs from
     *        the given RegressorWrapper into this one
     *
     * \param source The RegressorWrapper whose training samples to copy
     *
     * \return 0 on success, otherwise negative value on error
     */
    // --------------------------------------------------------------
    int copyTrainingSamples(const RegressorWrapper& source);

    // --------------------------------------------------------------
    /**
     * \brief Sets the total feature length to expect
     *
     * \param stacked_feature_dim Total dimension of the "stacked" feature
     *                            vector (see class description)
     */
    // --------------------------------------------------------------
    inline void setStackedFeatureDim(unsigned int stacked_feature_dim)
    {
      m_stacked_feature_dim = stacked_feature_dim;
    }

    // --------------------------------------------------------------
    /**
     * \brief Returns the type of regressor algorithm
     */
    // --------------------------------------------------------------
    RegressorWrapper::algorithm_t getAlgorithmType() const
    {
      return m_algorithm_type;
    }

    // --------------------------------------------------------------
    /**
     * \brief Returns the map of (feature,target) pairings
     *
     * Container format: feature_vals -> start_idx -> target value
     * (feature vectors are keyed by pointer, not by value)
     */
    // --------------------------------------------------------------
    const std::map<const std::vector<double>*, std::map<unsigned int, double> >& getTargets() const
    {
      return m_interm_feature_targets;
    }

  protected:
    // --------------------------------------------------------------
    /**
     * \brief Algorithm-specific reset hook
     *
     * \see clear
     */
    // --------------------------------------------------------------
    virtual void doClear() = 0;

    // --------------------------------------------------------------
    /**
     * \brief Algorithm-specific training hook
     *
     * The three parameters are parallel arrays: element i describes the
     * i-th accumulated training sample.
     *
     * \see train
     */
    // --------------------------------------------------------------
    virtual int doTrain(const std::vector<const std::vector<double>*>& interm_feature_vals,
                        const std::vector<unsigned int>& interm_start_idx,
                        const std::vector<double>& interm_target) = 0;

    // --------------------------------------------------------------
    /**
     * \brief Algorithm-specific prediction hook
     *
     * \see predict
     */
    // --------------------------------------------------------------
    virtual int doPredict(const std::vector<double>& feature_vals,
                          const unsigned int start_idx,
                          double& predicted_val) const = 0;

    /** \brief The algorithm this wrapper represents (set at construction) */
    RegressorWrapper::algorithm_t m_algorithm_type;
    /** \brief Total dimension of the "stacked" feature vector (see class description) */
    unsigned int m_stacked_feature_dim;
    // Presumably set once train() succeeds -- confirm in the implementation
    bool m_trained;

  private:
    // Intermediate containers used by addTrainingSample
    // feature_vals -> start_idx -> target value
    // (keys are pointers to caller-provided vectors, compared by address)
    std::map<const std::vector<double>*, std::map<unsigned int, double> > m_interm_feature_targets;

    // Stores the samples in the order they are added
    std::vector<const std::vector<double>*> m_ordered_samples;
    // To enable easy lookup of previously added feature vectors (by pointer)
    std::set<const std::vector<double>*> m_seen_samples;
};

#endif
