/*********************************************************************
 * Software License Agreement (Modified BSD License)
 *
 * Copyright (c) 2010, Daniel Munoz
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 *   * Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *   * Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in the
 *     documentation and/or other materials provided with the distribution.
 *   * Neither the name of the copyright holders' organizations nor the
 *     names of its contributors may be used to endorse or promote products
 *     derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * HOLDERS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *********************************************************************/

#include <m3n/functional_m3n.h>

#include <stdexcept>

using namespace std;

// --------------------------------------------------------------
/* See function definition */
// --------------------------------------------------------------
FunctionalM3N::FunctionalM3N(std::string directory,
                             std::string basename) :
  M3NModel(vector<double> ())
{
  // Restore a previously saved model (see doSaveToFile for the format).
  if (loadFromFile(directory, basename) < 0)
  {
    cerr << "FunctionalM3N Could not load from file" << endl;
    // A bare "throw;" with no exception in flight calls std::terminate();
    // throw a concrete exception so callers can actually catch the failure.
    throw std::runtime_error("FunctionalM3N could not load from file");
  }
}

// --------------------------------------------------------------
/* See function definition */
// --------------------------------------------------------------
FunctionalM3N::FunctionalM3N(const std::vector<double>& robust_potts_params,
                             const RegressorWrapper& template_regressor) :
  M3NModel(robust_potts_params)
{
  // Keep a private clone so this model owns (and later deletes) its
  // template regressor independently of the caller's instance.
  m_template_regressor = template_regressor.paramClone();
}

// --------------------------------------------------------------
/* See function definition */
// --------------------------------------------------------------
FunctionalM3N::~FunctionalM3N()
{
  // Free the owned template regressor, then let clear() release the
  // trained (step-size, regressor) pairs and reset base-class state.
  delete m_template_regressor;
  clear();
}

// --------------------------------------------------------------
/* See function definition */
// --------------------------------------------------------------
void FunctionalM3N::setTemplateRegressor(const RegressorWrapper& template_regressor)
{
  delete m_template_regressor;
  m_template_regressor = template_regressor.paramClone();
}

// --------------------------------------------------------------
/* See function definition */
// --------------------------------------------------------------
void FunctionalM3N::clear()
{
  M3NModel::clear();
  for (unsigned int i = 0 ; i < m_regressors.size() ; i++)
  {
    delete (m_regressors[i]).second;
  }
  m_regressors.clear();
}

// --------------------------------------------------------------
/* See function definition */
// --------------------------------------------------------------
void FunctionalM3N::doSubgradientUpdate(const std::vector<const RandomField*>& training_rfs,
                                        const bool infer_random,
                                        const double step_size,
                                        M3NLogger& logger)
{
  // ---------------------------------------------------
  // Clone a fresh regressor from the template and fit it to the
  // functional gradients (residuals) of the structured margin loss.
  RegressorWrapper* new_regressor = m_template_regressor->paramClone();
  new_regressor->setStackedFeatureDim(m_total_stack_feature_dim);
  structLossResiduals(training_rfs, infer_random, *new_regressor, logger);

  // ---------------------------------------------------
  // Train the regressor on the residuals, timing the call for the logger.
  cout << "Starting to train regressor..." << flush << endl;
  time_t train_start;
  time_t train_end;
  time(&train_start);
  const int train_status = new_regressor->train();
  time(&train_end);
  logger.addTimingRegressors(difftime(train_end, train_start));

  // On success, append (step-size, regressor) to the boosted ensemble.
  // Training can legitimately fail (usually too few samples), so a failed
  // round simply discards this regressor rather than aborting.
  if (train_status != 0)
  {
    cout << "Could not train regressor" << endl;
    delete new_regressor;
  }
  else
  {
    m_regressors.push_back(pair<double, RegressorWrapper*> (step_size, new_regressor));
  }
}

// --------------------------------------------------------------
/* See function definition */
// --------------------------------------------------------------
void FunctionalM3N::truncateUnit(double& value) const
{
  // Clamp value into the closed interval [-1, 1].
  value = max(-1.0, min(1.0, value));
}

// --------------------------------------------------------------
/* See function definition */
// --------------------------------------------------------------
int FunctionalM3N::computePotential(const RandomField::Node& node,
                                    const unsigned int label,
                                    const size_t max_nbr_regressors,
                                    double& potential_val) const
{
  potential_val = 0.0;

  const vector<double>& features = node.getFeatureVals();
  // NOTE(review): assumes label is always a key of
  // m_node_stacked_feature_start_idx; find() returning end() here would be
  // undefined behavior — confirm callers only pass trained labels.
  const size_t start_idx = m_node_stacked_feature_start_idx.find(label)->second;

  // Only use the specified amount, guaranteed not to exceed array.
  // A max of 0 means "use all regressors" — condition written on
  // max_nbr_regressors to match the Clique overload (same effect either
  // way, since min() already caps at m_regressors.size()).
  size_t nbr_regressors = min(max_nbr_regressors, m_regressors.size());
  if (max_nbr_regressors == 0)
  {
    nbr_regressors = m_regressors.size();
  }

  // Sum the step-size-weighted scores of each regressor
  for (unsigned int i = 0 ; i < nbr_regressors ; i++)
  {
    const double curr_step_size = m_regressors[i].first;
    RegressorWrapper* curr_regressor = m_regressors[i].second;

    double curr_predicted_val = 0.0;
    if (curr_regressor->predict(features, start_idx, curr_predicted_val) < 0)
    {
      return -1;
    }

    // Restrict each weak learner's output to range [-1,1]
    truncateUnit(curr_predicted_val);

    potential_val += (curr_step_size * curr_predicted_val);
  }

  // Exponentiated functional gradient descent: the potential is the
  // exponential of the accumulated functional-gradient score.
  potential_val = exp(potential_val);
  return 0;
}

// --------------------------------------------------------------
/* See function definition */
// --------------------------------------------------------------
int FunctionalM3N::computePotential(const RandomField::Clique& clique,
                                    const unsigned int clique_set_idx,
                                    const unsigned int label,
                                    const size_t max_nbr_regressors,
                                    double& potential_val) const
{
  potential_val = 0.0;

  // Feature vector for this clique and the stacked-feature offset of the
  // hypothesized label within this clique set.
  const vector<double>& clique_features = clique.getFeatureVals();
  const size_t label_start_idx =
      m_clique_set_stacked_feature_start_idx[clique_set_idx].find(label)->second;

  // Cap at the number of available regressors; a max of 0 means use all.
  size_t nbr_to_use = min(max_nbr_regressors, m_regressors.size());
  if (max_nbr_regressors == 0)
  {
    nbr_to_use = m_regressors.size();
  }

  // Accumulate each regressor's clamped prediction, weighted by its
  // step-size; bail out on any prediction failure.
  for (unsigned int i = 0 ; i < nbr_to_use ; i++)
  {
    double prediction = 0.0;
    if (m_regressors[i].second->predict(clique_features, label_start_idx, prediction) < 0)
    {
      return -1;
    }

    // Restrict to range [-1,1]
    truncateUnit(prediction);

    potential_val += m_regressors[i].first * prediction;
  }

  // Exponentiated functional gradient descent
  potential_val = exp(potential_val);
  return 0;
}

// --------------------------------------------------------------
/* See function definition */
// --------------------------------------------------------------
RegressorWrapper* FunctionalM3N::instantiateRegressor(const RegressorWrapper::algorithm_t regressor_algorithm)
{
  // Factory: map an algorithm enum to a freshly allocated concrete
  // regressor (caller owns it).  Returns NULL (after logging) for
  // unrecognized values.
  RegressorWrapper* created_regressor = NULL;
  switch (regressor_algorithm)
  {
    case RegressorWrapper::LINEAR_REGRESSION:
      created_regressor = new LinearRegression();
      break;
    case RegressorWrapper::OPENCV_RTREE:
      // Requires OpenCV; see m3n/regressors/regressor_includes.h
      created_regressor = new OCVRTreeWrapper();
      break;
    default:
      cerr << "Invalid regressor algorithm: " << regressor_algorithm << endl;
      break;
  }
  return created_regressor;
}

// --------------------------------------------------------------
/* See function definition */
// --------------------------------------------------------------
int FunctionalM3N::doLoadFromFile(ifstream& infile,
                                  std::string directory,
                                  std::string basename)
{
  // -------------------------------------------
  // File format:
  // T = <regressors_.size()>
  // regressors_[0] step-size
  // regressors_[0] algorithm_type
  // ...
  // regressors_[T-1] step-size
  // regressors_[T-1] algorithm_type
  //
  // Each regressor's own parameters live in a separate file named
  // <basename>_regressor<i> inside <directory>.

  // read regressor information (and verify the stream read succeeded,
  // so a truncated/corrupt file fails cleanly instead of reading garbage)
  unsigned int nbr_regressors = 0;
  if (!(infile >> nbr_regressors))
  {
    cerr << "FunctionalM3N::doLoadFromFile could not read number of regressors" << endl;
    return -1;
  }

  m_regressors.assign(nbr_regressors, pair<double, RegressorWrapper*> (0.0, NULL));
  for (unsigned int i = 0 ; i < nbr_regressors ; i++)
  {
    // --------------
    // read step-size and algorithm type (in that order)
    int algorithm_type = 0;
    if (!(infile >> m_regressors[i].first >> algorithm_type))
    {
      clear();
      return -1;
    }

    // --------------
    // instantiate appropriate regressor for the algorithm type
    RegressorWrapper::algorithm_t regressor_type =
        static_cast<RegressorWrapper::algorithm_t> (algorithm_type);
    RegressorWrapper* curr_regressor = instantiateRegressor(regressor_type);
    if (curr_regressor == NULL)
    {
      clear();
      return -1;
    }

    // --------------
    // load regressor parameters from its own file
    stringstream regressor_basename;
    regressor_basename << basename << "_regressor" << i;
    if (curr_regressor->loadFromFile(directory, regressor_basename.str()) < 0)
    {
      // curr_regressor is not in m_regressors yet, so clear() would not
      // free it — delete it here to avoid leaking on a partial load.
      delete curr_regressor;
      clear();
      return -1;
    }

    m_regressors[i].second = curr_regressor;
  }

  // Use last regressor type as the template for future boosting rounds.
  // Guard the empty case: m_regressors[nbr_regressors - 1] with
  // nbr_regressors == 0 would index out of bounds (unsigned underflow).
  if (nbr_regressors == 0)
  {
    cerr << "FunctionalM3N::doLoadFromFile model contains no regressors" << endl;
    return -1;
  }
  m_template_regressor = m_regressors[nbr_regressors - 1].second->paramClone();
  return 0;
}

// --------------------------------------------------------------
/* See function definition */
// --------------------------------------------------------------
int FunctionalM3N::doSaveToFile(ofstream& outfile,
                                std::string directory,
                                std::string basename)
{
  // -------------------------------------------
  // File format:
  // T = <regressors_.size()>
  // regressors_[0] step-size
  // regressors_[0] algorithm_type
  // ...
  // regressors_[T-1] step-size
  // regressors_[T-1] algorithm_type

  // ------------------------
  // regressors info: count first, then one (step-size, algorithm) pair
  // per regressor, one value per line
  const size_t nbr_regressors = m_regressors.size();
  outfile << nbr_regressors << endl;
  for (size_t i = 0 ; i < nbr_regressors ; i++)
  {
    RegressorWrapper* curr_regressor = m_regressors[i].second;

    // step-size
    outfile << m_regressors[i].first << endl;

    // algorithm type
    outfile << curr_regressor->getAlgorithmType() << endl;

    // each regressor saves its own parameters under <basename>_regressor<i>
    stringstream regressor_basename;
    regressor_basename << basename << "_regressor" << i;
    if (curr_regressor->saveToFile(directory, regressor_basename.str()) < 0)
    {
      return -1;
    }
  }
  return 0;
}
