/*
 * accelerated_projected_gradient.h
 *
 *  Created on: Jun 4, 2011
 *      Author: tqlong
 */

#ifndef ACCELERATED_PROJECTED_GRADIENT_H_
#define ACCELERATED_PROJECTED_GRADIENT_H_

#ifndef __DEBUG_INFO__
// Expands (inside a stream-insertion expression) to the current
// source location as "file:line"; used in the error messages below.
// NOTE(review): identifiers containing "__" are reserved for the C++
// implementation; consider renaming (e.g. APG_DEBUG_INFO) when the
// call sites can be updated together.
#define __DEBUG_INFO__ __FILE__<<":"<<__LINE__
#endif

// These headers were missing, so the header was not self-contained:
// it relies on <iostream> (std::cerr/std::cout), <cstdlib> (exit) and
// <cmath> (sqrt). The Armadillo header (for arma::norm / arma::dot) is
// still expected to be provided by the including translation unit.
#include <cmath>
#include <cstdlib>
#include <iostream>

/** Minimize a function over a set using Nesterov's accelerated projected
 *  gradient method with a backtracking line search.
 *
 *  Run-time requirements (checked in the constructor via the has_* flags):
 *   - Function: value(x), value_gradient(x, fx, grad),
 *     flags has_value_ and has_gradient_.
 *   - Set: projection(x), flag has_projection_.
 *  domain_type is assumed to be an Armadillo-compatible vector type
 *  (element-wise arithmetic, .zeros(), arma::norm, arma::dot).
 */
template<typename Function, typename Set>
  class AcceleratedProjectedGradient
  {
  public:
    typedef Function func_type;
    typedef Set set_type;
    //typedef LineSearch<Function> linesearch_type;
    typedef typename Function::range_type range_type;
    typedef typename Function::domain_type domain_type;
  public:
    /** @param f        objective to minimize
     *  @param set      feasible set
     *  @param x0       starting point (projected onto the set by search())
     *  @param x        output iterate; updated in place during search()
     *  @param max_iter maximum number of outer iterations (default 1000,
     *                  matching the previously hard-coded limit)
     *  @param tol      stopping tolerance on gradient and step norms
     *                  (default 1e-5, matching the previous hard-coded value)
     *  Terminates the process if a required capability is missing.
     */
    AcceleratedProjectedGradient(func_type &f, set_type& set,
        const domain_type &x0, domain_type &x,
        int max_iter = 1000, double tol = 1e-5) :
      f_(f), set_(set), x0_(x0), y_(&x), step_(1.0),
      t_(1.0), iter_(0), max_iter_(max_iter), tol_(tol)
    {
      if (!f.has_value_)
        {
          std::cerr << "Function needs to produce value(x) at "
              << __DEBUG_INFO__ << std::endl;
          std::exit(1);
        }
      if (!f.has_gradient_)
        {
          std::cerr << "Function needs to produce gradient(x) at "
              << __DEBUG_INFO__ << std::endl;
          std::exit(1);
        }
      if (!set.has_projection_)
        {
          std::cerr << "Set needs to produce projection(x) at "
              << __DEBUG_INFO__ << std::endl;
          std::exit(1);
        }
    }

    /** Run the optimization starting from the projection of x0.
     *  @return false iff the backtracking line search failed (no step ever
     *          decreased the objective); true otherwise — converged within
     *          tolerance or max_iter exhausted. (Previously this always
     *          returned true, discarding the line-search failure flag.)
     */
    bool
    search()
    {
      // Start from the projection of x0 onto the feasible set.
      q_ = *y_ = x0_ = set_.projection(x0_);
      q_.zeros(); // accumulated momentum correction term
      t_ = 1.0;   // Nesterov extrapolation parameter
      bool ok = true;
      for (iter_ = 0; iter_ < max_iter_; iter_++)
        {
          // Extrapolated point: current iterate pulled back by the
          // accumulated correction, scaled by 1/t_.
          x_ = *y_ - 1.0 / t_ * (*y_ - x0_ + q_);
          f_.value_gradient(x_, fx_, grad_);

          domain_type ynew;
          ok = line_search(ynew);
          double step_norm = arma::norm(ynew - *y_, 2);

          *y_ = ynew;
          q_ = q_ + t_ * (x_ - ynew);
          // Standard Nesterov sequence: t_{k+1} = (1 + sqrt(1 + 4 t_k^2)) / 2.
          t_ = 0.5 * (1 + std::sqrt(1 + 4 * t_ * t_));

          f_.value_gradient(*y_, fx_, grad_);
          double grad_norm = arma::norm(grad_, 2);
          std::cout << "iter=" << iter_ << " fx=" << fx_ << " |grad|="
              << grad_norm << " step=" << step_ << " |step|=" << step_norm << std::endl;

          // Stop on line-search failure or when gradient/step are small.
          if (!ok || grad_norm < tol_ || step_norm < tol_) break;
        }
      return ok;
    }

    /** Backtracking line search from x_ along -grad_.
     *  Starts from twice the previously accepted step and halves it until
     *  the projected point satisfies a sufficient-decrease condition:
     *    f(xnew) <= f(x) + <grad, xnew - x> + ||xnew - x||^2 / (2 step)
     *  (a quadratic upper bound with curvature 1/step).
     *  @param xnew receives the accepted (projected) point
     *  @return true if a step was accepted (or a fallback to the best point
     *          seen succeeded); false if no step ever decreased the objective
     */
    bool
    line_search(domain_type &xnew)
    {
      step_ *= 2.0; // let the step grow again between outer iterations
      range_type f0 = fx_;
      range_type minf = f0;  // best objective value seen while backtracking
      double min_step = 0;   // step that achieved minf (0 = none yet)
      range_type need;
      do
        {
          step_ *= 0.5;
          xnew = x_ - step_ * grad_;
          xnew = set_.projection(xnew);
          fx_ = f_.value(xnew);
          need = f0 + arma::dot(grad_, xnew - x_) + 0.5 / step_ * arma::dot(
              xnew - x_, xnew - x_);
          if (fx_ < minf)
            {
              minf = fx_;
              min_step = step_;
            }
        }
      while (fx_ > need && step_ >= 1e-10);
      if (step_ < 1e-10) // step underflowed: fall back to best point seen
        {
          if (min_step > 0)
            {
              step_ = min_step;
              fx_ = minf;
              xnew = x_ - step_ * grad_;
              xnew = set_.projection(xnew);
              return true;
            }
          else
            {
              // No step ever decreased the objective: restore state, fail.
              step_ = 0;
              xnew = x_;
              fx_ = f0;
              return false; // failure
            }
        }
      else
        {
          return true;
        }
    }

    /** Objective value at the current iterate. */
    range_type
    fx() const
    {
      return fx_;
    }

    /** Gradient at the current iterate. */
    const domain_type&
    grad() const
    {
      return grad_;
    }
  protected:
    func_type &f_;           // objective function (borrowed reference)
    set_type &set_;          // feasible set (borrowed reference)
    domain_type x0_;         // (projected) starting point
    range_type fx_;          // current objective value
    domain_type grad_;       // current gradient
    domain_type x_, *y_, q_; // extrapolated point, iterate (caller's x), correction
    double step_, t_;        // line-search step size, Nesterov parameter
    double tol_;             // convergence tolerance (was hard-coded 1e-5)
    int max_iter_;           // outer-iteration cap (was hard-coded 1000)
    int iter_;               // current iteration counter
  };

#endif /* ACCELERATED_PROJECTED_GRADIENT_H_ */
