/*=====================================================================*
 *                   Copyright (C) 2011 Paul Mineiro                   *
 * All rights reserved.                                                *
 *                                                                     *
 * Redistribution and use in source and binary forms, with             *
 * or without modification, are permitted provided that the            *
 * following conditions are met:                                       *
 *                                                                     *
 *     * Redistributions of source code must retain the                *
 *     above copyright notice, this list of conditions and             *
 *     the following disclaimer.                                       *
 *                                                                     *
 *     * Redistributions in binary form must reproduce the             *
 *     above copyright notice, this list of conditions and             *
 *     the following disclaimer in the documentation and/or            *
 *     other materials provided with the distribution.                 *
 *                                                                     *
 *     * Neither the name of Paul Mineiro nor the names                *
 *     of other contributors may be used to endorse or promote         *
 *     products derived from this software without specific            *
 *     prior written permission.                                       *
 *                                                                     *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND              *
 * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,         *
 * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES               *
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE             *
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER               *
 * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,                 *
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES            *
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE           *
 * GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR                *
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF          *
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT           *
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY              *
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE             *
 * POSSIBILITY OF SUCH DAMAGE.                                         *
 *                                                                     *
 * Contact: Paul Mineiro <paul@mineiro.com>                            *
 *=====================================================================*/

#include "commandline.hh"
#include "learningrate.hh"
#include "loss.hh"
#include "mode.hh"
#include "parse.hh"
#include "regressor.hh"
#include "regressorworker.hh"

#include <boost/format.hpp>
#include <boost/optional.hpp>
#include <boost/none.hpp>
#include <boost/program_options/options_description.hpp>
#include <boost/program_options/parsers.hpp>
#include <boost/program_options/positional_options.hpp>
#include <boost/program_options/variables_map.hpp>
#include <exception>
#include <fstream>
#include <iostream>
#include <memory>
#include <stdexcept>
#include <string>

namespace
{
using namespace flassol;

// A parsed regression example, or boost::none when the input line was bad.
typedef boost::optional<RegressionExample> MaybeRegressionExample;

// Running counters for progress reporting.  input_loop keeps two
// instances: one covering the interval since the last report and one
// covering the whole run; the interval instance is folded into the
// run-wide one via operator+= and then reset.
struct Stats
  {
    boost::optional<uint64_t>     bad_lineno;              // offset of the most recent unparseable line
    uint64_t                      badlines;                // count of lines that failed to parse
    uint64_t                      goodlines;               // count of lines parsed successfully
    float                         cur_predict;             // prediction for the most recent example
    boost::optional<float>        cur_label;               // label of the most recent example, if present
    uint64_t                      cur_features;            // feature count of the most recent example
    uint64_t                      example_count;           // examples seen, good or bad
    double                        weighted_example_count;  // sum of importance weights of labeled examples
    double                        loss;                    // accumulated importance-weighted loss

    explicit
    Stats () : bad_lineno (boost::none),
               badlines (0),
               goodlines (0),
               cur_predict (0),
               cur_label (boost::none),
               cur_features (0),
               example_count (0),
               weighted_example_count (0),
               loss (0)
      {
      }

    // Fold another Stats into this one.  Counters and loss accumulate;
    // the "current"/"latest" fields are simply overwritten by the newer
    // instance's values.
    Stats&
    operator+= (const Stats& s)
      {
        // accumulating fields
        badlines += s.badlines;
        goodlines += s.goodlines;
        example_count += s.example_count;
        weighted_example_count += s.weighted_example_count;
        loss += s.loss;

        // most-recent-value fields
        bad_lineno = s.bad_lineno;
        cur_predict = s.cur_predict;
        cur_label = s.cur_label;
        cur_features = s.cur_features;

        return *this;
      }

    // Return every field to its freshly-constructed state.
    void
    reset ()
      {
        *this = Stats ();
      }
  };

// Square of a float; used below for the squared-gradient term.
inline float
square (float v)
{
  return v * v;
}

// Thrown when a numeric field cannot be parsed as a float.
struct BadParse
{
};

// Parse an entire NUL-terminated string as a float.
//
// Throws BadParse when the string has trailing garbage, is empty, or
// contains no leading number at all.  (Previously an empty or digit-free
// string slipped through as 0: strtof consumes nothing and leaves
// endptr == s, so the trailing-garbage check alone did not fire for "".)
float
parse_float (const char* s)
{
  char* endptr;
  float rv = strtof (s, &endptr);

  // endptr == s   => strtof consumed no characters (no number present)
  // *endptr != 0  => extra characters follow the number
  if (endptr == s || *endptr != '\0')
    {
      throw BadParse ();
    }

  return rv;
}

// Convert a generic parsed example into a regression example, turning the
// textual label and importance fields into floats.  Returns boost::none
// when the input is absent or either numeric field fails to parse.
MaybeRegressionExample
from_general_example (const boost::optional<GeneralExample>& e)
{
  if (! e)
    {
      return boost::none;
    }

  try
    {
      RegressionExample ex;

      // label is optional (unlabeled examples are predicted but not learned)
      if (e->label)
        {
          ex.label = parse_float (e->label);
        }

      // importance defaults to 1 when not supplied
      ex.importance = e->importance ? parse_float (e->importance) : 1;
      ex.tag = e->tag;
      ex.f = e->f;

      return ex;
    }
  catch (BadParse&)
    {
      return boost::none;
    }
}

// Emit one progress line to 'report': cumulative and recent average loss,
// example counters, and the most recent example's label, prediction, and
// feature count.  Column layout matches the header printed by input_loop.
// Also notes any bad lines seen since the previous report.
void
do_report (std::ostream&                  report,
           const Stats&                   since_last,
           const Stats&                   cumulative)
{
  // "undefined" is printed until at least one example has been seen,
  // avoiding a 0/0 division; "unknown" when the latest example had no label.
  report << boost::format ("%-9s %-9s %9llu %9.0f %9s %9f %8llu")
    % ((cumulative.example_count > 0)
         ? (boost::format ("%-9.6f")
              % (cumulative.loss / cumulative.weighted_example_count)
           ).str ()
         : "undefined"
      )
    % ((since_last.example_count > 0)
         ? (boost::format ("%-9.6f")
              % (since_last.loss / since_last.weighted_example_count)
           ).str ()
         : "undefined"
      )
    % cumulative.example_count
    % cumulative.weighted_example_count
    % (cumulative.cur_label
          ? (boost::format ("%9f") % *cumulative.cur_label).str ()
          : "unknown")
    % cumulative.cur_predict
    % cumulative.cur_features
  << std::endl;

  // bad_lineno is only set when at least one line failed to parse in the
  // interval, so this block is silent on a clean run.
  if (since_last.bad_lineno)
    {
      report << since_last.badlines
             << " bad lines encountered since last, latest at offset "
             << *since_last.bad_lineno
             << std::endl;
    }
}

// Main learning/prediction loop.  Reads examples from 'in' until EOF,
// computes a prediction for each, optionally writes it to 'predict',
// and — unless test_only is set — performs an importance-invariant
// gradient update on the regressor.  Progress is reported to 'report'
// (when present) at roughly doubling example intervals and once at the
// end.  Always returns 0.
int
input_loop (std::istream&                       in,
            boost::optional<std::ostream&>      report,
            boost::optional<std::ofstream&>     predict,
            bool                                test_only,
            Regressor&                          r,
            std::auto_ptr<Loss>&                loss,
            std::auto_ptr<LearningRate>&        eta,
            float                               lambda,
            bool                                adaptive,
            WorkerPool&                         worker_pool)
{
  using std::endl;

  // Parse with the same hash option the model was built with, so feature
  // hashing matches between training and test runs.
  enum StandardParse::hash_option_value ho = 
    static_cast<StandardParse::hash_option_value> 
      (r.get_header ().hash_option);
  StandardParse parse = StandardParse ().hash_option (ho);
  Stats cumulative;   // totals over the whole run
  Stats since_last;   // totals since the last progress report

  if (report)
    {
       *report <<
"cumul     since       example  weighted   current   current  current"
           << endl <<
"loss      last        counter   counter     label   predict features"
           << endl;
    }

  for (uint64_t lineno = 0, output_number = 0; in.good (); ++lineno)
    {
      // ~1 MB line buffer, re-declared (on the stack) each iteration.
      char buf[1024768];
      boost::optional<GeneralExample> example = parse (in, buf, sizeof (buf));

      if (in.good ())
        {
          MaybeRegressionExample reg (from_general_example (example));

          ++since_last.example_count;

          if (! reg)
            {
              // unparseable line: remember where, keep going
              since_last.bad_lineno = lineno;
              ++since_last.badlines;
            }
          else
            {
              ++since_last.goodlines;

              // forward pass: prediction plus the norms needed for the update
              std::vector<Regressor::EstimateResult> result (1);
              worker_pool.estimate (r, reg->f.get (), result);

              since_last.cur_predict = result[0].p;
              since_last.cur_label = reg->label;
              since_last.cur_features = result[0].n;

              if (predict)
                {
                  *predict << result[0].p;

                  // tag (when present) is echoed after the prediction
                  if (reg->tag)
                    {
                      *predict << "\t" << reg->tag;
                    }

                  *predict << endl;
                }

              // only labeled examples contribute loss and updates
              if (reg->label)
                {
                  since_last.loss += 
                    reg->importance * loss->loss (result[0].p, *reg->label);
                  since_last.weighted_example_count += reg->importance;

                  // skip degenerate examples (xnorm == 0) and test-only runs
                  if (result[0].xnorm > 0 && ! test_only)
                    {
                      // learning-rate schedule is driven by the running
                      // weighted example count (cumulative + current interval)
                      double t = cumulative.weighted_example_count +
                                 since_last.weighted_example_count;
                      float thiseta = eta->eta (t);

                      // squared importance-weighted gradient of the loss
                      float gsq = 
                        square (  loss->dldp (result[0].p, *reg->label)
                                * reg->importance);
                      
                      if (gsq > 0)
                        {
                          std::vector<Regressor::UpdateInfo> info (1);

                          info[0].gsq = gsq;
                          info[0].adaptive = adaptive;

                          // adaptive learning rates need the per-feature
                          // adaptive norm before the update can be sized
                          if (adaptive)
                            {
                              worker_pool.adaptive_norm (r,
                                                         reg->f.get (),
                                                         info);
                            }
                      
                          // TODO: implement decay

                          // importance-invariant step size and decay factor
                          std::pair<float, float> update = 
                            loss->invariant_update (result[0].p,
                                                    result[0].wdotx,
                                                    result[0].adotb,
                                                    *reg->label, 
                                                    reg->importance * thiseta,
                                                    result[0].anormplusbnorm,
                                                    result[0].xnorm,
                                                    adaptive,
                                                    info[0].adaptivexnorm,
                                                    lambda);

                          // zero step => nothing to write back
                          if (fabsf (update.first) > 0)
                            {
                              info[0].sh = update.first;
                              info[0].decay = update.second;

                              worker_pool.update (r, reg->f.get (), info);
                            }
                        }
                    }
                }
            }

          // report at exponentially growing intervals: after the interval
          // count exceeds 2^output_number, fold into cumulative and reset
          if (since_last.example_count > (1ULL << output_number))
            {
              cumulative += since_last;

              if (report) 
                {
                  do_report (*report, since_last, cumulative);
                }

              since_last.reset ();

              ++output_number;
            }
        }
    }

  // final fold + report for whatever remains after EOF
  cumulative += since_last;

  if (report)
    {
      do_report (*report, since_last, cumulative);
      loss->report (*report, Loss::INFO);
    }

  return 0;
}

// Shared driver for the iwr and iwc front ends.  Registers the options
// common to both modes, validates the command line, constructs the
// regressor, loss, and learning rate, then runs input_loop over stdin.
// 'desc' holds the caller's user-visible options, 'all' additionally
// carries hidden options (the callers register "mode" there), and 'pd'
// maps positional arguments.  Returns input_loop's result on success,
// 1 on any usage or configuration error.
int
common_main (int argc,
             char* argv[],
             boost::program_options::options_description& desc,
             boost::program_options::options_description& all,
             const boost::program_options::positional_options_description& pd)
{
  using namespace boost::program_options;
  using std::cerr;
  using std::endl;
  using std::exception;
  using std::string;
  using std::vector;

  desc.add_options ()
    ("help", "produce help message")
    ("adaptive", value<bool> ()->default_value (true), "use adaptive learning rate")
    ("eta", value<string> ()->default_value ("powerlaw (1, 1, 0)"), "learning rate")
    ("lambda", value<float> ()->default_value (0.1f), "dyadic l2 regularizer")
    ("model", value<string>()->default_value ("model"), "model file")
    ("num_threads", value<unsigned int>()->default_value (1), "number of threads")
    ("num_weight_bits", value<unsigned int>()->default_value (16), "log_2 number of weights")
    ("predict", value<string>(), "(optional) file to output predictions")
    ("ngram", value<vector<string> > (), "ngram spec")
    ("quadratic", value<vector<string> > (), "quadratic interactions spec")
    ("dotproduct", value<vector<string> > (), "dotproduct interactions spec")
    ("quiet", "suppress output")
    ("test", "only test: do not learn")
    ;
  all.add (desc);

  variables_map vm;

  try
    { 
      command_line_parser parser (argc, argv);
      parser.options (all);
      parser.positional (pd);
      store (parser.run (), vm);
      notify (vm);
    }
  catch (exception& e)
    { 
      cerr << "ERROR: " << e.what () << endl;
      cerr << desc;
      return 1;
    }

  if (vm.count ("help"))
    { 
      // "mode" is registered (with a default) by every caller, so this
      // lookup is safe; it selects which blurb to print.
      if (vm["mode"].as<string> () == "iwr")
        {
          cerr << "importance weighted regression" << endl;
          cerr << "  estimate a real-valued quantity which (hopefully) minimizes " << endl;
          cerr << "  the conditional expected loss given the input" << endl << endl;
        }
      else 
        {
          cerr << "importance weighted (binary) classification" << endl;
          cerr << "  estimate a class label which (hopefully) minimizes " << endl;
          cerr << "  the conditional expected weighted loss given the input" << endl << endl;
        }
      cerr << desc;
      return 1;
    }

  // Early validation of the loss spec; the result is intentionally
  // discarded — the loss actually used comes from the model header below.
  try
    {
      std::auto_ptr<Loss> tmp = get_loss (vm["loss"].as<string> ());
    }
  catch (std::invalid_argument& ia)
    {
      cerr << "ERROR: invalid loss specification: '" 
           << vm["loss"].as<string> ()
           << "': " 
           << ia.what () 
           << endl;
      return 1;
    }

  if (vm["num_threads"].as<unsigned int> () == 0)
    {
      cerr << "ERROR: num_threads underflow (= "
           << vm["num_threads"].as<unsigned int> ()
           << " )" << endl;
      return 1;
    }

  if (vm["num_weight_bits"].as<unsigned int> () > 63)
    {
      cerr << "ERROR: num_weight_bits overflow (= " 
           << vm["num_weight_bits"].as<unsigned int> ()
           << " )" << endl;
      return 1;
    }

  std::vector<std::pair<uint8_t, Regressor::NGramSpec> > ngram;

  if (vm.count ("ngram"))
    {
      vector<string> ns = vm["ngram"].as<vector<string> > ();

      for (vector<string>::iterator n = ns.begin (); n != ns.end (); ++n)
        {
          boost::optional<std::pair<uint8_t, Regressor::NGramSpec> > 
            parsed_n = parse_ngram (*n);

          // parse_ngram reports its own error; just bail
          if (! parsed_n)
            {
              return 1;
            }

          ngram.push_back (*parsed_n);
        }
    }

  std::vector<std::pair<uint8_t, uint8_t> > quadratic;

  if (vm.count ("quadratic"))
    {
      vector<string> qs = vm["quadratic"].as<vector<string> > ();

      for (vector<string>::iterator q = qs.begin (); q != qs.end (); ++q)
        {
          boost::optional<std::pair<uint8_t, uint8_t> > 
            parsed_q = parse_quadratic (*q);

          if (! parsed_q)
            {
              return 1;
            }

          quadratic.push_back (*parsed_q);
        }
    }

  std::vector<std::pair<uint8_t, uint8_t> > dotproduct;

  if (vm.count ("dotproduct"))
    {
      vector<string> qs = vm["dotproduct"].as<vector<string> > ();

      for (vector<string>::iterator q = qs.begin (); q != qs.end (); ++q)
        {
          boost::optional<std::pair<uint8_t, uint8_t> > 
            parsed_q = parse_dotproduct (*q);

          if (! parsed_q)
            {
              return 1;
            }

          dotproduct.push_back (*parsed_q);
        }
    }

  std::auto_ptr<LearningRate> eta;
  
  try
    {
      std::auto_ptr<LearningRate> tmp = get_learning_rate (vm["eta"].as<string> ());
      eta = tmp;
    }
  catch (std::invalid_argument& ia)
    {
      cerr << "ERROR: invalid learning rate specification '" 
           << vm["eta"].as<string> ()
           << "': " 
           << ia.what () 
           << endl;
      return 1;
    }

  if (vm["lambda"].as<float> () < 0.0f)
    {
      cerr << "ERROR: invalid dyadic regularizer specification "
           << vm["lambda"].as<float> ()
           << endl;
      return 1;
    }

  // NB: shift in 64 bits.  num_weight_bits may legitimately be up to 63
  // (checked above), and the previous (1 << bits) shifted a 32-bit int,
  // which is undefined behavior for bits >= 31.
  Regressor r = Regressor::Open (vm["model"].as<string> ())
                  .num_weights (static_cast<uint64_t> (1) << vm["num_weight_bits"].as<unsigned int> ())
                  .loss (vm["loss"].as<string> ())
                  .read_only (vm.count ("test"))
                  .add_quadratic (quadratic)
                  .add_dotproduct (dotproduct)
                  .add_ngram (ngram)
                ;

  // this driver only handles binary models
  if (r.get_header ().num_classes != 2)
    {
      cerr << "ERROR: multiclass model detected: num_classes = " 
           << r.get_header ().num_classes
           << std::endl;

      return 1;
    }

  // The effective loss comes from the model header (an existing model may
  // have been trained with a different loss than the command line default).
  std::auto_ptr<Loss> loss;
  
  try
    {
      std::auto_ptr<Loss> tmp = get_loss (r.get_header ().loss);
      loss = tmp;
    }
  catch (std::invalid_argument& ia)
    {
      cerr << "ERROR: invalid loss specification: '" 
           << r.get_header ().loss 
           << "': " 
           << ia.what () 
           << endl;
      return 1;
    }

  std::ofstream predict;

  if (vm.count ("predict"))
    {
      predict.open (vm["predict"].as<string> ().c_str ());

      if (! predict.good ())
        {
          cerr << "ERROR: can't open prediction file " 
               << vm["predict"].as<string> ()
               << ": " << strerror (errno)
               << endl;

          return 1;
        }
    }

  // Echo the effective configuration unless --quiet was given.
  if (vm.count ("quiet") == 0)
    {
      cerr << "adaptive = " << (vm["adaptive"].as<bool> () ? 1 : 0) << endl;
      cerr << "eta = " << vm["eta"].as<string> () << endl;
      cerr << "lambda = " << vm["lambda"].as<float> () << endl;
      cerr << "num_threads = " << vm["num_threads"].as<unsigned int> () << endl;
      if (vm.count ("test"))
        {
          cerr << "test only" << endl;
        }

      if (vm.count ("predict"))
        {
          cerr << "write predictions to " 
               << vm["predict"].as<string> ()
               << endl;
        }

      cerr << r.get_header ();
    }

  // the calling thread does work too, hence num_threads - 1 pool workers
  WorkerPool worker_pool (vm["num_threads"].as<unsigned int> () - 1);

  return input_loop (std::cin, 
                     (vm.count ("quiet") 
                        ? boost::optional<std::ostream&> (boost::none)
                        : boost::optional<std::ostream&> (cerr)),
                     (vm.count ("predict")
                        ? boost::optional<std::ofstream&> (predict)
                        : boost::optional<std::ofstream&> (boost::none)),
                     vm.count ("test"),
                     r, 
                     loss,
                     eta,
                     vm["lambda"].as<float> (),
                     vm["adaptive"].as<bool> (),
                     worker_pool);
}

}

namespace flassol
{

int
iwr_main (int   argc,
          char* argv[])
  {
    using namespace boost::program_options;
    using std::string;
    using std::vector;

    options_description desc ("Allowed options");
    options_description all ("");
    positional_options_description pd;

    desc.add_options ()
      ("loss", value<string>()->default_value ("squaredclipped"), get_registered_losses ().c_str ());
    all.add_options ()
      ("mode", value<string> ()->default_value ("iwr"), modes_string.c_str ());

    pd.add ("mode", 1);
    pd.add ("model", -1);

    return common_main (argc, argv, desc, all, pd);
  }

int
iwc_main (int   argc,
          char* argv[])
  {
    using namespace boost::program_options;
    using std::string;
    using std::vector;

    options_description desc ("Allowed options");
    options_description all ("");
    positional_options_description pd;

    desc.add_options ()
      ("loss", value<string> ()->default_value ("hinge"), get_registered_losses ().c_str ());
    all.add_options ()
      ("mode", value<string> ()->default_value ("iwc"), modes_string.c_str ());

    pd.add ("mode", 1);
    pd.add ("model", -1);

    return common_main (argc, argv, desc, all, pd);
  }

}
