#include "block.hh"

// A per-tree-node Poisson score over network sufficient statistics.
// Held by shared_ptr — presumably so the model tree and the flat score
// vector in main() can alias the same score objects (TODO confirm in block.hh).
typedef func_poisson_t<network_stat_t> net_score_t;
typedef boost::shared_ptr<net_score_t> net_score_ptr;


double tot_score(const network_t& G, std::vector<net_score_ptr>& net_score_vec)
{
  double tot_score = 0.;

  for (size_t k = 0; k < net_score_vec.size(); ++k)
  {
    const net_score_t& obj = *(net_score_vec[k].get());
    tot_score += obj.score();
  }

  return tot_score;
}

// Command-line options for the fitter, constructed with the documented
// default values (see print_help).
struct prog_args_t
{
  explicit prog_args_t()
      : depth(0),
        network_file(""),
        init_color_file(""),
        output("output"),
        iter(100),
        repeat(1),
        tol(1e-2),
        a0(.1),
        b0(.1),
        eb(true)
  {
  }

  int depth;                    // depth of the model tree (-d)
  std::string network_file;     // network edge-list file (-n)
  std::string init_color_file;  // optional initial vertex coloring (-c)
  std::string output;           // output file prefix (-o)
  int iter;                     // max number of iterations (-i)
  int repeat;                   // number of repeated fittings (-r)
  double tol;                   // convergence tolerance (-t)
  double a0;                    // Gamma(lambda|a0,b0) hyper-parameter (-a0)
  double b0;                    // Gamma(lambda|a0,b0) hyper-parameter (-b0)
  bool eb;                      // run empirical Bayes unless a0/b0 given
};

// Print the usage message to stderr, filling in the current defaults
// (output prefix, iterations, tolerance, hyper-parameters) from `args`.
void print_help(const char* prog, const prog_args_t& args)
{
  std::ostream& os = std::cerr;

  os << prog << " -d [depth] -n [network] -o [output]" << std::endl
     << "      -r [repeat] -c [color] -i [iter] -t [tol]" << std::endl
     << "      -a0 [a0] -b0 [b0]" << std::endl
     << std::endl;

  os << " depth   = depth of the model tree" << std::endl
     << " network = undirected network data file" << std::endl
     << "     e.g., u v w if u ~ v with the weight w" << std::endl
     << "           u, v must be positive integers" << std::endl;

  os << " output  = output file header (default: " << args.output << ")"
     << std::endl;
  os << " color   = initial vertex color assignment" << std::endl
     << " repeat  = number of repeated fitting" << std::endl;
  os << " iter    = number of iterations (default: " << args.iter << ")"
     << std::endl;
  os << " tol     = tolerance to check convergence (default: " << args.tol
     << ")" << std::endl;
  os << " a0      = hyper-parameter for Gamma(lambda|a0,b0)"
     << " default: " << args.a0 << std::endl;
  os << " b0      = hyper-parameter for Gamma(lambda|a0,b0)"
     << " default: " << args.b0 << std::endl;
  os << std::endl;
}

bool parse_args(const int argc, const char* argv[], prog_args_t& args_out)
{
  for (int j = 1; j < argc; ++j)
  {
    std::string curr = argv[j];
    if (curr == "-d" && ++j < argc)
    {
      args_out.depth = boost::lexical_cast<int>(argv[j]);
    }
    else if (curr == "-n" && ++j < argc)
    {
      args_out.network_file = argv[j];
    }
    else if (curr == "-o" && ++j < argc)
    {
      args_out.output = argv[j];
    }
    else if (curr == "-r" && ++j < argc)
    {
      args_out.repeat = boost::lexical_cast<int>(argv[j]);
    }
    else if (curr == "-i" && ++j < argc)
    {
      args_out.iter = boost::lexical_cast<int>(argv[j]);
    }
    else if (curr == "-c" && ++j < argc)
    {
      args_out.init_color_file = argv[j];
    }
    else if (curr == "-a0" && ++j < argc)
    {
      args_out.a0 = boost::lexical_cast<double>(argv[j]);
      args_out.eb = false;
    }
    else if (curr == "-b0" && ++j < argc)
    {
      args_out.b0 = boost::lexical_cast<double>(argv[j]);
      args_out.eb = false;
    }
  }
  if (args_out.depth < 1)
    return false;
  if (args_out.network_file.size() < 1)
    return false;

  TLOG( "tree depth = " << args_out.depth << ", network file = " << args_out.network_file
	<< ", color file = " << args_out.init_color_file);

  return true;
}

// Load an initial vertex coloring ("u k" pairs) from cfile into G's
// vertex_color property map.
//
// Returns false when the file cannot be opened or contains a negative
// vertex index / color; true otherwise.
bool read_init_color(const char* cfile, network_t& G)
{
  boost::property_map<network_t, boost::vertex_color_t>::type color =
      boost::get(boost::vertex_color, G);

  std::ifstream ifs(cfile, std::ios::in);
  if (!ifs.is_open())
  {
    // BUG FIX: previously a missing/unreadable file fell through the empty
    // read loop and returned true, making main treat a bad -c path as a
    // valid init coloring (and suppressing random restarts).
    return false;
  }

  int u, k;
  while (ifs >> u >> k)
  {
    if (u < 0 || k < 0)
      return false; // ifstream closes itself (RAII); explicit close dropped
    boost::put(color, u, k);
  }
  return true;
}

////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////
// Fit a depth-D binary model tree of Poisson network scores to an
// undirected network, optionally restarting from random colorings, and
// write the best coloring plus a small log file.
// Returns 0 on success, -1 on bad arguments.
int main(const int argc, const char* argv[])
{
  prog_args_t args;

  if (!parse_args(argc, argv, args))
  {
    print_help(argv[0], args);
    return -1;
  }

  // read network data
  network_t G;
  assert_msg(read_network(args.network_file.c_str(), G),
      "data network must not be empty");
  assert_msg(args.depth > 0, "must have depth > 0");

  const int D = args.depth;        // model tree depth
  const int K = num_leaves(D);     // number of leaf clusters / colors
  const int n = num_nodes(D);      // total tree nodes (one score each)
  const int max_iter = args.iter;  // inner-loop iteration cap
  const double tol = args.tol;     // convergence tolerance (both loops)

  bool has_init_color = false;
  double best_tot_score = 0.;                      // best score
  random_index_t rIdx(K);                          // random generator

  if (args.init_color_file.size() > 0)
  {
    // try to load a user-provided vertex coloring into G
    has_init_color = read_init_color(args.init_color_file.c_str(), G);
  }

  args.repeat = has_init_color ? 1 : args.repeat;  // fixed init => one run
  int max_eb_iter = args.eb ? args.iter : 1;       // outer EB loop cap

  // step-size schedule parameters: rate = (opt_iter + tau)^(-kappa)
  double tau = 0;
  double kappa = 0.75;

  for (int r = 1; r <= args.repeat; ++r)
  {

    double a0 = args.a0, b0 = args.b0;                // initial hyper-parameter

    // one fresh score object per tree node, shared with the model tree
    typedef btree_t<net_score_ptr> model_tree_t;        // model tree
    std::vector<net_score_ptr> net_score_vec;           // network score vector
    for (int j = 0; j < n; ++j)
      net_score_vec.push_back(net_score_ptr(new net_score_t(a0, b0)));

    TLOG("created network scores");

    model_tree_t model_tree(D, net_score_vec);     // build a network model tree

    if (!has_init_color)                            // don't have initial coloring
      {
      TLOG("randomize coloring");                   //
      random_vertex_color(G, rIdx);                  // assign random colors
    }

    tree_initializer_t<net_score_ptr, network_stat_t> initializer(G, K);
    tree_path_finder_t<net_score_ptr, network_stat_t> path_finder(model_tree);
    tree_updater_t<net_score_ptr, network_stat_t> tree_updater(model_tree);

    initializer(model_tree);

    TLOG("have initial tree model ready");

    // find optimal hyper-parameters
    for (int eb_iter = 1; eb_iter <= max_eb_iter; ++eb_iter)
    {
      // inner loop: optimize the tree model under the current (a0, b0)
      for (int opt_iter = 1; opt_iter <= max_iter; ++opt_iter)
      {
        path_finder(G);                 // run the path finder on G

        // decaying step size (tau, kappa set above)
        double rate = std::pow(((double) opt_iter) + tau, -kappa);

        double delta = tree_updater(G, rate); // call tree updater

        TLOG("OPT = " << opt_iter << " rate = " << rate << " delta = " << delta);
        if (delta < tol)
          break;                        // inner loop converged
      }

      if (!args.eb)
        break;                          // hyper-parameters fixed by user flags

      double old_a0 = a0;
      double old_b0 = b0;

      // empirical Bayes of network scores
      boost::tie(a0, b0) = empirical_bayes_gamma(net_score_vec);
      // squared Euclidean change in (a0, b0) decides outer convergence
      double eb_delta = (a0 - old_a0) * (a0 - old_a0)
          + (b0 - old_b0) * (b0 - old_b0);

      TLOG("a0 = " << a0 << ", b0 = " << b0);

      if (eb_delta < tol)
        break;
    }

    double curr_score = tot_score(G, net_score_vec);

    TLOG("repeat = " << r << ", current score = " << curr_score);

    // keep the first run unconditionally, then only strict improvements
    if (r == 1 || curr_score > best_tot_score)
    {
      best_tot_score = curr_score;

      TLOG("found better score = " << best_tot_score);

      ////////////////////////////////////////////////////////////////
      // output results
      std::string color_file = args.output + ".color";
      std::string log_file = args.output + ".log";

      write_color(G, color_file.c_str());

      // log: fitted hyper-parameters and best score, tab-separated
      std::ofstream log_out(log_file.c_str(), std::ios::out);
      log_out << "net_a0\t" << a0 << std::endl;
      log_out << "net_b0\t" << b0 << std::endl;
      log_out << "score\t" << best_tot_score << std::endl;
      log_out.close();

    }
  }

  TLOG("done. best score = " << best_tot_score);

  return 0;
}
