#include "distrib.hh"

#include <cmath>

////////////////////////////////////////////////////////////////
// Default-construct with weak symmetric hyper-parameters (a = b = 0.1)
// and empty sufficient statistics.
network_distrib_t::network_distrib_t()
  : a(0.1), b(0.1), X(0.0), N(0.0), opt(false)
{}

// Construct from explicit hyper-parameters; statistics start empty.
network_distrib_t::network_distrib_t( const hyper_t& hyper )
  : a(hyper.a), b(hyper.b), X(0.0), N(0.0), opt(false)
{}

network_distrib_t::~network_distrib_t() {}

// Accumulate one weighted observation: the effective weight is the latent
// responsibility z scaled by the rate eta.  Any cached variational
// parameters become stale.
void
network_distrib_t::add_stat(const double& x, const latent_t& latent, const rate_t& rate)
{
  const double weight = latent.z * rate.eta;
  N += weight;
  X += weight * x;
  opt = false;
}

// Accumulate an observation with unit rate (eta = 1).
void
network_distrib_t::add_stat(const double& x, const latent_t& latent)
{
  add_stat( x, latent, rate_t(1.) );
}

// Accumulate a fully-observed data point (unit responsibility, unit rate).
void
network_distrib_t::add_stat(const double& x)
{
  add_stat( x, latent_t(1.), rate_t(1.) );
}

// Geometrically discount both sufficient statistics by the rate eta
// (e.g. for streaming / stochastic updates).  Cached parameters become stale.
void
network_distrib_t::discount_stat(const rate_t& rate)
{
  const double decay = rate.eta;
  N *= decay;
  X *= decay;
  opt = false;
}


// Number of component distributions represented by this object (always 1).
size_t
network_distrib_t::size() const
{
  return 1;
}
// NOTE: removed a stray ';' after the function body (ill-formed under -pedantic).


#ifdef DEBUG
// Debug accessor: the weighted edge-count statistic X.
const double&
network_distrib_t::get_stat() const
{
  return X;
}

// NOTE(review): returns X, exactly like get_stat().  If "total" is meant to
// be the weighted total N (as in num_tot()), this should return N instead —
// confirm against debug callers before changing.
double
network_distrib_t::get_total () const
{
  return X;
}
#endif


#ifdef DEBUG
// Debug accessor: true when the cached variational parameters reflect the
// current sufficient statistics.
bool
network_distrib_t::optimized() const
{
  return opt;
}
#endif

void
network_distrib_t::add_edge(const edge_t& x)
{
  X += x.count;
}

void
network_distrib_t::add_tot(const tot_t& x)
{
  N += x.count;
}

void
network_distrib_t::add_edge(const edge_t& x, const rate_t& rate)
{
  X += x.count * rate.eta;
}

void
network_distrib_t::add_tot(const tot_t& x, const rate_t& rate)
{
  N += x.count * rate.eta;
}

// Weighted number of edges accumulated so far.
double
network_distrib_t::num_edge() const
{
  return X;
}

// Weighted total (trial) count accumulated so far.
double
network_distrib_t::num_tot() const
{
  return N;
}

// Merge another distribution's sufficient statistics into this one;
// the merged state invalidates any cached parameters.
void
network_distrib_t::operator += (const network_distrib_t& rhs)
{
  const edge_t e( rhs.num_edge() );
  const tot_t  t( rhs.num_tot() );
  add_edge( e );
  add_tot( t );
  opt = false;
}


// Parse whitespace-separated "a b X N" from str.  Returns false on a
// failed extraction (opt is only invalidated on success, as before;
// note the members may be partially assigned on failure).
bool
network_distrib_t::read( string str )
{
  istringstream iss( str );
  const bool ok = static_cast<bool>( iss >> a >> b >> X >> N );
  if( ok )
    opt = false;
  return ok;
}

// Serialize as tab-separated "a b X N" followed by a newline.
// The output round-trips through read().
string
network_distrib_t::write()
  const
{
  ostringstream oss;
  // '\n' instead of std::endl: flushing a string stream is a no-op cost;
  // the produced string is byte-identical.
  oss << a << "\t" << b << "\t" << X << "\t" << N << '\n';
  return oss.str();
}



////////////////////////////////////////////////////////////////
// bernoulli-beta distribution
// Natural-parameter caches (thetaEdge = E[log theta],
// thetaHole = E[log(1-theta)] after update_param) start at zero.
bernoulli_t::bernoulli_t()
  : network_distrib_t(), thetaEdge(0.), thetaHole(0.)
{}
bernoulli_t::bernoulli_t(const hyper_t& hyper)
  : network_distrib_t(hyper), thetaEdge(0.), thetaHole(0.)
{}
bernoulli_t::~bernoulli_t() {}

// Expected log-likelihood gradient for a soft edge indicator x in [0,1]:
// x * E[log theta] + (1-x) * E[log(1-theta)].
// Requires update_param() to have been run since the last stat change.
double
bernoulli_t::gradient(const double& x) const
{
#ifdef DEBUG
  assert( optimized() );
  assert( x >= 0. && x <= 1. );
#endif
  return thetaHole*(1.-x) + thetaEdge*x;
}

double
bernoulli_t::gradient(const edge_t& e, const tot_t& t)
  const
{
#ifdef DEBUG
  assert( optimized() );
#endif
  double edge = e.count, hole = t.count - edge;
#ifdef DEBUG
  assert( (hole + 1e-10) >= 0. );
#endif
  return thetaEdge*edge + thetaHole*hole;
}

// Log posterior-predictive probability of e.count edges out of t.count
// trials under the Beta(a+X, b+N-X) posterior — the Beta-Binomial form,
// without the combinatorial (choose) term.
double
bernoulli_t::log_pred(const edge_t& e, const tot_t& t) const
{
  using namespace boost::math;
  const double edge  = e.count;
  const double hole  = t.count - edge;
  const double alpha = a + X;
  const double beta  = b + N-X;

  // accumulation order kept stable for reproducibility
  double lp = lgamma( edge + alpha ) + lgamma( hole + beta );
  lp -= lgamma( edge + hole + alpha + beta );
  lp += lgamma( alpha + beta );
  lp -= lgamma( alpha ) + lgamma( beta );
  return lp;
}

// Point estimate of the edge probability from the variational parameters.
// NOTE(review): this is exp(thetaEdge) = exp(E[log theta]) — a geometric-
// mean-style estimate, not the arithmetic posterior mean (a+X)/(a+b+N);
// presumably intentional in the variational scheme.
double
bernoulli_t::mean() const
{
#ifdef DEBUG
  assert( optimized() );
#endif
  return exp(thetaEdge);
}

// Variational update: set the natural parameters to the Beta posterior
// expectations E[log theta] = digamma(a+X) - digamma(a+b+N) and
// E[log(1-theta)] = digamma(b+N-X) - digamma(a+b+N).
// Returns the L1 change in the parameters (convergence criterion).
double
bernoulli_t::update_param()
{
  using namespace boost::math;
#ifdef DEBUG
  assert( (N+1e-10) >= 0. && (N-X+1e-10) >= 0. );
#endif
  double eOld = thetaEdge, hOld = thetaHole;
  double denom = digamma(a+b+N);
  thetaEdge = digamma(a+X) - denom;
  thetaHole = digamma(b+N-X) - denom;
  opt = true;

  // std::fabs: unqualified abs may resolve to the integer overload from
  // <cstdlib>, silently truncating the deltas toward zero.
  double delt = std::fabs(eOld-thetaEdge) + std::fabs(hOld-thetaHole);
#ifdef DEBUG
  assert( !boost::math::isnan(delt) && !boost::math::isinf(delt) );
#endif
  return delt;
}

// Maximum-likelihood log likelihood of the accumulated counts:
// X log X + (N-X) log(N-X) - N log N, with 0 log 0 taken as 0.
double
bernoulli_t::log_lik() const
{
#ifdef DEBUG
  assert( (N+1e-10) >= 0. && (N-X+1e-10) >= 0. );
#endif
  const double edge = X;
  const double hole = N-X;
  double ll = 0.;
  if( edge > 0. ) ll += edge*log(edge);
  if( hole > 0. ) ll += hole*log(hole);
  if( N    > 0. ) ll -= N*log(N);
  return ll;
}


double
bernoulli_t::log_marg() const
{
  using namespace boost::math;
  return lgamma(a+X) + lgamma(b+N-X) - lgamma(a+b+N);
}

// Deserialize "a b X N" and refresh the variational parameters.
// Fix: update_param() is only run on a successful parse — after a failed
// extraction the hypers/statistics may be partially assigned, and digamma
// could then be evaluated at non-positive (invalid) arguments.
bool
bernoulli_t::read( string str )
{
  const bool ret = network_distrib_t::read( str );
  if( ret )
    update_param();
  return ret;
}

////////////////////////////////////////////////////////////////
// poisson distribution
// NOTE(review): the default hypers (a=0.1, b=0.2) differ from the
// base-class default (0.1, 0.1) — presumably intentional; confirm.
poisson_t::poisson_t()
  : network_distrib_t( network_distrib_t::hyper_t(.1,.2) ),
    lambda(0.), logLambda(0.)
{}
poisson_t::poisson_t(const hyper_t& hyper)
  : network_distrib_t(hyper), lambda(0.), logLambda(0.)
{}
poisson_t::~poisson_t() {}

// Expected log-likelihood gradient for count x:
// x * E[log lambda] - E[lambda]  (the -log x! constant is dropped).
// Requires update_param() to have been run since the last stat change.
double
poisson_t::gradient(const double& x) const
{
#ifdef DEBUG
  assert( optimized() );
#endif
  return x * logLambda - lambda;
}

// Posterior mean rate E[lambda] = (a+X)/(b+N), cached by update_param().
double
poisson_t::mean() const
{
#ifdef DEBUG
  assert( optimized() );
#endif
  return lambda;
}

// Variational update: cache the Gamma posterior expectations
// E[log lambda] = digamma(a+X) - log(b+N) and E[lambda] = (a+X)/(b+N).
// Returns the absolute change in E[log lambda] (convergence criterion).
double
poisson_t::update_param()
{
#ifdef DEBUG
  assert( (N+1e-10) >= 0. );
#endif

  using namespace boost::math;
  double lgLmdOld = logLambda;
  logLambda = digamma(a+X) - log(b+N);
  lambda = (a+X)/(b+N);
  opt = true;
  // std::fabs: unqualified abs may resolve to the integer overload from
  // <cstdlib>, silently truncating the delta toward zero.
  double delt = std::fabs( lgLmdOld - logLambda );
#ifdef DEBUG
  assert( !boost::math::isnan(delt) && !boost::math::isinf(delt) );
#endif
  return delt;
}

// Maximum-likelihood log likelihood of the accumulated counts at the MLE
// rate X/N (dropping the log x! term): X log X - X log N - X,
// with the empty case X == 0 defined as 0.
double
poisson_t::log_lik() const
{
#ifdef DEBUG
  assert( (N+1e-10) >= 0. );
#endif
  if( X == 0 ) return 0.;
  return (X * log(X) - X*log(N) - X);
}
// NOTE: removed a stray ';' after the function body (ill-formed under -pedantic).

// Log marginal likelihood under the Gamma(a, b) prior — the
// negative-binomial evidence, without the count-factorial constants.
double
poisson_t::log_marg() const
{
  using namespace boost::math;
  const double shapeTerm = lgamma(a+X) - lgamma(a) - lgamma(1+X);
  const double rateTerm  = a*log(b) - (a+X)*log(b+N);
  return shapeTerm + rateTerm;
}


// Expected log-likelihood gradient for aggregate counts:
// (#edges) * E[log lambda] - (#trials) * E[lambda].
double
poisson_t::gradient(const edge_t& e, const tot_t& t) const
{
#ifdef DEBUG
  assert( optimized() );
#endif
  const double grad = logLambda* e.count - lambda*t.count;
  return grad;
}

// Log posterior-predictive probability of e.count events in t.count trials
// under the Gamma(a+X, b+N) posterior — negative-binomial form, without
// the factorial term.
double
poisson_t::log_pred(const edge_t& e, const tot_t& t) const
{
  using namespace boost::math;
  const double alpha = a + X;
  const double beta  = b + N;
  const double edge  = e.count;
  const double tot   = t.count;
  const double shapePart = lgamma( alpha + edge ) - lgamma( alpha );
  const double ratePart  = alpha*log(beta) - (alpha+edge)*log(beta+tot);
  return shapePart + ratePart;
}


// Deserialize "a b X N" and refresh the cached posterior expectations.
// Fix: update_param() is only run on a successful parse — after a failed
// extraction the statistics may be partially assigned, and digamma/log
// could then be evaluated at non-positive (invalid) arguments.
bool
poisson_t::read( string str )
{
  const bool ret = network_distrib_t::read( str );
  if( ret )
    update_param();
  return ret;
}

