# python imports
import math
from time import time

# local imports
from misc.util import Counter
from misc.poly import Polynomial

from world import exec_dep_varkey, is_stochastic_varkey, fn_varkey, WorldDiff,\
     VARTYPE_DETERMINISTIC, VARTYPE_STOCHASTIC, VARTYPE_GROUPBY_CHILD,\
     VARTYPE_GROUPBY_PARENT
from dist.categorical import Categorical
import glob
from glob import GROUPBY_NOTHING, GROUPBY_UNUSED_ARGS, GROUPBY_INDICATOR_FN

IS_NOT_INDICATOR, IS_INDICATOR, IS_PURE_INDICATOR = range(3)

def flip_var(var, world, rand, trace, statsobj, skip_switching=False):
  """
  Propose a new value for the variable.

  Returns 1 if the move failed (rejected), 0 if it succeeded.

  This procedure may not always be able to do Gibbs moves (e.g. for
  switching variables).  Any deterministically dependent child random
  variable is also changed, and the distributions of the stochastically
  dependent children are updated to reflect the new value of the
  changed variable.

  For switching variables, the world might generate new variables or
  delete some variables.
  """
  started = time()
  # per-function statistics bucket, keyed by the variable's function name
  mystats = statsobj[fn_varkey(var.varkey)]
  try:
    return _flip_var(var, world, rand, trace, mystats, statsobj,
                     skip_switching)
  finally:
    # accumulate call count, total time, and total squared time
    elapsed = time() - started
    mystats[0] += 1
    mystats[1] += elapsed
    mystats[2] += elapsed * elapsed
    
def _flip_var(var, world, rand, trace, mystats, statsobj, skip_switching):
  if trace:
    print "flipping", str(var)
  # get the definition for this function
  vardef = glob.var_def[fn_varkey(var.varkey)]

  if skip_switching and vardef.is_switching:
    return 1
  
  # has the user provided a sampler for it?
  if hasattr(vardef, "gibbs_simple"):
    propfn = vardef.gibbs_simple
    newval = propfn(rand, world.get_dist(var), var.value,
                    *world.get_children_values(var))
                    
    mystats[propfn.__name__] += 1
    if trace:
      print "User supplied Gibbs proposer", propfn.__name__,\
            "-->", newval
      print "Child values for Gibbs proposer",\
            world.get_children_values(var)

    # check if the variable has changed before trying to change its value
    # can't do != for matrices, assume its different
    if hasattr(newval, "shape") or newval != var.value:
      # a variable with gibbs_simple proposer has to be non-switching
      numvar = len(world.variables)
      world.update_nonswitching_var(var, newval)
      assert(numvar == len(world.variables))
    return 0

  # use the switching proposer for switching variables or
  # if we have been asked to not use conjugacy
  if vardef.is_switching or (not glob.PARAM_GIBBS_CONJUGATE):
    if skip_switching:
      return 1
    return  switching_proposer(world, var, rand, trace, mystats, statsobj)

  # get the likelihood of this variable
  like = get_likelihood(world, var, trace)
  # if we don't have a likelihood then this is a switching variable
  if like is None:
    if trace:
      print "no likelihood -> switching variable"
    vardef.is_switching = True
    if skip_switching:
      return 1
    return switching_proposer(world, var, rand, trace, mystats, statsobj)
  elif trace:
    print "likelihood:", str(like)

  # store the likelihood just to inform the caller
  var.like = like
  
  # if the likelihood is constant then simply sample from the prior
  if like == 1:
    mystats[var.dist.__class__.__name__] += 1
    rej, newval = 0, world.get_dist(var).sample(rand)
  # otherwise, try to multiply the likelihood with the prior and see
  # if we get a distribution
  else:
    try:
      prior_dist = world.get_dist(var)
      prop = (like * prior_dist).get_normalized()
      if trace:
        print "Conjugate prior and likelihood -> proposer", prior_dist, like,\
              prop
      mystats[prop.__class__.__name__] += 1
      rej, newval = 0, prop.sample(rand)
    except:
      if trace:
        print "Non-conjugate prior and likelihood", prior_dist, like
      # see if the prior has a proposer
      failed = True
      if hasattr(prior_dist, "propose"):
        try:
          rej, newval = prior_dist.propose(rand, like)
          mystats[prior_dist.__class__.__name__+".propose()"] += 1
          failed = False
        except:
          pass
      # for non-conjugate priors use switching variable proposer
      if failed:
        if trace:
          print "non-conjugate prior-likelihood -> switching variable"
        vardef.is_switching = True
        if skip_switching:
          return 1
        return switching_proposer(world, var, rand, trace, mystats, statsobj)
        
  if trace:
    print "Proposed value %s was %s" % (newval, ["ACCEPTED", "REJECTED"][rej])

  # was the proposal rejected?
  if rej == 1:
    return 1

  # if accepted then we need to update the world, but only if the variable
  # has actually been assigned a new value!
  
  # can't do != for matrices, assume its different  
  if hasattr(newval, "shape") or newval != var.value:
    numvar = len(world.variables)
    world.update_nonswitching_var(var, newval)
    # sanity check, the number of variables shouldn't have changed
    assert(numvar == len(world.variables))
  
  return 0

def get_likelihood(world, var, trace):
  """
  Get the likelihood object of the given variable in the world.
  If the likelihood is unknown then this will return None
  """
  # the variable better have children, otherwise the world is not minimal
  assert(var.children)

  # the try-finally block is to restore the value of the variable being
  # modified here and the state of the world
  try:
    oldval = var.value
    world.raise_on_new_var = True
    # for deterministic variables since the parent already must be a polynomial
    # it suffices to simply compute the value of the variable
    if var.type == VARTYPE_DETERMINISTIC:
      var.value = exec_dep_varkey(var.varkey)
      # if we don't get a polynomial for the variables value then
      # it is not a polynomial in the parent and we can't compute the
      # likelihood
      if not hasattr(var.value, "get_polynomial_coeffs"):
        if trace:
          print "deterministic dep var %s is not a polynomial in parent" \
                % str(var)
        return None
      elif trace:
        print "deterministic dep var %s is poly %s in parent" \
              % (str(var), str(var.value))
    # otherwise, assign a polynomial to the variable's value
    # to help generate likelihood objects in the children
    else:
      var.value = Polynomial(oldval)
      
    # get the likelihood for the children and multiply them together
    likelihood = 1
    for childkey in var.children:
      childvar = world.variables[childkey]
      if childvar.type == VARTYPE_DETERMINISTIC:
        like = get_likelihood(world, childvar, trace)
      elif childvar.type == VARTYPE_STOCHASTIC:
        try:
          # evaluate at the data point to get the likelihood object
          tmp = exec_dep_varkey(childkey)
          childstats = childvar.dist.SufficientStats()
          childstats.add(childvar.value)
          like = tmp(childstats)
        except:
          if trace:
            print "no likelihood for child %s of %s" % (childvar, var)
          return None
      elif childvar.type == VARTYPE_GROUPBY_PARENT:
        try:
          # pick some groupby-child of this child var
          somekey = childvar.children.pop()
          childvar.children.add(somekey)
          # evaluate at the data point to get a temp lambda function
          tmp = exec_dep_varkey(somekey)
          # now use the sufficient stats to get the likelihood function
          like = tmp(childvar.child_suff_stats)
        except:
          if trace:
            print "no likelihood for child %s of %s" % (childvar, var)
          return None
      elif childvar.type == VARTYPE_GROUPBY_CHILD:
        # If a groupby child is a direct child of this variable then
        # the current variable is an indicator variable for that child.
        # Hence the current variable is a switching variable.
        if trace:
          print "indicator for child:", str(childvar)
        return None
      else:
        assert(False, "unknown child type")
        
      # multiply the child's likelihood to the existing likelihood
      if like is None:
        return None
      elif likelihood == 1:
        likelihood = like
      elif like != 1:
        likelihood = likelihood * like
    
    return likelihood
  
  # restore the variable's true value before returning
  finally:
    var.value = oldval
    world.raise_on_new_var = False

def mh_nonswitching_proposer(curr_value, prior_dist, like, rand, trace):
  """
  Propose a value for a non-switching variable using a Metropolis-Hastings
  rule
  """
  # we need to first propose a value and then compute the acceptance ratio
  # for proposing a value we can use either the prior or the likelihood,
  # with equal probability
  
  # propose from the likelihood if possible and with probability .5
  if hasattr(like, "sample") and rand.random() < .5:
    newval = like.sample(rand)
    log_accept = prior_dist.log_prob(newval) - prior_dist.log_prob(curr_value)
  else:
    newval = prior_dist.sample(rand)
    log_accept = like.log_prob(newval) - like.log_prob(curr_value)

  if trace:
    print "mh_nonswitching_proposer: %s --> %s  accep prob: %.5g" \
          % (str(curr_value), str(newval), math.exp(log_accept))

  # M-H acceptance rule
  if log_accept >= 0.0 or rand.random() < math.exp(log_accept):
    return 0, newval
  return 1, None

def resample_new_world(neww, rand=None, trace=False, statsobj=None):
  """
  Sample a new value for all variables created in the given world diff,
  and then for the variables that acquired new parents (skipping
  evidence variables).

  NOTE(review): the original body referenced the free names ``rand``,
  ``trace``, ``statsobj`` and ``currworld``, none of which exist at
  module scope, so any call raised NameError.  They are now explicit
  keyword parameters (callers should pass real values), and the evidence
  set is read from ``neww`` itself -- confirm against the intended
  call sites (currently commented out elsewhere in this file).
  """
  # temporarily make the new world current so sampling resolves against it
  old_world = glob.curr_world
  glob.curr_world = neww

  # sample newly created variables
  for v2 in neww.created_vars:
    var2 = neww.variables[v2]
    # we can only sample variables with a stochastic dependency
    if var2.dist is not None:
      flip_var(var2, neww, rand, trace, statsobj, skip_switching=True)
  # restore current world
  glob.curr_world = old_world

  # sample variables with new parents
  for v2 in neww.vars_new_parents:
    var2 = neww.variables[v2]
    # we can only sample variables with a stochastic dependency
    # and those that aren't evidence
    if var2.dist is not None and var2.varkey not in neww.evidence_vars:
      flip_var(var2, neww, rand, trace, statsobj, skip_switching=True)

def switching_proposer(currworld, var, rand, trace, mystats, statsobj):
  """
  Dispatch to the most specific proposer available for a switching
  variable, falling back to Metropolis-Hastings.

  Returns 0 on success, 1 on failure/rejection.
  """
  var_dist = currworld.get_dist(var)

  # a fully enumerable support lets us do an exact Gibbs step
  if glob.PARAM_GIBBS_DISCRETE and hasattr(var_dist, "finite_support"):
    if indicator_type(var) == IS_NOT_INDICATOR:
      gibbs_switching_proposer(currworld, var, rand, trace, mystats, statsobj)
    else:
      gibbs_finite_indicator(currworld, var, rand, trace, mystats, statsobj)
    return 0

  # partially instantiated discrete distributions have a dedicated
  # proposer; it returns falsy on success
  if glob.PARAM_GIBBS_PARTIAL_DISCRETE \
     and hasattr(var_dist, "get_partialwts_residualprob") \
     and not gibbs_partial_proposer(currworld, var, rand, trace,
                                    mystats, statsobj):
    return 0

  # generic fallback
  return mh_switching_proposer(currworld, var, rand, trace, mystats,
                               statsobj)

def indicator_type(var):
  """
  Classify this variable's role with respect to its children.

  Returns one of
  - IS_NOT_INDICATOR
  - IS_INDICATOR
  - IS_PURE_INDICATOR
  """
  # start optimistic and downgrade while examining the children
  result = IS_PURE_INDICATOR
  for childkey in var.children:
    childdef = glob.var_def[childkey[0]]
    # a child not lifted by an indicator function disqualifies this var
    if childdef.groupby != GROUPBY_INDICATOR_FN:
      return IS_NOT_INDICATOR
    # a child without bound functions means we are not a *pure* indicator
    if childdef.bound_fns is None:
      result = IS_INDICATOR

  return result

def gibbs_finite_indicator(world, var, rand, trace, mystats, statsobj):
  """
  Gibbs proposer for indicator variables with finite domain. These
  indicators are not assumed to be pure (but they could be)
  """
  # First remove the variables and its children from any lifted variables
  # that they might belong to. This will correct the sufficient stats for
  # collapsing later on
  for child in var.children:
    # delete references to the old lifted child node
    gby_child = world.variables[((child[0],),var.value)]
    childnode = world.variables[child]
    gby_child.remove_child_var(childnode)
  
  if var.type == VARTYPE_GROUPBY_CHILD:
    lifted_var = world.variables[var.dist]
    lifted_var.remove_child_var(var)
    var_dist = world.get_collapsed_dist(lifted_var, var.varkey)
  else:
    var_dist = var.dist
  
  possible_vals = var_dist.finite_support()
  
  weights = Counter()

  gby_children = []
  old_val = var.value
  for val in possible_vals:
    wt = var_dist.log_prob(val)
    for childkey in var.children:
      childnode = world.variables[childkey]
      gby_child = world.query_var(((childkey[0],), val), False)
      # change the value of the variable so that when the collapsed
      # distribution is being looked up, the correct parent shows up
      var.value = val
      gby_dist = world.get_collapsed_dist(gby_child, childkey)
      wt += gby_dist.log_prob(childnode.value)
      gby_children.append(gby_child)

    weights[val] = wt

  # now, choose one of the values
  weights.lognormalize()

  sampled_val = weights.sample(rand)

  if trace:
    print "gibbs_finite_indicator:",
    print "weights:", weights
    print "sampled:", sampled_val

  if sampled_val != old_val:  
    mystats["gibbs_indicator-ACC"] += 1
  else:
    mystats["gibbs_indicator-SAME"] += 1

  # change the variable
  var.value = sampled_val

  # now put the variable and its children back into the appropriate
  # lifted node
  for child in var.children:
    # refer to the new lifted child
    gby_child = world.variables[((child[0],), var.value)]
    gby_child.add_child_var(childnode)
  
  if var.type == VARTYPE_GROUPBY_CHILD:
    lifted_var = world.variables[var.dist]
    lifted_var.add_child_var(var)

  # remove any un-needed lifted child nodes
  for gby_child in gby_children:
    if not gby_child.children:
      world.remove_var(gby_child)
  

def gibbs_partial_proposer(world, var, rand, trace, mystats, statsobj):
  """
  Gibbs proposer for variables with partially instantiated discrete values
  """
  # we need to check if the Gibbs proposer for partially instantiated
  # discrete distribution is applicable.

  vardef = glob.var_def[var.varkey[0]]

  # it must have only one kind of children, otherwise its expensive to
  # detect if we can uninstantiate values
  if not hasattr(vardef, "childfns"):
    vardef.childfns = set(child[0] for child in var.children)
  elif vardef.childfns is None:
    return 1

  # this variable must be a pure indicator for all its children
  for child in var.children:
    # if any variable of this function has a different set of children
    # then we'll disable this optimization for all variables of this function
    if child[0] not in vardef.childfns:
      vardef.childfns = None
      return 1

    # is variable a pure indicator?
    child_vardef = glob.var_def[child[0]]
    if (child_vardef.groupby != GROUPBY_INDICATOR_FN) \
       or (child_vardef.bound_fns is None):
      vardef.childfns = None
      return 1
  
  # get the parents of this variable
  if (vardef.groupby == GROUPBY_NOTHING):
    parents =  var.parents
    dist = var.dist
  elif vardef.groupby == GROUPBY_UNUSED_ARGS:
    gby = world.variables[list(var.parents)[0]]
    parents = gby.parents
    dist = gby.dist
  # FUTURE: we could handle groupby indicator here perhaps
  else:
    return 1

  # the parents must have only one child, otherwise we can't uninstantiate
  # values
  for par in parents:
    parnode = world.variables[par]
    if len(parnode.children) > 1:
      return 1

  if trace:
    print "Variable satisfies requirement for gibbs_partial_proposer"
  

  # check if the current value of the variable is the only occurrence of
  # this value -- since its a pure indicator and its children set don't
  # change, its enough to check if the grouped variable for each of its
  # child has only one child
  singleton = True
  for childfn in vardef.childfns:
    gby_child = world.variables[((childfn,), var.value)]
    if len(gby_child.children) > 1:
      singleton = False
      break

  # if its not a singleton then we need to generate a value
  # for the indicator and the grouped child variables
  if not singleton:
    # sometimes we can't generate new values
    try:
      genval, genprob = dist.generate(rand)
      generated = True
    except ValueError:
      generated = False
  
    gen_gby_children = [world.query_var(((childfn,), genval), False) \
                        for childfn in vardef.childfns]

  else:
    generated = False
  
  # now, get the partially instantiated values
  itemprob, residual = dist.get_partialwts_residualprob()

  # create a set of weights for each value of this variable
  weights = Counter()

  childnodes = [world.variables[child] for child in var.children]
  #now, compute the weights of all values
  for val, prob in itemprob.iteritems():
    wt = math.log(prob)
    for childnode in childnodes:
      gby_child = world.variables[((childnode.varkey[0],), val)]
      wt += gby_child.dist.log_prob(childnode.value)

    weights[val] = wt

  # now, choose one of the values
  weights.lognormalize()

  if trace:
    print "weights:", weights
  
  sampled_val = weights.sample(rand)
  
  # do we need to uninstantiate the generated value
  if generated and (sampled_val != genval):
    for gby_child in gen_gby_children:
      if not gby_child.children:
        world.remove_var(gby_child)
    dist.uninstantiate(genval)

  # do we need to uninstantiate the current value
  elif singleton and (sampled_val != var.value):
    dist.uninstantiate(var.value)

  # do we need to change parent-child pointers
  if sampled_val != var.value:
    if trace:
      print "Changing to value", sampled_val, "prob", weights[sampled_val]
    mystats["gibbs_partial-ACC"] += 1

    for child in var.children:
      # delete references to the old lifted child node
      gby_child = world.variables[((child[0],),var.value)]
      childnode = world.variables[child]
      gby_child.remove_child_var(childnode)
      if not gby_child.children:
        world.remove_var(gby_child)
      # refer to the new lifted child
      gby_child = world.variables[((child[0],), sampled_val)]
      gby_child.add_child_var(childnode)
    
    # finally, change the variable
    if var.type == VARTYPE_GROUPBY_CHILD:
      lifted_var = world.variables[var.dist]
      lifted_var.upd_child_stats(var.value, sampled_val)
    var.value = sampled_val
    
  else:
    if trace:
      print "Keeping same value", var.value, "prob", weights[var.value]
    mystats["gibbs_partial-SAME"] += 1
    
    
def gibbs_switching_proposer(currworld, var, rand, trace, mystats, statsobj):
  """
  Gibbs proposer for a switching variable whose distribution has a
  finite support.

  One candidate WorldDiff is built per possible value (the slot for the
  current value stays None, meaning "keep the current world"); each world
  is weighted by the log-probability of the variable plus that of the
  children common to all worlds; one world is then sampled in proportion
  to the weights and merged if it differs from the current one.
  """
  # for each possible value, create a world with that value in it
  # (except for the current value of the variable)
  
  # TODO: delete and reinstantiate variables when removing switched edges
  # and remove such vars from vars_new_parents. Then we won't need to keep
  # track of all_deleted_vars
  
  newworlds = []
  all_deleted_vars = set()          # variables deleted in any world
  var_dist = currworld.get_dist(var)
  for newval in var_dist.finite_support():
    # skip current value
    if newval == var.value:
      newworlds.append(None)
      continue
    # create a new world
    neww = WorldDiff(currworld)
    newworlds.append(neww)
    # change the variable in the new world
    neww.change_var(var.varkey, newval)
    neww.close()
    
    ### resample_new_world(neww)
    
    # update the common children with children in this world

    # keep track of deleted children, these should not be included in the
    # acceptance probability of any world where they are not deleted
    all_deleted_vars.update(neww.deleted_vars)

  # children shared by every candidate world; only these contribute
  # comparably to every world's weight
  common_children = currworld.get_children_rec(var) - all_deleted_vars
  
  # compute the log probabilities for each of the newly created worlds
  wts = []
  for neww in newworlds:
    # a None slot represents the current value -> score the current world
    if neww is None:
      newvar = var
      newwor = currworld
    else:
      newvar = neww.variables[var.varkey]
      newwor = neww
    # probability of current value ...
    wt = var_dist.log_prob(newvar.value)
    # .. times probability of each child
    for childkey in common_children:
      child = newwor.variables[childkey]
      
      if child.type == VARTYPE_STOCHASTIC:
        wt += child.dist.log_prob(child.value)
        
      elif child.type == VARTYPE_GROUPBY_CHILD:
        # a groupby child's dist field holds the key of its lifted parent
        wt += newwor.variables[child.dist].dist.log_prob(child.value)

      elif child.type == VARTYPE_GROUPBY_PARENT:
        # score grandchildren not already accounted for elsewhere
        for child2key in (child.children - common_children \
                          - newwor.created_vars  - all_deleted_vars):
          wt += child.dist.log_prob(currworld.variables[child2key].value)
      
    wts.append(wt)

  # subtract the max weight from all the weights and convert to
  # probabilities (keeps math.exp from overflowing)
  maxwt = max(wts)
  wts = [math.exp(x - maxwt) for x in wts]
  # normalize the weights
  sumwt = sum(wts)
  wts = [x/sumwt for x in wts]

  if trace:
    print "Gibbs Switching Proposer Probabilities:"
    for x, y in zip(var_dist.finite_support(), wts):
      print x, "->", y,
    print

  idx = Categorical(wts).sample(rand)

  neww = newworlds[idx]

  if trace:
    if neww is None:
      print "no change"
    else:
      print "new value:", neww.variables[var.varkey].value
    
  if neww is None:
    # no change
    mystats["gibbs_switching-SAME"] += 1
  else:
    neww.merge()
    mystats["gibbs_switching-ACC"] += 1

def mh_switching_proposer(currworld, var, rand, trace, mystats, statsobj):
  """
  Propose a value for a switching variable using a Metropolis-Hastings rule.

  Samples a candidate from the variable's prior, builds a WorldDiff with
  that value, and accepts or rejects based on the probability change of
  variables that acquired new parents.  Returns 0 on acceptance (or no
  change), 1 on rejection.
  """
  # sample a new value for the variable
  # if the new value is the same as the old then we can return right away
  val = currworld.get_dist(var).sample(rand)
  # note: we can't compare matrices
  if not hasattr(val, "shape") and val == var.value:
    if trace:
      print "mh_switching_proposer: same value"
    mystats["mh_switching-SAME"] += 1
    return 0         # nothing to do

  # now sample all the non-switching variables, this will let us know
  # later on which variables need to be included in the acceptance ratio
  ##for v2 in currworld.variables.itervalues():
  ##  # sample all but deterministic dep variables and evidence
  ##  if v2.dist is not None and v2.varkey not in currworld.evidence_vars:
  ##    flip_var(v2, currworld, rand, trace, statsobj, skip_switching=True)
  
  # create a new world and change the value of the variable that we
  # earlier sampled a new value for
  newworld = WorldDiff(currworld)
  newworld.change_var(var.varkey, val)
  newworld.close()
  
  if trace:
    newworld.dump()
  
  # for all the non-switching variables which have acquired new parents or
  # have been created, we need to resample to make this a more likely move

  ### resample_new_world(newworld)

    
  # compute the acceptance prob
  log_accept = 0.0
  # only variables with distributions that have acquired new parents
  # contribute to the acceptance ratio (since changed and new variables
  # were proposed using the prior)
  for childkey in newworld.vars_new_parents:
    newvar = newworld.variables[childkey]
    oldvar = currworld.variables[childkey]

    if newvar.type == VARTYPE_STOCHASTIC:
      log_accept += newvar.dist.log_prob(newvar.value) \
                    - oldvar.dist.log_prob(oldvar.value)

    elif newvar.type == VARTYPE_GROUPBY_CHILD:
      # if a groupby child doesn't change its group and the group
      # has a changed parent then it will be accounted for by the group
      if (newvar.dist != oldvar.dist) \
         or (newvar.dist not in newworld.vars_new_parents):
        # a groupby child's dist field holds the key of its lifted parent
        newdist = newworld.variables[newvar.dist].dist
        olddist = currworld.variables[oldvar.dist].dist
        log_accept += newdist.log_prob(newvar.value) \
                      - olddist.log_prob(oldvar.value)

    elif newvar.type == VARTYPE_GROUPBY_PARENT:
      newdist = newvar.dist
      olddist = oldvar.dist
      # we will only consider children which haven't changed groups
      # other children will be accounted individually
      # NOTE(review): this loop variable shadows the outer `childkey`;
      # harmless (the outer for-iterator rebinds it each pass) but confusing
      for childkey in (newvar.children & oldvar.children):
        childval = currworld.variables[childkey].value
        log_accept += newdist.log_prob(childval) - olddist.log_prob(childval)
        
  if trace:
    print "mh_switching_proposer: %s --> %s  accep prob:" \
          % (str(var.value), str(val)),
    # show the exponent form when exp() would print as 1 or ~0 anyway
    if log_accept >= 0.0 or log_accept < -10:
      print "exp(%.1f)" % log_accept
    else:
      print "%.5g" % (math.exp(log_accept),)
  
  # standard M-H accept/reject step
  if log_accept >= 0.0 or rand.random() < math.exp(log_accept):
    newworld.merge()
    # accepted
    mystats["mh_switching-ACC"] += 1
    return 0

  # rejected
  mystats["mh_switching-REJ"] += 1
  return 1

