
#include <cstdio>
#include <fstream>

#include <boost/algorithm/string.hpp>
#include <boost/algorithm/string/join.hpp>
#include <boost/lexical_cast.hpp>
#include <boost/format.hpp>
#include <boost/utility.hpp>

#include "inference_engine.hpp"
#include "knowledge_base.hpp"
#include "plugin.hpp"

#include "henry.hpp"

/* The very warm heart of Henry inference engine continues... */
ie::weighted_abduction_t::weighted_abduction_t( kb::pool_t *_p_kbp, string &command ) :
  m_f_no_group_merge( false ),
  m_f_diet( true ),
  m_p_ilp_log( &cerr ) {

  m_p_kbp                   = _p_kbp;
  m_depth_limit             = 2;
  m_timeout                 = 60.0 * 60.0;   /* Default timeout: one hour (in seconds). */
  m_f_rhs_strict            = false;
  m_f_expl_mutual_exclusive = false;

  /* BUGFIX: these two flags were only ever assigned inside the option loop
     below; without the corresponding option they were left uninitialized
     and later read (see ready_set()).  Default them to false explicitly. */
  m_f_block_multiple_same_explanations = false;
  m_f_block_circular_zero              = false;

  /* Counters of ILP variables created so far (h=pay, r=reduce, u=unify,
     c=cluster membership, e=equality). */
  m_num_var_h               = 0;
  m_num_var_r               = 0;
  m_num_var_u               = 0;
  m_num_var_c               = 0;
  m_num_var_e               = 0;
  
  m_command                 = command;

  m_p_kbp->initialize_axiom_log();

  /* Interpret the option strings stored in the knowledge base; each option
     is a comma-separated record whose first field is the option name. */
  string_list_t options = m_p_kbp->get_options();

  foreach( string_list_t, iter_o, options ) {
    string_list_t args; split( args, *iter_o, is_any_of( "," ) );
    if( "RHS_STRICT" == args[0] )                                 m_f_rhs_strict                       = true;
    else if( "PROHIBIT_EXPLAIN_SAME_LITERAL" == args[0] )         m_f_block_multiple_same_explanations = true;
    else if( "PROHIBIT_CIRCULAR_ZERO" == args[0] )                m_f_block_circular_zero              = true;
    else if( "ALL_EXPLANATIONS_ARE_MUTUAL_EXCLUSIVE" == args[0] ) m_f_expl_mutual_exclusive            = true;
    else if( "PROHIBIT_LITERALS_IN_SAME_CONJUNCTION" == args[0] ) m_f_no_group_merge                   = true;
  }

  /* NOTE(review): m_recursion_limit and m_f_bc_from_same_prefix are read in
     ready_set() but are not initialized here -- confirm they are set
     elsewhere (e.g. in-class initializers or a setter). */
}

/* Destructor: release the resources held by the underlying ILP solver. */
ie::weighted_abduction_t::~weighted_abduction_t() {

  m_ilp_solver.release();
  
}

/* Bundle of references handed to cb_inferred() through its void* context
   argument.  The bindings are established per depth iteration in
   ready_set() (see the brace initializer there). */
struct cb_inferred_context_t {
  slpl_dict_t          &unifiable;        /* predicate/arity key -> literal ids (bound to m_pa2h). */
  int_list_t           &hot_literals;     /* ids of literals added during the current depth. */
  literal_vector_t     &hypotheses;       /* global hypothesis store (bound to m_hypotheses). */
  list<unifier_t>      &unifier_storage;  /* bound to m_unifier_storage. */
  ie::weighted_abduction_t &ie;           /* the owning inference engine. */
  ie::variable_cluster_t &non_merges;     /* non-merge (inequality) clusters (bound to m_non_merges). */
};

/* Callback fired once per backward-chained axiom application (see the call
 * to apply_applicable_axioms in ready_set).  llt holds the newly assumed
 * literals of one AND-clause; p_context is a cb_inferred_context_t.
 * Registers each literal, builds its ILP variables/constraints, and links
 * it into the hypothesis graph.  Returns false only on timeout. */
bool cb_inferred( const literal_list_t &llt, void *p_context ) {

  vector<ilp::variable_t> and_clause;
  int_set_t               sibling;
  /* NOTE(review): function-static id generator -- gives every AND-clause a
     fresh group id, but is not reentrant/thread-safe. */
  static int              group_id = 0;
  cb_inferred_context_t   *p_cic = (cb_inferred_context_t*)p_context;

  group_id++;

  WHISPER( 2, " - Assume " << llt.to_string() << endl; );
    
  foreachc( literal_list_t, iter_al, llt ) {

    if( p_cic->ie.is_timeout() ) return false;

//     if( "!=" == iter_al->predicate ) {
//       p_cic->non_merges.add_unifier( NULL, iter_al->arguments[0], iter_al->arguments[1] );
//       continue;
//     }
    
    bool f_twins = false;

    /* Search exactly same literal in ancestor path. */
    foreachc( int_set_t, iter_anc, iter_al->ancestor ) {
      if( p_cic->hypotheses[ *iter_anc ].predicate == iter_al->predicate &&
          p_cic->hypotheses[ *iter_anc ].arguments == iter_al->arguments ) { f_twins = true; break; }
    }
      
    /* Register the literal; mark it end-of-branch (EOB) when an identical
       literal already occurs on its ancestor path. */
    int        n_literal = p_cic->ie.add_literal( *iter_al, group_id );
    literal_t *p_literal = &p_cic->hypotheses[ n_literal ];
    p_literal->is_eob = f_twins;
    p_cic->hot_literals.push_back( n_literal );
    sibling.insert( n_literal );

    /* Index by predicate/arity so unification candidates can be found later. */
    p_cic->unifiable[ p_literal->to_string( PredicateArity ) ].push_back( n_literal );

    if( f_twins )
      WHISPER( 3, p_literal->to_string() << " is flagged as EOB." << endl );
    
    /* The children variables. */
    foreachc( int_set_t, iter_parent, p_literal->parents )
      p_cic->hypotheses[ *iter_parent ].children.insert( n_literal );
    
    /* @CONSTR: Reduction can be given only when the literal is hypothesized. */
    ilp::constraint_template_t *p_creward_phh;
    p_literal->con_r2 = p_cic->ie.get_ilp_solver().add_constraint( &p_creward_phh );
    p_creward_phh->set( 0, ilp::CT_NO_MATTER, 2, p_literal->var_reduce, p_literal->var_pay, -1.0, 1.0 );
    
    /* @CONSTR: Hypothesized is allowed only when the parent literal is hypothesized. */
    ilp::constraint_template_t *p_creward_ph_self;
    p_literal->con_r1 = p_cic->ie.get_ilp_solver().add_constraint( &p_creward_ph_self );
    p_creward_ph_self->set( 0, ilp::CT_NO_MATTER, 1, p_literal->var_pay, -1.0 );

    /* Non-strict RHS: only the first parent gates the literal.  Strict RHS:
       every parent contributes, and the coefficient on var_pay is rescaled
       to the number of parents. */
    if( !p_cic->ie.get_rhs_restrict() )
      p_creward_ph_self->push_back( p_cic->hypotheses[ *p_literal->parents.begin() ].var_pay, 1.0 );
    else {

      foreachc( int_set_t, iter_p, p_literal->parents )
        p_creward_ph_self->push_back( p_cic->hypotheses[ *iter_p ].var_pay, 1.0 );

      p_creward_ph_self->coefficients[0] = -1.0 * p_literal->parents.size();
      
    }
    
    WHISPER( 5, "ILPCON-RS: " << p_literal->to_string() << ":" << p_literal->con_r1 << endl );
    
    /* @CONSTR: conditional unifiers attached to this literal are activated
       by its pay variable (constrain_type 1 is handled specially in
       _prepare_variable_clusters). */
    foreach( unifier_list_t, iter_cond, p_literal->conditions ) {
      
      unifier_t *p_unifier;
      p_cic->ie.get_variable_cluster().add_unifier( &p_unifier, iter_cond->v1, iter_cond->v2 );
      p_unifier->var_operator   = p_literal->var_pay;
      p_unifier->constrain_type = 1;
      
    }

    /* @CONSTR: Cannot hypothesize only one literal in AND-clause. */
    and_clause.push_back( p_literal->var_pay );

  }

  /* Every literal in this AND-clause records the whole sibling set. */
  foreach( int_set_t, iter_l, sibling )
    p_cic->hypotheses[ *iter_l ].sibling = sibling;

  if( and_clause.size() > 1 ) {
    
    /* @CONSTR: Logical AND, i.e., we cannot hypothesize only one literal in AND-clause. */
    ilp::constraint_template_t *p_cconj;    
    ilp::constraint_t c = p_cic->ie.get_ilp_solver().add_constraint( &p_cconj );
    WHISPER( 5, "ILPCON-AND: "  << llt.to_string() << ":" << c << endl );

    p_cconj->set( 0, 0, and_clause );
    p_cconj->coefficients[0] = -1.0 * (and_clause.size() - 1);
  
    foreach( int_set_t, iter_l, sibling )
      p_cic->hypotheses[ *iter_l ].con_and = c;
    
  }
  
  return true;
  
}

/* Build the whole ILP problem: register the observations, backward-chain
 * axioms up to m_depth_limit levels, and (unless f_want_obs is set) add
 * unification / clustering / plugin constraints.  Returns false on timeout
 * or when any sub-step fails. */
bool ie::weighted_abduction_t::ready_set( bool f_want_obs ) {

  m_time_start = gettimeofday_sec();

  literal_pset_t        mrhs;

  /* Process the observations... */
  m_initial_cost = 0.0; m_initial_cost_eps = 0.0;

  foreach( literal_list_t, itr, m_input ) {

    /* "!=" observations express non-merge (inequality) constraints, not
       ordinary literals. */
    if( "!=" == itr->predicate ) {
      m_non_merges.add_unifier( NULL, itr->arguments[0], itr->arguments[1] );
      continue;
    }

    int        n_literal = add_literal( *itr );
    literal_t *p_literal = &m_hypotheses[ n_literal ];

    /* Observation literals are always paying cost. */
    m_ilp_solver.fix_variable( p_literal->var_pay, 1.0 );
    p_literal->fixed_value = 1;
    p_literal->is_observed = true;
    m_initial_cost     += 100.0*p_literal->number;
    m_initial_cost_eps += 1;

    ilp::constraint_template_t *p_cobs;
    m_ilp_solver.add_constraint( &p_cobs );
    p_cobs->set( 1.0, 1.0, 1, p_literal->var_pay, 1.0 );
    
    /* Index the observation literal by its predicate/arity key. */
    m_pa2h[ p_literal->to_string( PredicateArity ) ].push_back( n_literal );

    /* Also index it by each of its argument variables. */
    foreach( logvar_list_t, iter_var, p_literal->arguments ) {
      m_v2h[ *iter_var ][ p_literal->to_string( PredicateArity ) ].push_back( n_literal );
      m_v2h_o[ *iter_var ][ p_literal->to_string( PredicateArity ) ].push_back( n_literal );
    }

  }

  m_non_merges.flush();

  literal_vector_t::iterator iter_hypo  = m_hypotheses.begin();
  int                        iter_id    = 0;
  vector<int_list_t>         d2hypotheses;
  
  /* Backward-chain breadth-first: one level of axiom applications per
     depth step; d2hypotheses remembers which literals each level added. */
  for( int d=0; d<m_depth_limit; d++ ) {

    /* Prepare the literal hash. */
    slps_dict_t     obs_hash;
    
    for( int i=0; i<m_hypotheses.size(); i++ )
      obs_hash[ m_hypotheses[ i ].to_string( PredicateArity ) ].insert( i );
    
    WHISPER( 1, "Backward-chaining (d=" << d+1 << ")..." << endl );

    IFWHISPER( 4 ) {
      DATE;
      cerr << "  Candidates are:";
      foreachc( slps_dict_t, iter_oh, obs_hash )
        cerr << iter_oh->first << ":" << iter_oh->second.size() << ", ";
      cerr << " :)" << endl;
    }
    
    int_list_t hot_literals;
    sss_dict_t axioms;

    WHISPER( 1, "  Enumerating applicable axioms (P: " << m_hypotheses.size() << ")..." << endl );

    /* openmp */
    m_p_kbp->extract_applicable_axioms( &axioms, m_hypotheses );

    WHISPER( 1, "  Applying the axioms..." << endl );

    /* openmp */
    /* cb_inferred populates hot_literals and the ILP problem via cic. */
    cb_inferred_context_t cic = {m_pa2h, hot_literals, m_hypotheses, m_unifier_storage, *this, m_non_merges};
    if( !m_p_kbp->apply_applicable_axioms( axioms, cb_inferred, (void*)&cic, obs_hash, m_hypotheses, m_recursion_limit, m_f_rhs_strict, m_f_bc_from_same_prefix ) ) return false;

    if( 0 == hot_literals.size() ) {
      WHISPER( 1, "No axiom was applied. Break." << endl );
      break;
    }
    
    /* Index the freshly added literals by their argument variables. */
    foreach( int_list_t, iter_h, hot_literals ) {
      foreach( logvar_list_t, iter_var, m_hypotheses[ *iter_h ].arguments )
        m_v2h[ *iter_var ][ m_hypotheses[ *iter_h ].to_string( PredicateArity ) ].push_back( *iter_h );
    }
    
    d2hypotheses.push_back( hot_literals );
    
  }

  /* Constraint-generation phase (skipped when only the proof graph over the
     observations is wanted).  Each step may fail (e.g. on timeout). */
  if( !f_want_obs ) {
    unordered_map<literal_t*, ilp::variable_set_t> unify_operators;

    if( !_prepare_unification( &unify_operators ) ) return false;
    if( m_f_diet ? !_prepare_diet( d2hypotheses ) : false ) return false;
    if( !_prepare_variable_clusters() ) return false;
    if( !plugin::conditional_unification_t( *this ).process() ) return false;
    if( !_prepare_multiple_rhs() ) return false;
    if( !_prepare_reduce_triggers( unify_operators ) ) return false;
    if( !plugin::xor_constraint_t( *this ).process() ) return false;
    if( m_f_block_multiple_same_explanations ? !plugin::prohibit_same_multiple_explanations_t( *this ).process() : false ) return false;
    if( m_f_block_circular_zero ? !plugin::prohibit_circular_zero_t( *this ).process() : false ) return false;
  }
  
  m_time_prepare = gettimeofday_sec() - m_time_start;

  return true;
  
}

/* Prune the hypothesis space: working from the deepest level upward,
 * eliminate leaf literal groups that can never pay off -- a group whose
 * cost is not below its parents' cost, and all but the single cheapest
 * group among competitors that share the same parent set. */
bool ie::weighted_abduction_t::_prepare_diet( const vector<int_list_t> &d2h ) {

  WHISPER( 1, "Sweeping out hypothesis space..." << endl );

  int num_sweeped = 0;

  rforeachc( vector<int_list_t>, iter_h_set, d2h ) {

    unordered_map<string, int_set_t> p2h;  /* parents-hash -> competing group ids. */
    unordered_map<int, int_set_t>    g2s;  /* group id -> its sibling set. */
      
    foreachc( int_list_t, iter_h, *iter_h_set ) {

      literal_t &lt = m_hypotheses[ *iter_h ];

      /* Only literals that are still alive, childless, and not unifiable
         with anything are candidates for elimination. */
      if( lt.is_discarded ) continue;
      if( lt.children.size() > 0 ) continue;
      if( lt.unified_with.size() > 0 ) continue;

      /* Ensure literals in sibling have no children. */
      bool f_really_no_children = true;
      
      foreach( int_set_t, iter_sib, lt.sibling ) {
        if( m_hypotheses[ *iter_sib ].children.size() > 0 ||
            m_hypotheses[ *iter_sib ].unified_with.size() > 0 ) { f_really_no_children = false; break; }
      }

      if( !f_really_no_children ) continue;

      /* Groups with an identical parent set compete with each other. */
      string parents_hash = awesome_join( lt.parents.begin(), lt.parents.end() );
      p2h[ parents_hash ].insert( lt.group_id );
      g2s[ lt.group_id ] = lt.sibling;
      
    }

    for( unordered_map<string, int_set_t>::iterator iter_p2h = p2h.begin(); iter_p2h != p2h.end(); ++iter_p2h ) {

      /* Total cost of the shared parent set (hash key is the joined ids). */
      double        parents_cost = 0.0;
      string_list_t sl_parents;

      split( sl_parents, iter_p2h->first, is_any_of(",") );

      foreach( string_list_t, iter_p, sl_parents ) {
        int p = lexical_cast<int>( *iter_p );
        parents_cost += m_hypotheses[ p ].number;
      }

      double        minimum_group_cost = 10000.0;
      int           minimum_group = -1;
      
      foreach( int_set_t, iter_g, iter_p2h->second ) {
        
        double group_cost = m_hypotheses[ *g2s[ *iter_g ].begin() ].number_sibling;
        
        /* A group at least as expensive as its parents can never reduce the
           cost; remove it outright.  Otherwise track the cheapest group. */
        if( group_cost >= parents_cost ) {
          foreach( int_set_t, iter_sib, g2s[ *iter_g ] ) { del_literal( &m_hypotheses[ *iter_sib ] ); num_sweeped++; }
        } else {
          if( minimum_group_cost > group_cost ) {
            minimum_group = *iter_g;
            minimum_group_cost = group_cost;
          }
        }
        
      }
      
      /* Among the surviving competitors keep only the cheapest group. */
      if( -1 != minimum_group ) {
        
        foreach( int_set_t, iter_g, iter_p2h->second ) {
          if( minimum_group == *iter_g ) continue;
          foreach( int_set_t, iter_sib, g2s[ *iter_g ] ) { del_literal( &m_hypotheses[ *iter_sib ] ); num_sweeped++; }
        }
        
      }
      
    }
    
  }

  WHISPER( 1, num_sweeped << " literals were eliminated." << endl );
  
  return true;
  
}

bool ie::weighted_abduction_t::_is_worth_unifying( const literal_t &h1, const literal_t &h2 ) {

  if( -1 != h1.xor_id && -1 != h2.xor_id && h1.xor_id == h2.xor_id ) return false;
  if( has_common( h1.xor_path, h2.xor_path ) ) return false;

  /* Loopy unification check. */
  if( h1.ancestor.end() != h1.ancestor.find( h2.id ) || h2.ancestor.end() != h2.ancestor.find( h1.id ) ) return false;

  if( m_f_no_group_merge && !h1.is_observed && !h2.is_observed ) {
    if( h1.group_id == h2.group_id ) return false;
  }
  
  if( m_f_expl_mutual_exclusive ) {
    
    /* p => a, q => a */
    if( 1 == h1.parents.size() && 1 == h2.parents.size() &&
        (*h1.parents.begin()) == (*h2.parents.begin()) ) return false;

    /* Look back ancestor path. */
    int_set_t h1s = h1.ancestor, h2s = h2.ancestor;

    foreachc( int_set_t, iter_anc, h1.ancestor )
      h1s.insert( m_hypotheses[ *iter_anc ].sibling.begin(), m_hypotheses[ *iter_anc ].sibling.end() );
        
    foreachc( int_set_t, iter_anc, h2.ancestor )
      h2s.insert( m_hypotheses[ *iter_anc ].sibling.begin(), m_hypotheses[ *iter_anc ].sibling.end() );

    if( h1s.end() != h1s.find( h2.id ) || h2s.end() != h2s.find( h1.id ) ) return false;

    foreach( int_set_t, iter_s1, h1s ) {
      foreach( int_set_t, iter_s2, h2s ) {
        if( *iter_s1 == *iter_s2 ) {
          /* Basically, considering this unification violates the
             principles of weighted abduction. But in the following
             case: A => B, A & C => B & Y, literal A should be
             unified. */

          /* TODO! */
        
          return false;
        }
      } }

  }
  
  return true;
  
}

typedef unordered_map<string, pair<logical_variable_t,logical_variable_t> > _umap_t;

/* Enumerate all pairs of literals sharing a predicate/arity key, decide
 * unifiability, and create a unification-operator ILP variable u_{h1,h2}
 * (plus its constraints and argument unifiers) for each viable pair.
 * p_unify_operators collects, per literal, the operator variables it
 * participates in (consumed later by _prepare_reduce_triggers). */
bool ie::weighted_abduction_t::_prepare_unification( unordered_map<literal_t*, ilp::variable_set_t> *p_unify_operators ) {

  WHISPER( 1, "Enumerating pairs of unifiable literals..." << endl );
  WHISPER( 1, "" ); g_progress_bar.wakeup( m_pa2h.size() );
  
  foreachc( slpl_dict_t, iter_sets, m_pa2h ) {

    g_progress_bar.goto_school();
    
    /* A singleton bucket has no pair to unify. */
    if( 1 == iter_sets->second.size() ) continue;
    
    WHISPER( 3, iter_sets->first << ": " << iter_sets->second.size() << " literals." );
    
    foreachc( int_list_t, iter_h1, iter_sets->second ) {
      for( int_list_t::const_iterator iter_h2 = next( iter_h1 ); iter_sets->second.end() != iter_h2; ++iter_h2 ) {
        
        if( is_timeout() ) return false;
        if( *iter_h1 == *iter_h2 ) continue;
        
        /* c(h1) >= c(h2) */
        literal_t &h1 = LMAX(*iter_h1, *iter_h2, m_hypotheses), &h2 = LMIN(*iter_h1, *iter_h2, m_hypotheses);

        /* Unifying two zero-cost literals cannot reduce the objective. */
        if( 0.0 == h1.number && 0.0 == h2.number ) continue;
        if( !_is_worth_unifying( h1, h2 ) ) continue;
        
        /* Compare our variables with the variables of the unifiable literal. */
        _umap_t unifier; si_dict_t unifier_arg_id; unordered_map<string, unifier_t*> unifier_obj;
        bool    f_arg_match_failure = false;
        
        for( int i=0; i<h1.arguments.size(); i++ ) {

          logical_variable_t &x1 = h1.arguments[i], &y1 = h2.arguments[i];

          /* If these arguments are different and are constants, the merge fails. */
          if( x1 != y1 ) {
            if( IS_CONST( x1 ) && IS_CONST( y1 ) ) { f_arg_match_failure = true; break; }
            else {
              
              /* Check non-merge constraint. */
               if( m_non_merges.is_in_same_cluster( x1, y1 ) ) {
                 f_arg_match_failure = true; break;
               }

               /* Key the candidate substitution on the lexicographically
                  ordered variable-name pair so both orders collapse. */
               if( x1 < y1 ) { unifier[ g_vm.p2v(x1) + g_vm.p2v(y1) ] = make_pair( x1, y1 ); unifier_arg_id[ g_vm.p2v(x1) + g_vm.p2v(y1) ] = i; }
               else          { unifier[ g_vm.p2v(y1) + g_vm.p2v(x1) ] = make_pair( x1, y1 ); unifier_arg_id[ g_vm.p2v(y1) + g_vm.p2v(x1) ] = i; }

            }
          }
          
        }

        if( f_arg_match_failure ) continue;
        
        /* If it's in same group... (cause(X, Y) & cause(Y, Z) ) */
        // if( h1.group_id == h2.group_id ) continue;
        
        /* Unifier activation variable. */
        unifier_operator_t *p_opr = &m_unified[ h1.id ][ h2.id ];
        p_opr->h1 = h1.id; p_opr->h2 = h2.id;
        p_opr->var_operator = m_ilp_solver.add_variable( NULL, 0.0, "u_{" + h1.to_string() +"," + h2.to_string() + "}" );
        m_ilp_solver.set_initial_value( p_opr->var_operator, 1.0 );
        m_num_var_u++;
        
        (*p_unify_operators)[ &h1 ].insert( p_opr->var_operator );
        m_unifiable[ iter_sets->first ].push_back( p_opr );

        h1.unified_with.insert( h2.id );
        h2.unified_with.insert( h1.id );

        /* @CONSTR: the operator can fire only when both literals pay. */
        ilp::constraint_template_t *p_cunify; 
        p_opr->con_u1 = m_ilp_solver.add_constraint( &p_cunify );
        p_cunify->set( 0, ilp::CT_NO_MATTER, 3, h1.var_pay, h2.var_pay, p_opr->var_operator, 1.0, 1.0, -2.0 );
        /* NOTE(review): con_u2 is printed below but is not assigned in this
           function -- confirm it is set elsewhere before use. */
        WHISPER( 5, "ILPCON-U: " << h1.to_string() << "~" << h2.to_string() << " :" << p_opr->con_u1 << "," << p_opr->con_u2 << endl );

        /* Register one argument unifier per distinct substitution pair. */
        foreach( _umap_t, iter_uf, unifier ) {
          unifier_t *p_unifier;
          p_opr->unifiers.push_back( m_vc.add_unifier( &p_unifier, iter_uf->second.first, iter_uf->second.second ) );
          p_unifier->p_operator            = p_opr;
          p_unifier->var_operator          = p_opr->var_operator;
          p_unifier->argument              = unifier_arg_id[ iter_uf->first ];
        }

        WHISPER( 4, h1.to_string() << " can be unified with "
                 << h2.to_string() << " by the unifier " << p_opr->to_string( m_vc.get_unifier_storage() )
                 << " (" << p_opr->var_operator << ")" << endl );
         
      } /* h2 */
    } /* h1 */
    
  }

  g_progress_bar.goto_hell();

  return true;
  
}

/* For every live literal, collect the ILP variables that justify a cost
 * reduction (its children's pay variables plus any unification operators it
 * appears in) and constrain var_reduce to require at least one of them.
 * Literals with no trigger get var_reduce fixed to 0. */
bool ie::weighted_abduction_t::_prepare_reduce_triggers( const unordered_map<literal_t*,ilp::variable_set_t>& unify_operators ) {

  WHISPER( 1, "Generating reduce triggers..." << endl );
  
  foreach( literal_vector_t, iter_h, m_hypotheses ) {

    if( iter_h->is_discarded ) continue;
    if( is_timeout() ) return false;
    
    ilp::variable_set_t reduce_trigger;

    /* Add the pay variables of its children into a reduce trigger. */
    foreach( int_set_t, iter_c, iter_h->children )
      reduce_trigger.insert( m_hypotheses[ *iter_c ].var_pay );

    /* Add an unifying operator. */
    unordered_map<literal_t*, ilp::variable_set_t>::const_iterator iter_uo = unify_operators.find( &(*iter_h) );

    if( unify_operators.end() != iter_uo ) {
      foreachc( ilp::variable_set_t, iter_var_opr, iter_uo->second )
        reduce_trigger.insert( *iter_var_opr );
    }
    
    if( 0 < reduce_trigger.size() ) {

      /* @CONSTR: Implementing the reduction trigger constructed above. */
      ilp::constraint_template_t *p_creduce;
      ilp::constraint_t c = m_ilp_solver.add_constraint( &p_creduce );
      p_creduce->set( 0, ilp::CT_NO_MATTER, reduce_trigger );
      p_creduce->push_back( iter_h->var_reduce, -1.0 );
      WHISPER( 5, "ILPCON-RDC: " << iter_h->to_string() << ":" << c << endl );
      

    } else {

      /* This literal is neither unified nor backchained. */
      m_ilp_solver.fix_variable( iter_h->var_reduce, 0.0 );
      
    }

  }

  return true;
  
}

/* For literals generated through an axiom whose substitution maps one axiom
 * variable to several target variables, require the corresponding equality
 * (e) / unification (u) variables to be active whenever the literal pays.
 * Literals whose required equalities cannot exist are deleted together with
 * their sibling groups. */
bool ie::weighted_abduction_t::_prepare_multiple_rhs() {
  
  WHISPER( 1, "Imposing variable substitution constraints for hypothesized literals..." << endl );

  foreach( literal_vector_t, iter_h, m_hypotheses ) {

    if( iter_h->is_discarded ) continue;
    if( is_timeout() ) return false;
    
    bool                 f_multiple_rhs = false, f_boring = false;
    literal_t           &h = *iter_h;
    int                  num_args = 0;

    vector<ilp::variable_set_t> eu_set;
    
    foreach( vvset_dict_t, iter_asub, iter_h->axiom_substitution ) {

      /* Only one-to-many substitutions need extra constraints. */
      if( 1 >= iter_asub->second.size() ) continue;
      
      WHISPER( 3, h.to_string() << " " << h.generated_by << ": " );
      SWHISPER( 3, g_vm.p2v( iter_asub->first ) << "=" );
      
      foreach( logvar_set_t, iter_v, iter_asub->second ) {
        SWHISPER( 3, g_vm.p2v( *iter_v ) << ", " );
      }

      SWHISPER( 3, endl );
      
      /* Check if "No way! It doesn't make sense." */      
      foreach( logvar_set_t, iter_v1, iter_asub->second ) {
        for( logvar_set_t::iterator iter_v2 = next(iter_v1); iter_asub->second.end() != iter_v2; ++iter_v2 ) {

          /* Order the pair so the e-variable lookup key is canonical. */
          logical_variable_t  v1 = *iter_v1 < *iter_v2 ? *iter_v1 : *iter_v2, v2 = *iter_v1 < *iter_v2 ? *iter_v2 : *iter_v1;
          ilp::variable_t     e_var = find_variable_e( v1, v2, 0 );
          ilp::variable_set_t u_set;

          /* No e-variable: this pair can never be equal -> literal is dead. */
          if( ilp::INVALID_HANDLE == e_var ) { WHISPER( 5, "Never happens: " << g_vm.p2v(v1) << " - " << g_vm.p2v(v2) << endl ); f_boring = true; break; }

          int_list_t          &eudic = get_eu_dict( v1, v2 );

          /* Collect the unify operators that can establish v1 = v2. */
          foreach( int_list_t, iter_uset, eudic ) {
            u_set.insert( m_vc.get_unifier( *iter_uset )->var_operator );
          }

          eu_set.push_back( u_set );
          
        } if( f_boring ) break; }

      if( f_boring ) break;

      num_args++;
      
    }

    if( f_boring ) {       /* Delete his credit history. */
      WHISPER( 3, " - Cannot be hypothesized." << endl );
      foreach( int_set_t, iter_sib, iter_h->sibling ) { del_literal( &m_hypotheses[ *iter_sib ] ); }
    } else {

      /* @CONSTR: each required pair needs an active unify operator whenever
         the literal pays (var_pay weighted by num_args). */
      foreach( vector<ilp::variable_set_t>, iter_eu, eu_set ) {
      
        ilp::constraint_template_t *p_cvs;
        m_ilp_solver.add_constraint( &p_cvs );
        p_cvs->set( 0, ilp::CT_NO_MATTER );

        foreach( ilp::variable_set_t, iter_u, *iter_eu ) {
          p_cvs->push_back( *iter_u, 1.0 );
        }
      
        p_cvs->push_back( h.var_pay, -1.0 * num_args );
      
      }

    }
    
  }

  return true;
  
}

/* Partition logical variables into clusters reachable through unifiers and
 * encode cluster membership in the ILP: c_{x,t} variables (x belongs to
 * sub-cluster t), e_{x,y,t} variables (x and y both in t), non-merge
 * exclusions, and the linking constraints between c/e/u variables. */
bool ie::weighted_abduction_t::_prepare_variable_clusters() {

  WHISPER( 1, "Creating variable clustering constraints..." << endl );

  m_vc.flush();

  /* NOTE(review): the_cluster_set is a local object initialized from
     get_cluster_set(); the write to iter_vc->second.num_clusters below
     mutates this local -- confirm get_cluster_set() returns a reference if
     that write is meant to persist. */
  variable_cluster_t::cluster_set_t          the_cluster_set = m_vc.get_cluster_set();
  unordered_map<ilp::variable_t, unifier_t*> e2u;

  WHISPER( 1, "" ); g_progress_bar.wakeup( the_cluster_set.size() );
  
  foreach( variable_cluster_t::cluster_set_t, iter_vc, the_cluster_set ) {

    g_progress_bar.goto_school();
    
    if( is_timeout() ) return false;

    /* Enumerate the variables and constants in the cluster. */
    logvar_list_t constants, variables;
      
    foreach( logvar_set_t, iter_v, iter_vc->second.variables ) {
      if( IS_CONST( *iter_v ) ) constants.push_back( *iter_v );
      else                      variables.push_back( *iter_v );
    }

    /* Constants are automatically made exclusive. */
    for( int i=0; i<constants.size(); i++ )
      for( int j=i+1; j<constants.size(); j++ )
        m_non_merges.add_unifier( NULL, constants[i], constants[j] );

    if( 0 < constants.size() ) m_non_merges.flush();
    
    WHISPER( 2, "Variable cluster #" << iter_vc->first << " ("
             << constants.size() << "/" << variables.size() << "/" << iter_vc->second.unifiers.size()
             << "): " << endl );
    //cerr << join( iter_vc->second.variables, "," ) << endl;
    
    /* Prepare clusters.  num_clusters = the largest overlap with any
       non-merge cluster, i.e. how many mutually-distinct values the
       variables of this cluster must be able to take. */
    int                      num_clusters = 1;
    vector<logvar_list_t>    vars_ineq;
    logvar_list_t            vars1( iter_vc->second.variables.begin(), iter_vc->second.variables.end() );
    sort( vars1.begin(), vars1.end() );
    
    /* Search intersection with non-merge variable clusters ({x, y, z} if x != y, y != z). */
    foreach( variable_cluster_t::cluster_set_t, iter_ic, m_non_merges.get_cluster_set() ) {

      logvar_list_t       vars2( iter_ic->second.variables.begin(), iter_ic->second.variables.end() ), vars_xcl;
      sort( vars2.begin(), vars2.end() );
      set_intersection( vars1.begin(), vars1.end(), vars2.begin(), vars2.end(), back_inserter(vars_xcl) );

      if( 1 >= vars_xcl.size() ) continue;
      
      vars_ineq.push_back( vars_xcl );
      num_clusters = max( (int)vars_xcl.size(), num_clusters );
      
    }

    iter_vc->second.num_clusters = num_clusters;
    
    WHISPER( 2, " => " << num_clusters << " cluster(s) will be created." << endl );
    
    /* Introduce c-variables that denote where a logical variable/constant X belongs to. */
    unordered_map<logical_variable_t, ilp::variable_set_t> c_set;

    foreach( logvar_set_t, iter_var, iter_vc->second.variables ) {

      /* At the same time, introduce constraint that X can belong to at most one cluster. */
      ilp::constraint_template_t *p_cat_most_1c;
      m_ilp_solver.add_sos1( &p_cat_most_1c );
      //m_ilp_solver.add_constraint( &p_cat_most_1c );
      p_cat_most_1c->set( ilp::CT_NO_MATTER, 1 );
      
      for( int t=0; t<num_clusters; t++ ) {
        
        ilp::variable_t var_cx = m_ilp_solver.add_variable( NULL, 0.0, "c_{"+ g_vm.p2v(*iter_var) +","+ char(int('0')+t) +"}" );
        m_ilp_solver.set_initial_value( var_cx, 1.0 );
        m_num_var_c++;
        m_var_cx[ *iter_var ][ t ] = var_cx;
        m_var_vc[ *iter_var ]      = iter_vc->first;
        
        p_cat_most_1c->push_back( var_cx, 1.0 );
        //p_cat_most_1c->push_back( var_cx, 1.0 );
        
      }

      c_set[ *iter_var ] = ilp::variable_set_t();
      
    }

    /* Impose non-merge constraints. */
    foreach( vector<logvar_list_t>, iter_nm, vars_ineq ) {

      for( int i=0; i<num_clusters; i++ ) {

        /* c_{a,0} + c_{b,1} + ... + c_{z,i} <= 1.0  */
        ilp::constraint_template_t *p_cxcl;
        m_ilp_solver.add_sos1( &p_cxcl );

        /* For the largest cluster, possibly fix c value to make fast. */
        /* NOTE(review): `order` is declared but never used below. */
        int order = 0;
        foreach( logvar_list_t, iter_var, *iter_nm )
          p_cxcl->push_back( m_var_cx[ *iter_var ][ i ], 1.0 );

      }
      
    }
    
    foreach( int_set_t, iter_unifier, iter_vc->second.unifiers ) {

      unifier_t *p_unifier = m_vc.get_unifier( *iter_unifier );

      /* Imposing necesarry condition for hypothesizing on literals. */
      switch( p_unifier->constrain_type ) {
      case 1: {
        /* Conditional unifier (set up in cb_inferred): the operator
           requires some e-variable of the pair to hold. */
        ilp::variable_set_t e_set;

        for( int t=0; t<num_clusters; t++ )
          e_set.insert( add_variable_e( p_unifier->v1, m_var_cx[ p_unifier->v1 ][ t ], p_unifier->v2, m_var_cx[ p_unifier->v2 ][ t ], t, *iter_unifier ) );

        ilp::constraint_template_t *p_cnecond;
        m_ilp_solver.add_constraint( &p_cnecond );
        p_cnecond->set( 0, ilp::CT_NO_MATTER, 1, p_unifier->var_operator, -1.0 );

        foreach( ilp::variable_set_t, iter_ve, e_set ) {
          p_cnecond->push_back( *iter_ve, 1.0 );
          WHISPER( 4, *iter_ve << endl );
        }
        
        WHISPER( 4, "... C=1." << endl );

        break; }
        
      default: {
        ilp::variable_set_t e_set;

        /* Shortcut: with no inequality involved, two unbound variables can
           simply be merged in place -- no ILP machinery needed. */
        if( 0 == vars_ineq.size() && IS_UNBOUND(p_unifier->v1) && IS_UNBOUND(p_unifier->v2) ) {
          p_unifier->v1 = p_unifier->v2;
          m_hypotheses[ p_unifier->p_operator->h1 ].arguments[ p_unifier->argument ] = m_hypotheses[ p_unifier->p_operator->h2 ].arguments[ p_unifier->argument ];
          continue;
        }
        
        for( int t=0; t<num_clusters; t++ ) {
          ilp::variable_t var = add_variable_e( p_unifier->v1, m_var_cx[ p_unifier->v1 ][ t ], p_unifier->v2, m_var_cx[ p_unifier->v2 ][ t ], t, *iter_unifier );
          
          e_set.insert( var );
          e2u[ var ] = p_unifier;
        }

        m_u2e[ p_unifier->var_operator ].second.push_back( e_set );
        
        break; }
        
      };

      /* c -> u_set mapping. */
      for( int t=0; t<num_clusters; t++ ) {
        c_set[ p_unifier->v1 ].insert( p_unifier->var_operator );
        c_set[ p_unifier->v2 ].insert( p_unifier->var_operator );
      }
      
    }

    /* c_0 v c_1 v c_i v ... => u_0 v u_1 v u_2 v ... */
    for( unordered_map<logical_variable_t, ilp::variable_set_t>::iterator iter_cs = c_set.begin(); c_set.end() != iter_cs; ++iter_cs ) {

      ilp::constraint_template_t *p_cu;
      m_ilp_solver.add_constraint( &p_cu );
      p_cu->set( 0, ilp::CT_NO_MATTER );

      foreach( cx_dict_t, iter_cx, m_var_cx[ iter_cs->first ] ) p_cu->push_back( iter_cx->second, -1.0 );
      foreach( ilp::variable_set_t, iter_u, iter_cs->second )   p_cu->push_back( *iter_u, 1.0 );
      
    }

  }

  g_progress_bar.goto_hell();

  /* Create 2.0 * u <= ex,y_0 + ex,y_1 + e_z,w_0 + e_z,w_1 */
  for( u2e_dict_t::iterator iter_u2e = m_u2e.begin(); m_u2e.end() != iter_u2e; ++iter_u2e ) {
    
    ilp::constraint_template_t *p_cunify;
    iter_u2e->second.first = m_ilp_solver.add_constraint( &p_cunify );
    p_cunify->set( 0, ilp::CT_NO_MATTER, 1, iter_u2e->first, -1.0 * iter_u2e->second.second.size() );
    
    foreach( vector<ilp::variable_set_t>, iter_vve, iter_u2e->second.second ){
      foreach( ilp::variable_set_t, iter_ve, *iter_vve ) /* Es will be at most 1. */
        p_cunify->push_back( *iter_ve, 1.0 );
    }

  }

  foreach( variable_cluster_t::cluster_set_t, iter_ic, m_non_merges.get_cluster_set() )
    WHISPER( 2, "Non-merge: " << g_vm.join( iter_ic->second.variables, " != " ) << endl );

  return true;
  
}


/* Return (creating if needed) the ILP variable e_{x,y,t} meaning "both x
 * and y belong to sub-cluster t".  c1/c2 are the c_{x,t}/c_{y,t} variables
 * (looked up in m_var_cx when INVALID_HANDLE is passed).  When u_id != -1,
 * the unifier id is recorded under the same key in m_eudict. */
ilp::variable_t ie::weighted_abduction_t::add_variable_e( logical_variable_t &v1, ilp::variable_t c1, logical_variable_t &v2, ilp::variable_t c2, int t, int u_id ) {

  /* Canonical cache key: smaller variable name first.
     BUGFIX: the buffer used to be function-static (not reentrant) and was
     filled with sprintf (no bounds check); use a local buffer + snprintf. */
  char buffer[ 1024 ];
  snprintf( buffer, sizeof( buffer ), "%s/%s/%d", g_vm.p2v(v1 < v2 ? v1 : v2).c_str(), g_vm.p2v(v1 < v2 ? v2 : v1).c_str(), t );
  string ek( buffer );
  e_dict_t::iterator iter_e = m_edict.find( ek );

  /* Cache hit: reuse the existing e-variable. */
  if( m_edict.end() != iter_e ) {
    if( -1 != u_id ) m_eudict[ ek ].push_back( u_id );
    return iter_e->second;
  }

  ilp::variable_t var_e = m_ilp_solver.add_variable( NULL, 0.0, "e_{" + ek + "}" );
  m_ilp_solver.set_initial_value( var_e, 1.0 );  
  m_num_var_e++;
  
  m_edict[ ek ] = var_e;
  if( -1 != u_id ) m_eudict[ ek ].push_back( u_id );

  /* @CONSTR: e_{x,y,t} = 1 iff c_{x,t} = 1 \land c_{y,t} = 1 */
  ilp::constraint_template_t *p_ce;
  m_ilp_solver.add_constraint( &p_ce );
  p_ce->set( 0, 1, 3,
      ilp::INVALID_HANDLE == c1 ? m_var_cx[ v1 ][ t ] : c1,
      ilp::INVALID_HANDLE == c2 ? m_var_cx[ v2 ][ t ] : c2, var_e,
      1.0, 1.0, -2.0 );

  return var_e;
  
}

int ie::weighted_abduction_t::add_literal( const literal_t &literal, int group_id ) {

  m_hypotheses.push_back( literal );

  int        the_passport  = m_hypotheses.size()-1;
  literal_t *p_literal     = &m_hypotheses[ the_passport ];
  p_literal->id            = the_passport;
  p_literal->group_id      = group_id;

  ilp::variable_template_t *p_ilpvar;
  p_literal->var_pay    = m_ilp_solver.add_variable( &p_ilpvar, 0 == p_literal->number ? 0 : 1+100.0*p_literal->number, "h_{" + p_literal->to_string() + "}" );
  p_literal->var_reduce = m_ilp_solver.add_variable( &p_ilpvar, 0 == p_literal->number ? 0 : -100.0*p_literal->number, "r_{" + p_literal->to_string() + "}" ); //-(NH_OPT+p_literal->number);
  m_ilp_solver.set_initial_value( p_literal->var_pay, 0.0 );
  m_ilp_solver.set_initial_value( p_literal->var_reduce, 1.0 );
  m_num_var_h++; m_num_var_r++;
  

  WHISPER( 5, "ILPVAR-PAY: " << p_literal->to_string() << ":" << p_literal->var_pay << endl );
  WHISPER( 5, "ILPVAR-REDUCE: " << p_literal->to_string() << ":" << p_literal->var_reduce << endl );
  
  return the_passport;
  
}

/* Recursively collect into *p_out the ids of all descendants of lt,
 * expanding each child together with its whole sibling group. */
inline void _follow_children( int_set_t *p_out, const literal_t &lt, const literal_vector_t &hypotheses ) {

  foreachc( int_set_t, iter_child, lt.children ) {
    foreachc( int_set_t, iter_member, hypotheses[ *iter_child ].sibling ) {
      p_out->insert( *iter_member );
      _follow_children( p_out, hypotheses[ *iter_member ], hypotheses );
    }
  }
  
}

void ie::weighted_abduction_t::del_literal( literal_t *p_literal ) {

  /* Remove a literal (and, transitively, its descendants) from the ILP
   * problem. Active only in "diet" mode. */
  if( !m_f_diet ) return;

  /* BUGFIX: the original code set is_discarded = true and THEN tested
   * !is_discarded, making the variable/constraint clean-up below
   * unreachable dead code (and leaving the m_num_var_* counters
   * stale). The guard now runs before the flag is set; it also makes
   * repeated deletion of the same literal — possible through the
   * recursion at the bottom — a harmless no-op. */
  if( p_literal->is_discarded ) return;

  WHISPER( 4, "Eliminate: " << p_literal->to_string() << endl );

  p_literal->is_discarded = true;

  /* Retire the literal's own ILP variables and constraints. */
  m_ilp_solver.del_variable( p_literal->var_pay );    m_num_var_h--;
  m_ilp_solver.del_variable( p_literal->var_reduce ); m_num_var_r--;
  if( ilp::INVALID_HANDLE != p_literal->con_r1 ) m_ilp_solver.del_constraint( p_literal->con_r1 );
  if( ilp::INVALID_HANDLE != p_literal->con_r2 ) m_ilp_solver.del_constraint( p_literal->con_r2 );
  if( ilp::INVALID_HANDLE != p_literal->con_r3 ) m_ilp_solver.del_constraint( p_literal->con_r3 );
  if( ilp::INVALID_HANDLE != p_literal->con_and ) m_ilp_solver.del_constraint( p_literal->con_and );

  /* Release the unifiers this literal was conditioned on. */
  foreach( list<unifier_t>, iter_u, p_literal->conditions ) {
    m_vc.del_unifier( *iter_u );
  }

  /* Disable every unification operator involving this literal by
   * pinning its operator variable to 0 and dropping its constraints. */
  foreach( int_set_t, iter_u, p_literal->unified_with ) {

    unifier_operator_t *p_unifier_op = get_unifier_op( p_literal->id, *iter_u );
    if( NULL == p_unifier_op ) continue;

    //m_ilp_solver.del_variable( p_unifier_op->var_operator );
    m_ilp_solver.fix_variable( p_unifier_op->var_operator, 0.0 );
    if( ilp::INVALID_HANDLE != p_unifier_op->con_u1 ) m_ilp_solver.del_constraint( p_unifier_op->con_u1 );
    if( ilp::INVALID_HANDLE != p_unifier_op->con_u2 ) m_ilp_solver.del_constraint( p_unifier_op->con_u2 );
    if( ilp::INVALID_HANDLE != p_unifier_op->con_u3 ) m_ilp_solver.del_constraint( p_unifier_op->con_u3 );

    u2e_dict_t::iterator iter_u2e = m_u2e.find( p_unifier_op->var_operator );

    if( m_u2e.end() == iter_u2e ) continue;
    if( ilp::INVALID_HANDLE != iter_u2e->second.first ) m_ilp_solver.del_constraint( iter_u2e->second.first );
    
  }

  /* Unlink this literal's sibling group from its parents. */
  foreach( int_set_t, iter_p, p_literal->parents ) {
    foreach( int_set_t, iter_s, p_literal->sibling ) {
      m_hypotheses[ *iter_p ].children.erase( *iter_s );
    } }
  
  /* Recursively eliminate every descendant reachable through the
   * sibling links of this literal's children. */
  int_set_t children;
  _follow_children( &children, *p_literal, m_hypotheses );

  foreach( int_set_t, iter_c, children ) del_literal( &m_hypotheses[ *iter_c ] );
  
}

/* Depth-first traversal over baby clusters: gathers the variables and
 * unifier ids of every cluster reachable from `c`, marking visited
 * clusters in *p_checked to terminate on cycles. */
inline void _follow_link( logvar_list_t *p_out_vars, int_list_t *p_out_unifiers, vector<bool> *p_checked, int c, const vector<ie::variable_cluster_t::baby_cluster_t> &c2v ) {

  vector<bool> &visited = *p_checked;

  if( visited[ c ] ) return;
  visited[ c ] = true;

  const ie::variable_cluster_t::baby_cluster_t &bc = c2v[ c ];

  p_out_vars->insert( p_out_vars->end(), bc.variables.begin(), bc.variables.end() );
  p_out_unifiers->insert( p_out_unifiers->end(), bc.unifiers.begin(), bc.unifiers.end() );

  /* Deduplicate and order the outgoing links before following them
   * (the visiting order determines the order of the output lists). */
  int_set_t links( bc.links.begin(), bc.links.end() );

  for( int_set_t::const_iterator it_link = links.begin(); links.end() != it_link; ++it_link )
    if( !visited[ *it_link ] )
      _follow_link( p_out_vars, p_out_unifiers, p_checked, *it_link, c2v );

}

void ie::variable_cluster_t::flush() {

  /* Rebuild the flushed variable-to-cluster mapping: merge the "baby"
   * clusters in m_c2v into connected components by following their
   * links, and record for every variable the id of the component it
   * ends up in. The smallest unvisited cluster id acts as the
   * component's representative. */
  m_v2c_flushed.clear();

  vector<bool> checked_out( m_c2v.size(), false );
      
  for( int c=0; c<m_c2v.size(); c++ ) {

    /* Verbose dump of the raw cluster contents. */
    IFWHISPER( 5 ) {
      DATE;
      cerr << "c" << c << ":";
      foreach( logvar_list_t, iter_v, m_c2v[c].variables ) {
        cerr << g_vm.p2v( *iter_v ) << " ";
      }
      cerr << endl;
    }

    /* Already absorbed into an earlier component. */
    if( checked_out[ c ] ) continue;
    
    logvar_list_t variables;
    int_list_t    unifiers;

    /* Follow the link */
    _follow_link( &variables, &unifiers, &checked_out, c, m_c2v );

    if( 0 == variables.size() ) continue;
    
    /* Materialize the component under the representative id `c`. */
    cluster_t *p_e = &m_cluster_set[ c ];
    p_e->variables.insert( variables.begin(), variables.end() );
    p_e->unifiers.insert( unifiers.begin(), unifiers.end() );

    foreach( logvar_set_t, iter_var, p_e->variables )
      m_v2c_flushed[ *iter_var ] = c;
    
  }

}

void ie::weighted_abduction_t::go() {
  
  /* Main inference driver: flush the generated ILP problem into the
   * solver, solve it, decode the solution back onto the hypothesis
   * graph (h/r/u/e variables), finalize variable substitutions, and
   * print the best interpretation to stdout. */
  m_ilp_solver.flush();
  if( m_timeout - m_time_prepare > 0.0 ) m_ilp_solver.set_timeout( m_timeout - m_time_prepare );

  int num_var_total = m_num_var_h + m_num_var_r + m_num_var_u + m_num_var_c + m_num_var_e;
  
  WHISPER( 1, format( "# of ILP variables: %d (h:%d, r:%d, u:%d, c:%d, e:%d)" ) % num_var_total % m_num_var_h % m_num_var_r % m_num_var_u % m_num_var_c % m_num_var_e << endl );
  WHISPER( 1, "# of ILP constraints: " << m_ilp_solver.get_num_constraints() << endl );
  WHISPER( 1, "Variables and constraints are loaded into the solver." << endl );
  WHISPER( 1, "Started solving the linear programming problem... (" << m_timeout - m_time_prepare << ")" << endl );
  WHISPER( 1, "c(H) = " << m_initial_cost << endl );

  /* Hey boy, solve the problem, please. */
  double         t1 = gettimeofday_sec();
  m_ilp_solver.set_cutoff( m_initial_cost+m_initial_cost_eps+1 );

  m_ilp_solver.solve( *m_p_ilp_log, NULL );
  m_time_ilp = gettimeofday_sec() - t1;

  /* Retrieve the status of hypotheses. */
  unordered_map<int, bool> actually_used_variables, observed_variables;
  variable_cluster_t       e_vc;
  double                   nh_opt = 0.0;
  
  if( !m_ilp_solver.has_feasible_solution() ) {
    
    /* No feasible solution: fall back to hypothesizing exactly the
     * observations. */
    foreach( literal_vector_t, iter_h, m_hypotheses ) {
      iter_h->is_hypothesized = iter_h->is_observed;
      iter_h->is_reduced      = false;
    }

  } else {
  
    /* h_p, r_p */
    /* Decode payment/reduction variables for every surviving literal;
     * zero-cost hypothesized literals are gathered (keyed by
     * predicate/arity) as candidates for the later 0-0 unification
     * pass. */
    slpl_dict_t zero_unifiables;
    
    foreach( literal_vector_t, iter_h, m_hypotheses ) {

      iter_h->is_hypothesized = false;
      iter_h->is_reduced      = false;
      
      if( iter_h->is_discarded ) continue;
      
      double
        val_h  = m_ilp_solver.get_variable_val( iter_h->var_pay ),
        val_r  = m_ilp_solver.get_variable_val( iter_h->var_reduce );
      
      WHISPER( 4, iter_h->var_pay << ": ILP-var h_{" << iter_h->to_string() << "} = " << val_h << endl );
      WHISPER( 4, iter_h->var_reduce << ": ILP-var r_{" << iter_h->to_string() << "} = " << val_r << endl );
      
      /* Binary variables are read back with a 0.5 threshold. */
      iter_h->is_hypothesized = val_h > 0.5;
      iter_h->is_reduced      = val_r > 0.5;

      if( iter_h->is_hypothesized ) {
        m_best_axioms.insert( iter_h->generated_by );
        foreach( logvar_list_t, iter_arg, iter_h->arguments )
          actually_used_variables[ *iter_arg ] = true;
        if( 0.0 == iter_h->number ) zero_unifiables[ iter_h->to_string( PredicateArity ) ].push_back( iter_h->id );
      }

      if( iter_h->is_observed )
        foreach( logvar_list_t, iter_arg, iter_h->arguments )
          observed_variables[ *iter_arg ] = true;

      /* nh_opt counts hypothesized literals with positive cost; it is
       * subtracted from the objective when reporting m_cost below. */
      if( iter_h->is_hypothesized && 0.0 < iter_h->number ) nh_opt += 1;
      
    }

    /* u_{p,q} */
    /* Decode unification operators: an operator is applied iff its ILP
     * variable is on AND both endpoints are hypothesized; pick the
     * cheapest absorbing literal for each reduced literal. */
    foreach( unified_literal_dict_t, itr_u1, m_unified ) {

      literal_t *p1 = &m_hypotheses[ itr_u1->first ];
      p1->is_absorbed   = false;
      p1->p_absorbed_by = NULL;
      
      foreach( iu_dict_t, itr_u2, itr_u1->second ) {

        /* c(p1) >= c(p2) is guaranteed. */
        literal_t *p2   = &m_hypotheses[ itr_u2->first ];
        double     val  = m_ilp_solver.get_variable_val( itr_u2->second.var_operator );
        
        WHISPER( 4, itr_u2->second.var_operator << ": ILP-var u_{" << p1->to_string() << "," << p2->to_string() << "} = " << val << endl );

        /* val > 0.5, and make sure that both literals are hypothesized. */
        itr_u2->second.is_applied = val > 0.5 && p1->is_hypothesized && p2->is_hypothesized;
        if( itr_u2->second.is_applied ) {
          //nh_opt -= 1;
          if( !p1->is_reduced && !p2->is_reduced ) p1->is_reduced = true;
        }
        
        /* Identify an absorbing literal. */
        if( itr_u2->second.is_applied && p1->is_reduced ) {
          p1->p_absorbed_by =
            p1->p_absorbed_by == NULL ? p2 :
            (p1->p_absorbed_by->number > p2->number ? p2 : p1->p_absorbed_by);
          p1->is_absorbed = true;
        }
        
      } }

    /* e_{x,y,t} */    
    /* Decode variable-equality variables; keys are "x/y/t" strings
     * (see the var_e creation above in this file). Applied equalities
     * are re-registered into the local cluster e_vc. */
    foreach( e_dict_t, iter_e, m_edict ) {

      double val = m_ilp_solver.get_variable_val( iter_e->second );
      WHISPER( 4, iter_e->second << ": ILP-var: e_{" << iter_e->first << "} = " << val << endl );

      string_list_t var;
      split( var, iter_e->first, is_any_of( "/" ) );

      int           cl = lexical_cast<int>( var[2] );
    
      /* Where is this variable clustered to? */
      foreach( vector<int>, iter_eu, m_eudict[ iter_e->first ] ) {
        unifier_t *p_unifier = m_vc.get_unifier( *iter_eu );
        WHISPER( 4, p_unifier->var_operator << " = " << m_ilp_solver.get_variable_val( p_unifier->var_operator ) << endl );
      }
      
      if( val > 0.5 ) {
        string c = (format( "%d-%d" ) % m_var_vc[ g_vm.register_variable( var[0] ) ] % cl).str();

        WHISPER( 4, c << ":" << var[0] << ", " << var[1] << endl );

        if( 0 < m_eudict[ iter_e->first ].size() ) {
          unifier_t *p_unifier, *p_eu = m_vc.get_unifier( m_eudict[ iter_e->first ].front() );
          e_vc.add_unifier( &p_unifier, p_eu->v1, p_eu->v2 );
          p_unifier->set( *p_eu );
        }
      }
    
    }

    /* Unification for 0-0 literals. */
    /* Post-hoc pass: greedily unify pairs of zero-cost hypothesized
     * literals with the same predicate/arity, unless blocked by
     * constant clashes, non-merge constraints, or (for conditioned
     * predicates) the conditional-unification plugin. */
    plugin::conditional_unification_t cup( *this );
    
    foreach( slpl_dict_t, iter_sets, zero_unifiables ) {

      string_list_t conditioned_property;
      bool   f_conditioned = cup.is_conditioned_predicate( &conditioned_property, iter_sets->first );

      /* Process the property. */
      static pcrecpp::RE regex_split( ".*?/\\d:(\\d)" ), regex_tr( " ~ (.*)" );
      string requirements; string_list_t sl_requirements;
      int    target;

      WHISPER( 3, iter_sets->first << ": " << iter_sets->second.size() << " literals." << (f_conditioned ? " (CU)" : "") << endl );
      
      foreachc( int_list_t, iter_h1, iter_sets->second ) {
        for( int_list_t::const_iterator iter_h2 = next( iter_h1 ); iter_sets->second.end() != iter_h2; ++iter_h2 ) {
          
          literal_t
            &h1 = m_hypotheses[ *iter_h1 ], &h2 = m_hypotheses[ *iter_h2 ];

          if( !_is_worth_unifying( h1, h2 ) ) continue;
          
          /* Unify them only if variables introduced by them can be merged. */
          bool                f_arg_match_failure = false;
          unifier_operator_t *p_opr = &m_unified[ h1.id ][ h2.id ];
          p_opr->h1 = *iter_h1; p_opr->h2 = *iter_h2;
          
          for( int i=0; i<h1.arguments.size(); i++ ) {
            if( h1.arguments[i] == h2.arguments[i] ) continue;
            p_opr->unifiers.push_back( m_vc.add_unifier( NULL, h1.arguments[i], h2.arguments[i] ) );
            p_opr->is_applied = false;
          }

          if( f_conditioned ) {
            
            p_opr->is_postponed = true;

            /* Each conditioned-property string encodes a target
             * argument position ("…/d:(d)") and a "~"-separated list
             * of requirements; the plugin decides mergeability. */
            foreach( string_list_t, iter_cp, conditioned_property ) {
              regex_split.PartialMatch( *iter_cp, &target );
              regex_tr.PartialMatch(    *iter_cp, &requirements );
              split( sl_requirements, requirements, is_any_of(",") );
              f_arg_match_failure = !cup.generate_constraint( NULL, *p_opr, sl_requirements, target, true );

              if( f_arg_match_failure ) break;
            }
            
          } else {
          
            for( int i=0; i<h1.arguments.size(); i++ ) {

              if( h1.arguments[i] == h2.arguments[i] ) continue;
              if( IS_CONST( h1.arguments[i] ) && IS_CONST( h2.arguments[i] ) ) { f_arg_match_failure = true; break; }

              /* Then also consider variables that are assumed equal to these variables. */
              logvar_list_t v1e, v2e;
            
              e_vc.get_raw_cluster_variables( &v1e, h1.arguments[i] );
              e_vc.get_raw_cluster_variables( &v2e, h2.arguments[i] );

              foreach( logvar_list_t, iter_v1, v1e ) {
                foreach( logvar_list_t, iter_v2, v2e ) {
                  if( m_non_merges.is_in_same_cluster( *iter_v1, *iter_v2 ) ) { f_arg_match_failure = true; break; }
                } if( f_arg_match_failure ) break; }
            
              if( f_arg_match_failure ) break;
            
            }

          }
          
          if( f_arg_match_failure ) continue;
        
          /* Commit the unification: h2 is absorbed by h1, and their
           * arguments are merged into the equality cluster. */
          h1.unified_with.insert( h2.id );
          h2.is_absorbed = true;
          h2.p_absorbed_by = &h1;
          p_opr->is_applied = true;
          
          for( int i=0; i<h1.arguments.size(); i++ )
            e_vc.add_unifier( NULL, h1.arguments[i], h2.arguments[i] );
          
        } } /* h1, h2 */
      
    }
    
    e_vc.flush();

    /* Copy the flushed equality clusters into the member dictionaries
     * (cluster id is stringified for the string-keyed dicts). */
    foreach( variable_cluster_t::cluster_set_t, iter_cs, e_vc.get_cluster_set() ) {
      foreach( logvar_set_t, iter_v, iter_cs->second.variables ) {
        m_c2v[ lexical_cast<string>(iter_cs->first) ].insert( *iter_v );
        m_v2c[ *iter_v ] = lexical_cast<string>(iter_cs->first);
      } }
    
    /* Choose a representative name for every multi-variable cluster,
     * preferring constants and avoiding names that are observed or
     * actually used. */
    foreach( vs_dict_t, iter_vc, m_v2c ) {

      if( 1 >= m_c2v[ iter_vc->second ].size() ) continue;
    
      logical_variable_t *p_v = &m_c2n[ iter_vc->second ];

      if( IS_CONST( *p_v ) ) continue;
      /* NOTE(review): `0 == p_v` compares the POINTER to 0 and is
       * always false here (p_v points into m_c2n), so this branch is
       * dead. Presumably `0 == *p_v` ("no name chosen yet") was
       * intended — confirm against logical_variable_t's semantics
       * before changing. */
      else if( 0 == p_v ) *p_v = iter_vc->first;
      else if( IS_CONST( iter_vc->first ) ) *p_v = iter_vc->first;
      else if( string::npos != IS_UNBOUND( *p_v ) ) *p_v = iter_vc->first;
      else {
        if( observed_variables.end() != observed_variables.find( *p_v ) ) continue;
        if( actually_used_variables.end() != actually_used_variables.find( *p_v ) ) continue;
        *p_v = iter_vc->first;
      } }

    /* Substitute the arguments. */
    /* Replace each argument by its cluster's representative name,
     * remembering the original in o_arguments. */
    foreach( literal_vector_t, iter_h, m_hypotheses ) {
      if( iter_h->is_discarded ) continue;
      foreach( logvar_list_t, iter_arg, iter_h->arguments ) {
        iter_h->o_arguments.push_back( *iter_arg );
        *iter_arg = !has_key( m_v2c, *iter_arg ) ? *iter_arg : (!has_key( m_c2n, m_v2c[ *iter_arg ] ) ? *iter_arg : m_c2n[ m_v2c[ *iter_arg ] ]);
      } }
    
  }
  
  /* Calculate the cost of best hypothesis. */
  m_cost = m_ilp_solver.get_objfn() - nh_opt;

  /* Output the summary. */      
  WHISPER( 1, "Printing the output literals..." << endl );
  
  /* Write the results of inference. */
  string_list_t   sl_obs;

  foreach( literal_list_t, itr_o, m_input )
    sl_obs.push_back( itr_o->to_string() );
  
  /* Header lines: system/ILP/time/cost stats (costs are stored x100
   * internally, hence the /100.0 when reporting). */
  cout << format( "# System: problem/%s, depth/%d, hypotheses/%d, axioms/%d" ) % m_problem % m_depth_limit % m_hypotheses.size() % -1 << endl
       << format( "# ILP stats: solution/%d, variables/%d (h:%d, r:%d, u:%d, c:%d, e:%d), constraints/%d" ) % (!m_ilp_solver.has_feasible_solution() ? "NOT-AVAILABLE" : (m_ilp_solver.is_solution_optimal() ? "OPTIMAL" : "SUBOPTIMAL" )) % num_var_total % m_num_var_h % m_num_var_r % m_num_var_u % m_num_var_c % m_num_var_e % m_ilp_solver.get_num_constraints() << endl
       << format( "# Time elapsed: prepare/%.4f, ilp/%.4f, total/%.4f" ) % m_time_prepare % m_time_ilp % (m_time_prepare + m_time_ilp) << endl
       << format( "# Cost: %.4f (was %.4f)" ) % (m_ilp_solver.has_feasible_solution() ? (m_cost / 100.0) : (m_initial_cost / 100.0)) % (m_initial_cost / 100.0) << endl
       << format( "# Observations: " ) << join( sl_obs, " & " ) << endl;
   
  /* Print every hypothesized, non-absorbed literal; reduced literals
   * are printed with a zero cost and the names they merged with. */
  foreach( literal_vector_t, itr_h, m_hypotheses ) {

    if( !itr_h->is_hypothesized ) continue;
    
    /* Check if this is unified with another literal. */
    string_list_t sl_unified;

    foreach( int_set_t, itr_u2, itr_h->unified_with ) {
      unifier_operator_t *p_unifier_op = get_unifier_op( itr_h->id, *itr_u2 );
      if( NULL == p_unifier_op ) continue; /* Should not be evoked but... just for case. */
      if( p_unifier_op->is_applied ) sl_unified.push_back( m_hypotheses[ *itr_u2 ].named );
    }

    itr_h->merged_with = join( sl_unified, "," );

    if( itr_h->is_absorbed ) continue;
    
    if( itr_h->is_reduced ) {

      cout << itr_h->to_string(Literal) << ":0.0:" << itr_h->named << ":{" << itr_h->merged_with << "}";
      cout << endl;
      
    } else
      cout << itr_h->to_string() << endl;
    
  }

  cout << endl;

}

void ie::weighted_abduction_t::print_observations() {

  string_list_t   sl_obs;

  foreach( literal_list_t, itr_o, m_input )
    sl_obs.push_back( itr_o->to_string() );
  
  cout << format( "# System: problem/%s, depth/%d, hypotheses/%d, axioms/%d" ) % m_problem % m_depth_limit % m_hypotheses.size() % -1 << endl
       << format( "# ILP stats: solution/NOT-AVAILABLE, variables/-1 (h:-1, r:-1, u:-1, c:-1, e:-1), constraints/-1" ) << endl
       << format( "# Time elapsed: prepare/%.4f, ilp/%.4f, total/%.4f" ) % m_time_prepare % m_time_ilp % (m_time_prepare + m_time_ilp) << endl
       << format( "# Cost: %.4f (was %.4f)" ) % m_initial_cost % m_initial_cost << endl
       << format( "# Observations: " ) << join( sl_obs, " & " ) << endl;
  
  foreach( literal_vector_t, iter_h, m_hypotheses ) {
    if( iter_h->is_observed ) cout << iter_h->to_string() << endl;
  }
  
}

void ie::weighted_abduction_t::print_variable_substitutions() {
  
  string_list_t       sl_u;

  foreach( svset_dict_t, iter_c, m_c2v ) {
        
    sl_u.push_back( (format( "c%s = {%s}" ) %
                     iter_c->first % g_vm.join( iter_c->second, ", " )).str() );
      
  }

  sort( sl_u.begin(), sl_u.end() );

  cout << "[Substitution]" << endl;
  foreach( string_list_t, iter_s, sl_u ) cout << *iter_s << endl;
  cout << endl;
  
}

void ie::weighted_abduction_t::print_axioms( bool f_only_best ) {
  
  ss_dict_t     &axiom_dict = m_p_kbp->get_axiom_dict();
  string_list_t  sl_ad;
  int            num_total = 0, num_best = 0;
    
  foreach( ss_dict_t, iter_ad, axiom_dict ) {
    string axiom_name = string::npos == iter_ad->first.find( "]" ) ? iter_ad->second : iter_ad->first.substr( 0, iter_ad->first.find( "]" ) );
    bool   f_best     = has_key( m_best_axioms, axiom_name );
    num_total++;
    if( f_only_best && !f_best ) continue;
    sl_ad.push_back( (f_best ? "* " : "" ) + (string::npos == iter_ad->first.find( "]" ) ? iter_ad->second + "] " + iter_ad->first : iter_ad->first ) );
    if( f_best ) num_best++;
  }

  sort( sl_ad.begin(), sl_ad.end() );
    
  if( f_only_best ) cout << "[Axioms Used in the Best Interpretation (total/"<< num_total <<", best/"<< num_best <<")]" << endl;
  else              cout << "[Axioms Used for Inference (total/"<< num_total <<", best/"<< num_best <<")]" << endl;
  
  foreach( string_list_t, iter_ad, sl_ad ) cout << *iter_ad << endl;
  cout << endl;
  
}

void ie::weighted_abduction_t::print_ilp() {

  /* Dump the current ILP problem (objective, constraints, variable
   * names) in a human-readable algebraic form. Variable indices in
   * the output match the solver's internal indices, so deleted
   * variables are counted but not printed. */
  vector<ilp::variable_template_t>   &vars = m_ilp_solver.get_variables();
  vector<ilp::constraint_template_t> &cons = m_ilp_solver.get_constraints();

  /* Objective function. */
  string_list_t sl_obj; int num_var = 0;
  
  foreach( vector<ilp::variable_template_t>, iter_var, vars ) {
    num_var++;
    if( iter_var->is_deleted ) continue;
    /* /100 undoes the x100 cost scaling applied when the variables
     * were created (see add_literal). */
    sl_obj.push_back( (format("%.2f*x_{%d}") % (iter_var->coefficient/100) % (num_var-1)).str() );
  }

  /* Constraints. */
  /* Each constraint is rendered as "<terms> (+ const) <= 0" (or
   * "= 0"); sl_con_neg holds the sign-flipped terms used to express
   * the lower bound as an upper bound. */
  string_list_t sl_cons;
  foreach( vector<ilp::constraint_template_t>, iter_con, cons ) {
    if( iter_con->is_deleted ) continue;
    
    string_list_t sl_con, sl_con_neg;
    
    for( int i=0; i<iter_con->coefficients.size(); i++ ) {
      sl_con.push_back( lexical_cast<string>( iter_con->coefficients[i] ) + "*x_{" + lexical_cast<string>( iter_con->variables[ i ] ) + "}" );
      sl_con_neg.push_back( lexical_cast<string>( -iter_con->coefficients[i] ) + "*x_{" + lexical_cast<string>( iter_con->variables[ i ] ) + "}" );
    }

    /* lhs == rhs (both bounded): an equality constraint. */
    if( iter_con->lhs != ilp::CT_NO_MATTER && iter_con->lhs == iter_con->rhs ) {
      if( 0 == iter_con->lhs ) sl_cons.push_back( (format( "%s = 0" ) % join( sl_con, " + " )).str() );
      else                     sl_cons.push_back( (format( "%s + %d = 0" ) % join( sl_con, " + " ) % (-iter_con->lhs)).str() );
    } else {
      /* lhs bound (lhs <= terms) printed as -terms + lhs <= 0. */
      if( iter_con->lhs != ilp::CT_NO_MATTER ) {
        if( 0 == iter_con->lhs ) sl_cons.push_back( (format( "%s <= 0" ) % join( sl_con_neg, " + " )).str() );
        else                     sl_cons.push_back( (format( "%s + %d <= 0" ) % join( sl_con_neg, " + " ) % iter_con->lhs).str() );
      }
    
      /* rhs bound (terms <= rhs) printed as terms - rhs <= 0. */
      if( iter_con->rhs != ilp::CT_NO_MATTER ) {
        if( 0 == iter_con->rhs ) sl_cons.push_back( (format( "%s <= 0" ) % join( sl_con, " + " )).str() );
        else                     sl_cons.push_back( (format( "%s + %d <= 0" ) % join( sl_con, " + " ) % (-iter_con->rhs)).str() );
      }
    }
    
  }
  
  cout << "[ILP Problem (vars/" << num_var << ", cons/" << sl_cons.size() << ")]" << endl
       << "Objective function: " << endl
       << join( sl_obj, " + " ) << endl
       << endl
       << "Constraints:" << endl;
  foreach( string_list_t, iter_con, sl_cons ) cout << *iter_con << endl;
  cout << endl;

  /* Legend: map each surviving x_{i} back to its symbolic name. */
  cout << "Variables:" << endl;
  num_var = 0;
  foreach( vector<ilp::variable_template_t>, iter_var, vars ) {
    num_var++;
    if( iter_var->is_deleted ) continue;
    cout << "x_{" + lexical_cast<string>( num_var-1 ) + "}: " + iter_var->name << endl;
  }
  cout << endl;
  
}

void ie::weighted_abduction_t::draw_graph( ostream &os, graph_repr_t grt ) {

  /* Emit the hypothesis graph in Graphviz DOT format. Three levels of
   * detail are selected by grt: FullGraph also draws non-hypothesized
   * literals, SimpleGraph collapses absorbed literals into their
   * absorbers and hides unrelated nodes, and the remaining mode sits
   * in between (inferred from the branches below). */
  os << "digraph {" << endl
     << "rankdir=\"TB\";" << endl
     << "ranksep=1" << endl
     << "compound=true" << endl
     << "node [shape=\"none\", fontsize=18]" << endl;

  string_set_t                    g_observed, g_edges;
  unordered_map<int,string_set_t> g_nodes;

  /* Node label: "predicate:cost", colored blue for observations, grey
   * for unused, black for reduced, red for paid hypotheses. */
  struct { string operator()( literal_t &lt, literal_repr_t lrt ) {
    return (format( "label=\"%s:%.2f\",fontcolor=\"%s\"" ) %
            (lt.to_string( lrt )) % lt.number %
            (lt.is_observed ? "#0000bb" : (!lt.is_hypothesized ? "#999999" : (lt.is_reduced ? "#000000" : "#ff0000"))) ).str();
  } }  get_node_label;

  /* Explanation edge: labeled with the generating axiom; dotted/grey
   * when either endpoint is not hypothesized. Non-observed sources
   * anchor the edge tail at their group's cluster. */
  struct { string operator()( literal_t &lt1, literal_t &lt2 ) {
    return (format( "n%s -> n%s [%slabel=\"%s\",fontcolor=\"%s\",style=\"%s\"]" ) %
            lt1.id % lt2.id % (lt1.is_observed ? "" : "ltail=\"cluster_g"+ lexical_cast<string>(lt1.group_id) +"\",") % 
            lt1.generated_by %
            (!lt1.is_hypothesized || !lt2.is_hypothesized ? "#999999" : "#000000") %
            (!lt1.is_hypothesized || !lt2.is_hypothesized ? "dotted" : "solid") ).str();
  } }  get_unify_edge_label_DOC_ANCHOR_REMOVED; /* (see below) */

  /* NOTE(review): the format string below has no comma between
   * fontcolor=\"%s\" and dir=\"none\". DOT's grammar makes attribute
   * separators optional so Graphviz likely still parses it, but this
   * looks unintended — confirm and add the comma in a code change. */
  struct { string operator()( literal_t &lt1, literal_t &lt2, unifier_operator_t &u, const unifier_t *p_ustorage ) {
    return (format( "n%s -> n%s [label=\"%s\",color=\"#bb0000\",fontcolor=\"%s\"dir=\"none\",style=\"%s\"]" ) %
            lt1.id % lt2.id %
            u.to_string( p_ustorage ) %
            (!u.is_applied ? "#999999" : "#bb0000") %
            (!u.is_applied ? "dotted" : "solid") ).str();
  } }  get_unify_edge_label;

  /* We want to draw nodes for each group. */
  unordered_map<int,literal_pvector_t> g2h;
  iii_dict_t                           g2p;
  
  foreach( literal_vector_t, iter_h, m_hypotheses )
    g2h[ iter_h->group_id ].push_back( &(*iter_h) );
  
  for( unordered_map<int,literal_pvector_t>::iterator iter_g2h=g2h.begin(); g2h.end()!=iter_g2h; ++iter_g2h ) {
    
    string_set_t nodes;
    
    foreach( literal_pvector_t, iter_h, iter_g2h->second ) {

      literal_t *p_literal = *iter_h;
      
      if( p_literal->is_discarded ) continue;
      if( (FullGraph != grt && !p_literal->is_hypothesized) ) continue;

      /* In SimpleGraph mode, draw absorbed literals as their absorber. */
      literal_t *p_target = SimpleGraph == grt && p_literal->is_absorbed ? p_literal->p_absorbed_by : p_literal;
      bool       f_related = false;
    
      foreach( int_set_t, iter_c, p_literal->parents ) {

        literal_t *p_cliteral = &m_hypotheses[ *iter_c ];
        if( FullGraph != grt && !p_cliteral->is_hypothesized ) continue;

        literal_t *p_target_parent = SimpleGraph == grt && p_cliteral->is_absorbed ? p_cliteral->p_absorbed_by : p_cliteral;
      
        f_related = true;

        /* g2p deduplicates edges per (group, parent) pair. */
        if( 1 != g2p[ p_target->group_id ][ p_target_parent->id ] ) {
          g_edges.insert( get_edge_label( *p_target, *p_target_parent) );
          g2p[ p_target->group_id ][ p_target_parent->id ] = 1;
        }
      
      }

      /* Unification edges (never drawn in SimpleGraph mode). */
      if( SimpleGraph != grt && m_unified.end() != m_unified.find( p_literal->id ) ) {
        foreach( iu_dict_t, itr_u2, m_unified[ p_literal->id ] ) {
          if( ComplexGraph == grt && !itr_u2->second.is_applied ) continue;
          if(        m_hypotheses[ itr_u2->first ].is_discarded ) continue;
          g_edges.insert( get_unify_edge_label(*p_target, m_hypotheses[ itr_u2->first ], itr_u2->second, m_vc.get_unifier_storage() ) );
        }
      }

      /* Observed literals go into a shared rank=same cluster; others
       * into their group's cluster. */
      if( SimpleGraph != grt || f_related || p_target->is_observed )
        (p_target->is_observed ? g_observed : nodes).insert( (format( "n%s [%s]" ) % p_target->id % get_node_label( *p_target, SimpleGraph!=grt ? g_verbose_level > 1 ? OrgArgs : LiteralOrgArgs : Literal) ).str() );

    }

    if( 0 < nodes.size() ) g_nodes[ iter_g2h->first ] = nodes;

  }

  for( unordered_map<int,string_set_t>::iterator iter_nodes=g_nodes.begin(); g_nodes.end()!=iter_nodes; ++iter_nodes )
    os << "subgraph cluster_g" << iter_nodes->first << " { color=\"#888888\"; style=dotted; \n" << join( iter_nodes->second, "\n" ) << endl << "}" << endl;
  
  os << "subgraph cluster { color=\"white\"; rank=same; \n" << join( g_observed, "\n" ) << "}" << endl;
  os << join( g_edges, "\n" ) << endl;

  os << "};" << endl;
  
}

/* Read observation problems from is_input into m_input, one per line.
 * A line is either "#name<TAB>body" or "name] body"; a line with
 * neither separator is filed under the problem name "default".
 * Problem names are also appended to *p_problem_list when non-NULL. */
void ie::input_storage_t::load( string_list_t *p_problem_list, istream &is_input ) {

  string line;

  while( getline( is_input, line ) ) {

    trim( line );
    if( line.empty() ) continue;

    string_list_t fields;

    if( '#' == line[0] ) {

      /* Comment-style entry: "#name<TAB>body" — skip malformed lines. */
      iter_split( fields, line, first_finder( "\t" ) );
      if( 2 != fields.size() ) continue;

    } else {

      /* Bracketed entry: split once at "]". */
      iter_split( fields, line, first_finder( "]" ) );

      if( 1 == fields.size() ) {
        /* No "]" found: treat the whole line as the default problem. */
        fields.clear();
        fields.push_back( "default" );
        fields.push_back( line );
      } else {
        trim( fields[0] );
        trim( fields[1] );
      }

    }

    m_input[ fields[0] ] = fields[1];

    if( NULL != p_problem_list ) p_problem_list->push_back( fields[0] );

  }

}

/* Parse the observation string registered under `problem` (also tried
 * with a leading "#") into literals appended to *p_out. Returns 0 on
 * success, INPUT_ERROR_PROBLEM_NOT_FOUND when the problem is missing,
 * or INPUT_ERROR_SYNTAX if any conjunct fails to parse (well-formed
 * conjuncts are still appended). */
int ie::input_storage_t::parse_input( literal_list_t *p_out, string& problem ) {

  input_t::iterator it_problem = m_input.find( problem );
  if( m_input.end() == it_problem ) it_problem = m_input.find( "#" + problem );
  if( m_input.end() == it_problem ) return INPUT_ERROR_PROBLEM_NOT_FOUND;

  string_list_t conjuncts;
  iter_split( conjuncts, it_problem->second, first_finder( " & " ) );

  bool f_syntax_error = false;

  for( string_list_t::iterator it_c = conjuncts.begin(); conjuncts.end() != it_c; ++it_c ) {

    literal_t lt;

    /* Parse one conjunct; report and skip on syntax errors. */
    if( !kb::parse_literal( &lt, *it_c, NULL, true ) ) {
      WHISPER( 1, "Syntax error: " << *it_c << endl );
      f_syntax_error = true;
      continue;
    }

    p_out->push_back( lt );

  }

  return f_syntax_error ? INPUT_ERROR_SYNTAX : 0;

}
