
/* Solve the ILP problem built in the preparation phase, decode the optimal
 * assignment back into the hypothesis data structures, merge unified
 * variables into clusters, substitute literal arguments with cluster
 * representatives, and print the best hypothesis plus summary statistics.
 * Operates entirely on members (m_ilp_solver, m_hypotheses, m_unified,
 * m_edict, m_v2c, m_c2v, m_c2n, ...); no parameters, no return value. */
void ie::weighted_abduction_t::go() {

  cmsg << v<1>
       << "# of ILP variables: " << m_ilp_solver.get_variables().size() << endl
       << "# of ILP constraints: " << m_ilp_solver.get_constraints().size() << endl;

  m_ilp_solver.flush();

  /* Give the solver whatever remains of the total time budget after the
   * preparation phase. */
  m_ilp_solver.set_timeout( m_timeout - m_time_prepare );
  
  cmsg << v<1> << "Started solving the linear programming problem... (" << m_timeout - m_time_prepare << ")" << endl;
  
  /* Solve the problem and record the time spent on the ILP itself. */
  double t1 = gettimeofday_sec();
  m_ilp_solver.solve();
  
  m_time_ilp = gettimeofday_sec() - t1;

  /* Retrieve the status of hypotheses. */
  map<string, bool> actually_used_variables, observed_variables;
  double            nh_opt = 0.0;  /* offset subtracted from the objective when computing m_cost. */
  
  /* NOTE(review): status value 2 appears to mark a failed/unsolved problem —
   * the solution is decoded only for any other status.  Confirm against the
   * ILP solver's status-code definitions. */
  if( 2 != m_ilp_solver.get_solution_status() )  {
  
    /* h_p, r_p: for each candidate literal, read whether it is paid for
     * (hypothesized) and whether its cost was reduced. */
    foreach( literal_list_t, iter_h, m_hypotheses ) {

      double
        val_h  = m_ilp_solver.get_variable_val( iter_h->var_pay ),
        val_r  = m_ilp_solver.get_variable_val( iter_h->var_reduce );
      
      cmsg << v<4> << "ILP-var h_{" << iter_h->to_string() << "} = " << val_h << endl;
      cmsg << v<4> << "ILP-var r_{" << iter_h->to_string() << "} = " << val_r << endl;
      
      /* Threshold at 0.5: the 0/1 ILP variables come back as doubles. */
      iter_h->is_hypothesized = val_h > 0.5;
      iter_h->is_reduced      = val_r > 0.5;
    
      /* Remember which logical variables are actually used / observed so the
       * cluster-representative election below can prefer keeping them. */
      if( iter_h->is_hypothesized )      
        foreach( string_list_t, iter_arg, iter_h->arguments )
          actually_used_variables[ *iter_arg ] = true;

      if( iter_h->is_observed )
        foreach( string_list_t, iter_arg, iter_h->arguments )
          observed_variables[ *iter_arg ] = true;
    
      /* Each positively-costed, non-reduced hypothesized literal contributed
       * an NH_OPT term to the objective; accumulate it so it can be
       * subtracted from the final cost. */
      if( iter_h->is_hypothesized && !iter_h->is_reduced && 0.0 < iter_h->number ) nh_opt += NH_OPT;
    
    }

    /* u_{p,q}: unification variables.  Decide for each unifiable pair whether
     * the unification was applied, and for each reduced literal record the
     * cheapest partner that absorbs it. */
    foreach( unified_literal_dict_t, itr_u1, m_unified ) {

      literal_t *p1 = m_hypotheses_idmap[ itr_u1->first ];
      p1->is_absorbed   = false;
      p1->p_absorbed_by = NULL;
      
      foreach( iu_dict_t, itr_u2, itr_u1->second ) {

        /* c(p1) >= c(p2) is guaranteed. */
        literal_t *p2   = m_hypotheses_idmap[ itr_u2->first ]; 
        double     val  = m_ilp_solver.get_variable_val( itr_u2->second.var_operator );
        
        itr_u2->second.is_applied = val > 0.5;

        cmsg << v<4> << "ILP-var u_{" << p1->to_string() << "," << p2->to_string() << "} = " << val << endl;
      
        /* Identify an absorbing literal: among all applied unifications of a
         * reduced p1, keep the partner with the smallest cost (number). */
        if( itr_u2->second.is_applied && p1->is_reduced ) {
          p1->p_absorbed_by =
            p1->p_absorbed_by == NULL ? p2 :
            (p1->p_absorbed_by->number > p2->number ? p2 : p1->p_absorbed_by);
          p1->is_absorbed = true;
        }
      } }

    /* e_{x,y,t}: variable-equality variables.  Activated equalities whose
     * originating unifier was actually applied are fed to the clusterer. */
    variable_cluster_t e_vc;
    
    foreach( e_dict_t, iter_e, m_edict ) {

      double val = m_ilp_solver.get_variable_val( iter_e->second );
      cmsg << v<4> << "ILP-var: e_{" << iter_e->first << "} = " << val << endl;

      /* NOTE(review): the key is assumed to be "/"-separated with at least
       * three fields (two variables and a cluster/term index) — confirm
       * against the code that populates m_edict. */
      string_list_t var;
      split( var, iter_e->first, is_any_of( "/" ) );

      int           cl = lexical_cast<int>( var[2] );
    
      /* Where is this variable clustered to? */
      if( val > 0.5 ) {

        /* Honor the equality only if at least one of the unifications that
         * introduced it is actually applied. */
        bool f_pass = false;
        
        foreach( list<unifier_t*>, iter_eu, m_eudict[ iter_e->first ] ) {
          if( m_ilp_solver.get_variable_val( (*iter_eu)->var_operator ) > 0.5 )
            { f_pass = true; break; }
        }

        if( !f_pass ) continue;
        
        string c = (format( "%d-%d" ) % m_var_vc[ var[0] ] % cl).str();

        cmsg << v<4> << c << ":" << var[0] << ", " << var[1] << endl;

        e_vc.add_unifier( m_eudict[ iter_e->first ].front() );
        
      }
    
    }

    e_vc.flush();

    /* Build the cluster-id <-> variable maps from the clustering result. */
    foreach( variable_cluster_t::cluster_set_t, iter_cs, e_vc.get_cluster_set() ) {
      foreach( string_set_t, iter_v, iter_cs->second.variables ) {
        m_c2v[ lexical_cast<string>(iter_cs->first) ].insert( *iter_v );
        m_v2c[ *iter_v ] = lexical_cast<string>(iter_cs->first);
      }
    }
    
    /* Elect a representative name (m_c2n) for each non-singleton cluster.
     * Preference order visible below: an already-chosen constant wins
     * outright; an empty slot takes the first candidate; a constant candidate
     * replaces a variable; an unbound-variable representative is replaced;
     * otherwise observed/actually-used representatives are kept. */
    foreach( ss_dict_t, iter_vc, m_v2c ) {

      if( 1 >= m_c2v[ iter_vc->second ].size() ) continue;
    
      string *p_v = &m_c2n[ iter_vc->second ];

      if( IS_CONST( *p_v ) ) continue;
      else if( 0 == p_v->length() ) *p_v = iter_vc->first;
      else if( IS_CONST( iter_vc->first ) ) *p_v = iter_vc->first;
      else if( string::npos != p_v->find( UNBOUND_VARIABLE_PREFIX ) ) *p_v = iter_vc->first;
      else {
        if( observed_variables.end() != observed_variables.find( *p_v ) ) continue;
        if( actually_used_variables.end() != actually_used_variables.find( *p_v ) ) continue;
        *p_v = iter_vc->first;
      } }

    /* Substitute the arguments: keep the original argument in o_arguments and
     * replace each argument with its cluster's representative name when the
     * cluster has one. */
    foreach( literal_list_t, iter_h, m_hypotheses ) {
      foreach( string_list_t, iter_arg, iter_h->arguments ) {
        iter_h->o_arguments.push_back( *iter_arg );
        *iter_arg = !has_key( m_v2c, *iter_arg ) ? *iter_arg : (!has_key( m_c2n, m_v2c[ *iter_arg ] ) ? *iter_arg : m_c2n[ m_v2c[ *iter_arg ] ]);
      } }
  }
  
  /* Calculate the cost of best hypothesis: objective value minus the
   * accumulated NH_OPT offsets. */
  m_cost = m_ilp_solver.get_objfn() - nh_opt;

  /* Output the summary. */      
  cmsg << v<1> << "Printing the output literals..." << endl;
  
  /* Write the results of inference. */
  string_list_t   sl_obs;

  foreach( literal_list_t, itr_o, m_input )
    sl_obs.push_back( itr_o->to_string() );
  
  cout << format( "# System: problem/%s, depth/%d, hypotheses/%d" ) % m_problem.substr(1) % m_depth_limit % m_hypotheses.size() << endl
       << format( "# ILP stats: solution-type/%d, variables/%d, constraints/%d" ) % m_ilp_solver.get_solution_status() % m_ilp_solver.get_variables().size() % m_ilp_solver.get_constraints().size() << endl
       << format( "# Time elapsed: prepare/%.4f, ilp/%.4f, total/%.4f" ) % m_time_prepare % m_time_ilp % (m_time_prepare + m_time_ilp) << endl
       << format( "# Cost/%.4f" ) % m_cost << endl
       << format( "# Observations: " ) << join( sl_obs, ", " ) << endl;
   
  /* Print every hypothesized literal.  Absorbed literals are skipped (their
   * absorber stands for them); reduced literals are printed with a zero cost
   * and annotated with the literals they were merged with. */
  foreach( literal_list_t, itr_h, m_hypotheses ) {

    if( !itr_h->is_hypothesized ) continue;
    
    /* Check if this is unified with another literal.  m_unified may hold the
     * pair under either literal's id, so both orientations are probed.
     * NOTE(review): if itr_h->id is a key of m_unified but the pair lives
     * under *itr_u2, the else-branch is never reached — confirm m_unified is
     * keyed consistently with unified_with. */
    string_list_t sl_unified;

    foreach( int_set_t, itr_u2, itr_h->unified_with ) {
      bool f_ok = false;
      if( m_unified.end() != m_unified.find( itr_h->id ) ) {
        if( m_unified[ itr_h->id ].end() != m_unified[ itr_h->id ].find( *itr_u2 ) )
          if( m_unified[ itr_h->id ][ *itr_u2 ].is_applied) f_ok = true;
      } else if( m_unified.end() != m_unified.find( *itr_u2 ) ) {
        if( m_unified[ *itr_u2 ].end() != m_unified[ *itr_u2 ].find( itr_h->id ) )
          if( m_unified[ *itr_u2 ][ itr_h->id ].is_applied ) f_ok = true;
      }
      if( f_ok ) sl_unified.push_back( m_hypotheses_idmap[ *itr_u2 ]->named );
    }

    itr_h->merged_with = join( sl_unified, "," );

    if( itr_h->is_absorbed ) continue;
    
    if( itr_h->is_reduced ) {

      cout << itr_h->to_string(Literal) << ":0.0:{" << itr_h->named << "}:{" << itr_h->merged_with << "}";
      cout << endl;
      
    } else
      cout << itr_h->to_string() << endl;
    
  }

  cout << endl;
  
  /* Results of variable clustering (optional, sorted for stable output). */
  if( m_f_vs_output ) {
  
    string_list_t       sl_u;

    foreach( sss_dict_t, iter_c, m_c2v ) {
        
      sl_u.push_back( (format( "c%s = {%s}" ) %
                       iter_c->first % join( iter_c->second, ", " )).str() );
      
    }

    sort( sl_u.begin(), sl_u.end() );

    cout << "[Substitution]" << endl;
    foreach( string_list_t, iter_s, sl_u ) cout << *iter_s << endl;
    cout << endl;

  }

  /* Axiom dictionary (optional): list the axioms used, sorted. */
  if( m_f_axiom_dict_output ) {

    cout << "[Axioms Used for Inference]" << endl;

    ss_dict_t     &axiom_dict = m_p_kbp->get_axiom_dict();
    string_list_t  sl_ad;
    
    /* Keys without "]" get their id prefixed as "id] axiom". */
    foreach( ss_dict_t, iter_ad, axiom_dict )
      sl_ad.push_back( string::npos == iter_ad->first.find( "]" ) ? iter_ad->second + "] " + iter_ad->first : iter_ad->first );

    sort( sl_ad.begin(), sl_ad.end() );
    
    foreach( string_list_t, iter_ad, sl_ad ) cout << *iter_ad << endl;
    cout << endl;
    
  }
  
}

/* Emit the current proof graph in Graphviz DOT format to `os`.
 *
 * @param os   Output stream receiving the DOT text.
 * @param grt  Graph flavor: FullGraph draws every candidate (greyed/dotted
 *             when inactive) including unification edges; otherwise only the
 *             hypothesized part is drawn and absorbed literals are collapsed
 *             onto their absorbers.
 *
 * Fix: the unify-edge attribute list was missing a comma between
 * fontcolor="..." and dir="none", producing an invalid DOT attribute list
 * (cf. the correctly comma-separated edge-label format below).  Also removed
 * two unused locals (drawn_unification, hypotheses). */
void ie::weighted_abduction_t::draw_graph( ostream &os, graph_repr_t grt ) {

  /* Create Hobbs+ (93) style proof graph. */
  os << "digraph {" << endl
     << "rankdir=\"TB\"" << endl
     << "ranksep=1" << endl    
     << "node [shape=\"none\"]" << endl;

  /* Collect DOT fragments in sets so duplicates collapse automatically. */
  string_set_t g_nodes, g_observed, g_edges;

  /* Node label: literal text plus its cost, colored by role — blue for
   * observations, grey for non-hypothesized, black for reduced, red
   * otherwise. */
  struct { string operator()( literal_t &lt, literal_repr_t lrt ) {
    return (format( "label=\"%s:%.2f\",fontcolor=\"%s\"" ) %
            (lt.to_string( lrt )) % lt.number %
            (lt.is_observed ? "#0000bb" : (!lt.is_hypothesized ? "#999999" : (lt.is_reduced ? "#000000" : "#ff0000"))) ).str();
  } }  get_node_label;

  /* Explanation edge child -> parent, labeled with the generating axiom;
   * greyed/dotted if either endpoint is not hypothesized. */
  struct { string operator()( literal_t &lt1, literal_t &lt2 ) {
    return (format( "\"n%s\" -> \"n%s\" [label=\"%s\",fontcolor=\"%s\",style=\"%s\"]" ) %
            lt1.id % lt2.id %
            lt1.generated_by %
            (!lt1.is_hypothesized || !lt2.is_hypothesized ? "#999999" : "#000000") %
            (!lt1.is_hypothesized || !lt2.is_hypothesized ? "dotted" : "solid") ).str();
  } }  get_edge_label;
  
  /* Undirected (dir="none") unification edge; greyed/dotted when the
   * unification was not applied.  FIXED: comma inserted between the
   * fontcolor and dir attributes. */
  struct { string operator()( literal_t &lt1, literal_t &lt2, unifier_operator_t &u ) {
    return (format( "\"n%s\" -> \"n%s\" [label=\"%s\",color=\"#bb0000\",fontcolor=\"%s\",dir=\"none\",style=\"%s\"]" ) %
            lt1.id % lt2.id %
            u.to_string() %
            (!u.is_applied ? "#999999" : "#bb0000") %
            (!u.is_applied ? "dotted" : "solid") ).str();
  } }  get_unify_edge_label;
  
  foreach( literal_list_t, iter_h, m_hypotheses ) {

    if( (FullGraph != grt && !iter_h->is_hypothesized) ) continue;

    /* In non-full mode, draw an absorbed literal as its absorber. */
    literal_t *p_target = FullGraph != grt && iter_h->is_absorbed ? iter_h->p_absorbed_by : &(*iter_h);
    bool       f_related = false;
    
    foreach( literal_pset_t, iter_c, iter_h->parents ) {

      if( FullGraph != grt && !(*iter_c)->is_hypothesized ) continue;

      literal_t *p_target_parent = FullGraph != grt && (*iter_c)->is_absorbed ? (*iter_c)->p_absorbed_by : (*iter_c);
      
      f_related = true;
      
      (p_target_parent->is_observed ? g_observed : g_nodes).insert( (format( "\"n%s\" [%s]" ) % p_target_parent->id % get_node_label( *p_target_parent, FullGraph==grt ? LiteralOrgArgs : Literal ) ).str() );
      g_edges.insert( get_edge_label(*p_target, *p_target_parent) );
      
    }

    /* Unification edges are only shown in the full graph. */
    if( FullGraph == grt && m_unified.end() != m_unified.find( iter_h->id ) ) {
      foreach( iu_dict_t, itr_u2, m_unified[ iter_h->id ] ) {
        g_edges.insert( get_unify_edge_label(*p_target, *m_hypotheses_idmap[ itr_u2->first ], itr_u2->second) );
      }
    }

    if( FullGraph == grt || f_related || p_target->is_observed )
      (p_target->is_observed ? g_observed : g_nodes).insert( (format( "\"n%s\" [%s]" ) % p_target->id % get_node_label( *p_target, FullGraph==grt ? LiteralOrgArgs : Literal) ).str() );
    
  }

  os << join( g_nodes, "\n" ) << endl;
  /* Observations share one rank so they line up horizontally. */
  os << "subgraph { rank=same; \n" << join( g_observed, "\n" ) << "}" << endl;
  os << join( g_edges, "\n" ) << endl;

  /* NOTE(review): strict DOT grammar does not allow a trailing ";" after the
   * closing brace of a graph — kept for output compatibility; verify the
   * downstream consumer accepts it. */
  os << "};" << endl;
  
}

/* Read observation problems from `is_input`, one problem per non-empty line,
 * and store the parsed literals in m_input keyed by problem name.
 *
 * @param p_problem_list  If non-NULL, receives the name of every problem
 *                        encountered, in input order.
 * @param is_input        Stream of problem lines.  A line is either
 *                        "#name<TAB>literals", "label] literals" (label is
 *                        stored as "#label"), or bare literals (stored under
 *                        "#default").  Literals are separated by " & "; a
 *                        literal containing "!=" becomes an inequality
 *                        constraint between two variables. */
void ie::input_storage_t::load( string_list_t *p_problem_list, istream &is_input ) {

  string buffer;

  while( getline( is_input, buffer ) ) {

    trim( buffer );

    /* Skip blank lines. */
    if( buffer.empty() ) continue;

    string_list_t fields;

    if( '#' == buffer[0] ) {

      /* "#name<TAB>literals" — ill-formed lines are silently dropped. */
      iter_split( fields, buffer, first_finder( "\t" ) );

      if( 2 != fields.size() ) continue;

    } else {

      /* "label] literals" or bare literals. */
      iter_split( fields, buffer, first_finder( "]" ) );

      if( 1 == fields.size() ) {
        /* No label: file the whole line under the default problem. */
        fields.clear();
        fields.push_back( "#default" );
        fields.push_back( buffer );
      } else {
        trim( fields[0] ); trim( fields[1] );
        fields[0] = "#" + fields[0];
      }

    }

    literal_list_t &literals = m_input[ fields[0] ];

    if( NULL != p_problem_list )
      p_problem_list->push_back( fields[0] );

    /* Split the conjunction and parse each member. */
    string_list_t conjuncts;
    iter_split( conjuncts, fields[1], first_finder( " & " ) );

    for( string_list_t::iterator it_c = conjuncts.begin(); it_c != conjuncts.end(); ++it_c ) {

      literal_t lt;

      if( string::npos != it_c->find( "!=" ) ) {

        /* Inequality constraint: keep the two trimmed sides as arguments
         * of a "!=" pseudo-predicate. */
        lt.predicate = "!=";

        iter_split( lt.arguments, *it_c, first_finder( "!=" ) );
        trim( lt.arguments[0] ); trim( lt.arguments[1] );

      } else {

        /* Ordinary literal: delegate to the knowledge-base parser. */
        kb::parse_literal( &lt, *it_c );

      }

      literals.push_back( lt );

    }

  }

}
