
#include <map>
#include <vector>
#include <fstream>

#include <pcrecpp.h>
#include <cdb.h>

#include <boost/algorithm/string.hpp>
#include <boost/algorithm/string/join.hpp>
#include <boost/lexical_cast.hpp>
#include <boost/format.hpp>
#include <boost/utility.hpp>

#include "knowledge_base.hpp"
#include "henry.hpp"


/*
  Finalize function.
*/
kb::pool_t::~pool_t() {

  /* Release the axiom database handle (cdb index + file descriptor)
     acquired by load_axioms(); a no-op if nothing was loaded. */
  _cdb_unload( m_cdb_axioms );
  
}

/* Collects into *p_axioms every stored axiom whose RHS mentions a
   predicate/arity appearing in `lts`.  Each axiom string maps to the
   set of predicate/arity keys that triggered it.  Axioms whose RHS is
   a single literal are skipped when the best matching literal in
   `lts` has cost 0 (nothing to gain by explaining it). */
void kb::pool_t::extract_applicable_axioms( sss_dict_t *p_axioms, const literal_vector_t &lts ) {

  /* Create index. */
  string_set_t                  o_index;    /* distinct predicate/arity keys in lts */
  unordered_map<string, double> o_max_cost; /* max literal cost per key */

  foreachc( literal_vector_t, iter_h, lts ) {
    string query = iter_h->to_string( PredicateArity );
    o_index.insert( query );
    o_max_cost[ query ] = max( o_max_cost[ query ], iter_h->number );
  }

  foreach( string_set_t, iter_pa, o_index ) {
    
    /* All axioms for this key are stored as one tab-joined record. */
    string str_axioms;
    if( !_cdb_get( m_cdb_axioms, &str_axioms, *iter_pa ) ) continue;

    string_list_t axioms;
    split( axioms, str_axioms, is_any_of( "\t" ) );

    double max_cost = o_max_cost[ *iter_pa ];

    foreach( string_list_t, iter_ax, axioms ) {

      if( 0.0 == max_cost ) {
        /* Look for a '&' after the "=>"; its absence means a
           single-literal RHS. */
        size_t
          s_then = iter_ax->find( "=>" ),
          s_and  = iter_ax->find( "&", s_then );

        /* If just one literal in RHS and corresponding cost is 0 */
        if( string::npos == s_and ) continue;
      }
    
      (*p_axioms)[ *iter_ax ].insert( *iter_pa );
    
    }

  }
  
}

/* Core of abductive back-chaining.  For each axiom in `axioms`
   (keys produced by extract_applicable_axioms), enumerates every
   combination of literals in `hypotheses` that instantiates the
   axiom's RHS, then builds the corresponding instantiated LHS
   literals and hands them to the callback `p_cb` with `p_context`.
   `obs_hash` maps predicate/arity keys to indices (into `hypotheses`)
   of matching literals.  `recursion_limit` bounds repeated derivation
   of an identical literal along one ancestor chain; `f_rhs_strict`
   requires every RHS predicate type to be observed; the
   `f_bc_from_same_prefix` flag restricts chaining from observations
   to arguments sharing one variable prefix.
   Returns false if an axiom fails to parse or the callback aborts. */
bool kb::pool_t::apply_applicable_axioms( const sss_dict_t &axioms, bool (*p_cb)(const literal_list_t&, void*), void *p_context, const slps_dict_t &obs_hash, literal_vector_t &hypotheses, int recursion_limit, bool f_rhs_strict, bool f_bc_from_same_prefix ) {

  /* Two stored forms: "name] LHS => RHS" and plain "LHS => RHS". */
  static pcrecpp::RE regex_smr1( "(.*?) *\\](.*?)=>(.*?)" ), regex_smr2( "(.*?)=>(.*?)" );
    
  WHISPER( 1, "" ); g_progress_bar.wakeup( axioms.size() );
  
  foreachc( sss_dict_t, iter_ax, axioms ) {

    g_progress_bar.goto_school();
    
    /* All predicates must be found in observations when 'R' option is activated. */
    if( f_rhs_strict ) {
      
      /* Count the number of predicate types in RHS.  The first three
         characters of the key are the predicate count written by the
         compiler (_process_line's "%03lu" prefix). */
      if( iter_ax->second.size() < lexical_cast<int>( iter_ax->first.substr( 0, 3 ) ) ) {
        WHISPER( 5, iter_ax->first << " - All literals were not found in RHS." << endl ); continue;
      }
      
    }

    string_list_t   rhs_literals;
    string          axiom_line = iter_ax->first.substr( 3 ), lhs_str, rhs_str;

    /* Split the axiom into LHS/RHS, caching its id in m_axiom_no. */
    if( !regex_smr1.FullMatch( axiom_line, &m_axiom_no[ axiom_line ], &lhs_str, &rhs_str ) )
      if( !regex_smr2.FullMatch( axiom_line, &lhs_str, &rhs_str ) ) return false; /* ... */
    
    /* Find a literal in RHS corresponding to the observation. */
    split( rhs_literals, rhs_str, is_any_of( "&" ) );

    /* Find out possible combinations. */
    vector<int_list_t>   possible_combinations; /* [ [p(x), p(z)], [q(x)], [r(y), r(w)] ] */
    vector<literal_t>    rhs_templates;
    
    possible_combinations.push_back( int_list_t() );

    /* Cross-product construction: each RHS literal multiplies the
       partial combinations by its consistent instantiations. */
    foreach( string_list_t, iter_r, rhs_literals ) {
      
      literal_t axiom_lt;
      parse_literal( &axiom_lt, *iter_r, NULL );

      /* This is not the focused predicate. Search if there is such
         predicate in observations. */
      slps_dict_t::const_iterator iter_another = obs_hash.find( axiom_lt.to_string( PredicateArity ) );
      if( obs_hash.end() == iter_another ) continue;
      
      rhs_templates.push_back( axiom_lt );
      
      /* Add to the possible combination. */
      vector<int_list_t>  new_possible_combinations;

      foreachc( int_set_t, iter_rhs_literal, iter_another->second ) {

        /* Detect constants inconsistency. */
        bool f_inconsistent = false;
        
        for( int i=0; i<axiom_lt.arguments.size(); i++ ) {
          if( IS_CONST( axiom_lt.arguments[i] ) && IS_CONST( hypotheses[ *iter_rhs_literal ].arguments[i] ) &&
              axiom_lt.arguments[i] != hypotheses[ *iter_rhs_literal ].arguments[i] ) { f_inconsistent = true; break; }
        }
        
        if( f_inconsistent ) continue;
        
        for( int i=0; i<possible_combinations.size(); i++ ) {
          
          int_list_t new_list( possible_combinations[i] );
          new_list.push_back( *iter_rhs_literal );
          new_possible_combinations.push_back( new_list );

        }
      }

      possible_combinations = new_possible_combinations;
      
    }

    if( 0 == possible_combinations.size() ) continue;

    /* Assign the axiom no if necessary.
       Id format (from the stored name): "...^XORID" marks an
       exclusive group; "...*N" sets a per-axiom recursion limit. */
    string *p_axiom_id = &m_axiom_no[ axiom_line ];
    string  a_xor_id;
    size_t  xor_mark = p_axiom_id->find( "^" );
    size_t  num_mark = p_axiom_id->find( "*" );
    int     local_recursion_limit = -1;
    
    if( string::npos != xor_mark ) {
      if( string::npos != num_mark ) a_xor_id = p_axiom_id->substr( xor_mark+1, num_mark - xor_mark );
      else                           a_xor_id = p_axiom_id->substr( xor_mark+1 );
    }

    if( string::npos != num_mark ) local_recursion_limit = lexical_cast<int>( p_axiom_id->substr( num_mark+1 ) );

    /* Unnamed axiom (or name starting with ^ or *): invent an id. */
    if( 0 == xor_mark || 0 == num_mark || "" == *p_axiom_id ) {
      static char buffer[ 1024 ];
      sprintf( buffer, "0x%04x", ++m_axiom_counter );
      *p_axiom_id = string( buffer ) + *p_axiom_id;
    }
    
    /* Go into deep parsing... */
    WHISPER( 3, axiom_line << " (named " << *p_axiom_id << ", " << possible_combinations.size() << " possible instantiations, LRL:" << local_recursion_limit << ")" << endl );
    
    /* Back-chaining on the possible combination. */
    for( int i=0; i<possible_combinations.size(); i++ ) {

      IFWHISPER( 3 ) {
        DATE;
        
        for( int j=0; j<possible_combinations[i].size(); j++ )
          SWHISPER( 3, hypotheses[ possible_combinations[i][j] ].to_string() << " & " );

        SWHISPER( 3, endl );
      }
      
      /* Back-chaining on the possible combination. */
      vvset_dict_t     arg_mapper;     /* axiom variable -> set of bound instances */
      int_set_t        parents, ancestor, ancestor_repr, xor_path;
      double           rhs_total_number = 0.0;
      axiom_list_t     applied_axioms;

      /* Some calculus from instances of RHS literals. */
      ii_dict_t        incount;        /* per-literal-id usage count (duplicates cancel) */
      char             arg_prefix = 0;
      bool             f_all_obs = true;
      string           from_name = hypotheses[ possible_combinations[i][0] ].named;

      if( f_bc_from_same_prefix ) {
        for( int j=0; j<possible_combinations[i].size(); j++ )
          f_all_obs &= 0 == hypotheses[ possible_combinations[i][j] ].group_id;
      }
      
      for( int j=0; j<possible_combinations[i].size(); j++ ) {

        const literal_t *p_lpc = &hypotheses[ possible_combinations[i][j] ];
        incount[ p_lpc->id ] += 1;

        /* Check the prefix of arguments. */
        if( f_all_obs && f_bc_from_same_prefix ) {
          bool f_different_arg_prefix = false;
        
          foreachc( logvar_list_t, iter_arg, p_lpc->arguments ) {
            if( IS_CONST( *iter_arg ) ) continue;
            if( 0 == arg_prefix ) arg_prefix = g_vm.p2v(*iter_arg)[0];
            else if( g_vm.p2v(*iter_arg)[0] != arg_prefix ) { f_different_arg_prefix = true; break; }
          }

          if( f_different_arg_prefix ) {
            /* rhs_total_number = 0.0 flags this combination as
               cancelled; checked right after the loop. */
            WHISPER( 5, "Back-chaining from different arguments' prefixes." << endl ); rhs_total_number = 0.0; break;
          }
        }
        
        /* Two RHS literals sharing a XOR path are mutually exclusive
           unless they all belong to the same group. */
        if( has_common( xor_path, p_lpc->xor_path ) ) {
          int group_head = hypotheses[ possible_combinations[i][0] ].group_id;
          for( int x=0; x<possible_combinations[i].size(); x++ ) {
            if( group_head != hypotheses[ possible_combinations[i][x] ].group_id ) { group_head = -1; break; }
          }
          if( -1 == group_head ) {
            WHISPER( 5, "Literals in RHS cannot be hypothesized simultaneously! " ); rhs_total_number = 0.0; break;
          }
        }
        
        xor_path.insert( p_lpc->xor_path.begin(), p_lpc->xor_path.end() );
        ancestor.insert( p_lpc->ancestor.begin(), p_lpc->ancestor.end() );
        ancestor_repr.insert( p_lpc->ancestor_repr.begin(), p_lpc->ancestor_repr.end() );
        ancestor.insert( p_lpc->id );
        parents.insert( p_lpc->id );
        applied_axioms.insert( applied_axioms.end(), p_lpc->applied_axioms.begin(), p_lpc->applied_axioms.end() );
        
        rhs_total_number += p_lpc->number;

        /* Extract the mapper. */        
        vv_dict_t new_mapper;
        
        match_arguments( &new_mapper, *p_lpc, rhs_templates[ j ].arguments );

        foreach( vv_dict_t, iter_nm, new_mapper )
          arg_mapper[ iter_nm->first ].insert( iter_nm->second );
        
      }

      ancestor_repr.insert( hypotheses[ possible_combinations[i][0] ].id );

      if( 0.0 == rhs_total_number ) continue;
      
      /* Each observed literal may be used at most once per RHS. */
      bool f_consistent = true;
      
      foreach( ii_dict_t, iter_ic, incount ) {
        if( iter_ic->second > 1 ) { f_consistent = false; break; }
      }

      if( !f_consistent ) { WHISPER( 3, "... cancelled." << endl ); continue; }

      /* Create the unique key. */
      string_list_t     inst;
      string            instantiation;
      list<unifier_t>   conditions;

      foreach( vvset_dict_t, iter_arg_mapper, arg_mapper ) {

        logvar_list_t sl_inst( iter_arg_mapper->second.begin(), iter_arg_mapper->second.end() );
        sort( sl_inst.begin(), sl_inst.end() );
        
        inst.push_back( g_vm.p2v( iter_arg_mapper->first ) + "/" + g_vm.join( sl_inst, "," ) );

        /* The process when ... => rel(R) is applied to rel(r). */
        if( IS_CONST( iter_arg_mapper->first ) && !IS_CONST( *iter_arg_mapper->second.begin() ) )
          conditions.push_back( unifier_t( *iter_arg_mapper->second.begin(), iter_arg_mapper->first ) );
          
      }

      sort( inst.begin(), inst.end() );

      /* To avoid the loopy application of the same axiom as one
         that we have applied before! */
      string  the_aa_key = *p_axiom_id + "{" + join( inst, ", " ) + "}";
      axiom_t the_aa_key_no = register_applied_axiom( the_aa_key );
      
      WHISPER( 3, " + Bind: " << the_aa_key << endl );

      if( applied_axioms.end() != find( applied_axioms.begin(), applied_axioms.end(), the_aa_key_no ) ) {
        WHISPER( 3, "This axiom has already been applied in this chain!" << endl );
        continue;
      }

      /* Per-axiom recursion limit ("*N" suffix in the axiom name). */
      if( -1 != local_recursion_limit ) {
        
        int num_applied = 0;

        foreachc( int_set_t, iter_anc, ancestor_repr )
          if( hypotheses[ *iter_anc ].generated_by == *p_axiom_id ) num_applied++;

        if( num_applied >= local_recursion_limit ) {
          WHISPER( 3, "Exceeds the local recursion limit (" << num_applied << ")!" << endl );
          continue;
        }

      }
      
      applied_axioms.push_back( the_aa_key_no );

      /* XOR! */
      int    xor_id = -1;
    
      if( 0 < a_xor_id.length() ) {
        /* TODO: is 0 always true? */
        static char buffer[1024]; sprintf( buffer, "%s-%d", a_xor_id.c_str(), hypotheses[ possible_combinations[i][0] ].id );
        //static char buffer[1024]; sprintf( buffer, "%s", a_xor_id.c_str() );
        xor_id = std::tr1::hash<string>()( string(buffer) );
        xor_path.insert( xor_id );
        WHISPER( 5, " XOR-ID:" <<  xor_id << endl );
      }
      
      /* There is no need to perform backward-chaining on them. */
      for( int j=0; j<possible_combinations[i].size(); j++ ) {
        hypotheses[ possible_combinations[i][j] ].applied_axioms.push_back( the_aa_key_no );
        parents.insert( possible_combinations[i][j] );
      }
      
      /* Then parses LHS and instantiate them using the mapper that
         we built above! */
      literal_list_t lhs;
      vv_dict_t      flat_arg_mapper;
      double         sibling_cost = 0.0;
      int            num_recursive_matched = 0;

      foreach( vvset_dict_t, iter_am, arg_mapper ) { flat_arg_mapper[ iter_am->first ] = *iter_am->second.begin(); }

      string_list_t  lhs_literals;
      split( lhs_literals, lhs_str, is_any_of( "&" ) );
      
      foreach( string_list_t, itr_l, lhs_literals ) {
        
        literal_t lt;
        
        parse_literal( &lt, *itr_l, &flat_arg_mapper );

        /* To avoid recursion, check the ancestor literal. */
        if( -1 != recursion_limit ) {

          string lts = lt.to_string( Literal );
          
          foreach( int_set_t, iter_anc, ancestor ) {
            if( hypotheses[ *iter_anc ].arguments.size() != lt.arguments.size() ) continue;
            num_recursive_matched += hypotheses[ *iter_anc ].to_string(Literal) == lts ? 1 : 0;
          }

          if( recursion_limit <= num_recursive_matched ) break;
          
        }

        /* IMPORTANT: Avoid double-copy. */
        lhs.push_back( lt ); 
        literal_t *p_literal = &(lhs.back());

        /* Annotate the new hypothesis with its provenance. */
        p_literal->named              = from_name + "+" + p_literal->named;
        p_literal->generated_by       = *p_axiom_id;
        p_literal->number            *= rhs_total_number;
        p_literal->number_benefit     = rhs_total_number - p_literal->number;
        p_literal->xor_id             = xor_id;
        p_literal->conditions         = conditions;
        p_literal->axiom_substitution = arg_mapper;
        p_literal->ancestor           = ancestor;
        p_literal->ancestor_repr      = ancestor_repr;
        p_literal->parents            = parents;
        p_literal->xor_path           = xor_path;
        p_literal->applied_axioms     = applied_axioms;

        sibling_cost += p_literal->number;
        
      }

      if( -1 != recursion_limit && recursion_limit <= num_recursive_matched ) continue;

      foreach( literal_list_t, iter_h, lhs ) iter_h->number_sibling = sibling_cost;
      
      if( 0 < lhs.size() )
        if( !(*p_cb)( lhs, p_context ) ) return false;
      
    }
    
  }

  g_progress_bar.goto_hell();

  return true;
  
}

/*
  The database is loaded into our memory.
*/
bool kb::pool_t::load_axioms( const string& filename ) {

  if( '@' == filename[ filename.length()-1 ] ) { /* Dynamic compile. */
    ifstream ifs( filename.substr( 0, filename.length()-1 ).c_str() );
    compile( "./_dynamic-compiled.kb.cdb", ifs );
    ifs.close();
    return _cdb_load( &m_cdb_axioms, "./_dynamic-compiled.kb.cdb" );
  }
  
  return _cdb_load( &m_cdb_axioms, filename );
}

/*
  This is the knowledge base compiler, which outputs the mapper from
  RHS to the axioms.
*/
/* Registers one axiom line ("[id] LHS => RHS" or "LHS => RHS") in
   *p_mapper: every RHS literal's predicate/arity key maps to the raw
   axiom string prefixed with the 3-digit count of distinct RHS
   predicate types.  `line_no` is used only for error reporting. */
inline void _process_line( sss_dict_t *p_mapper, const string &line, int line_no ) {

  /* Do you have an ID?  The id, if any, precedes a ']'. */
  string_list_t id_axiom;
  split( id_axiom, line, is_any_of( "]" ) );
  trim( id_axiom[ id_axiom.size()-1 ] );
    
  string_list_t axiom;
  iter_split( axiom, id_axiom[ id_axiom.size()-1 ], first_finder( "=>" ) );
  trim( axiom[0] ); trim( axiom[1] );

  string_list_t lhs_literals, rhs_literals;
  split( lhs_literals, axiom[0], is_any_of( "&" ) );
  split( rhs_literals, axiom[1], is_any_of( "&" ) );

  /* Count the number of predicate types in RHS. */
  string_set_t     predicates;
  literal_vector_t parsed_literals;
    
  foreach( string_list_t, itr, rhs_literals ) {

    literal_t lt;
    if( !kb::parse_literal( &lt, *itr, NULL ) ) { WHISPER( 1, "Line: " << line_no << endl ); }
    parsed_literals.push_back( lt );

    predicates.insert( lt.to_string( PredicateArity ) );
      
  }

  /* The stored value is identical for every RHS literal, so build it
     once.  (The original rebuilt it per literal through a 1 MB static
     buffer, which could also overflow on very long lines.) */
  char count_buffer[ 16 ];
  sprintf( count_buffer, "%03lu", (unsigned long)predicates.size() );
  const string axiom_entry = string( count_buffer ) + line;
    
  foreach( literal_vector_t, itr, parsed_literals ) {

    /* Create mapping from literals in LHS/RHS to this axiom. */
    (*p_mapper)[ itr->to_string( PredicateArity ) ].insert( axiom_entry );
  
  }

}

bool kb::compile( const string &fn_out, istream &s_src ) {
  
  sss_dict_t the_mapper;
  int        status = 0;
  
  WHISPER( 1, "Organizing a hash map..." << endl );

  bool f_eof = false;
  int  line_no = 0;
  int  id_disjoint = 0;
  
  while( !f_eof ) {

    string_list_t sl_lines;
    string        line;
      
    for(int i=0; i<16; i++) {
      f_eof = !getline( s_src, line );
      if( f_eof ) break; else sl_lines.push_back( line );
    }

    int i; size_t line_buffer_size = sl_lines.size();
      
    for(i=0; i<line_buffer_size; i++) {

      line_no++;
      
      line = sl_lines[i];
      trim( line );
    
      if( '#' == line[0] || 0 == line.length() ) continue;

      size_t pos_imp = line.find( "=>" );
    
      if( string::npos == pos_imp ) {
        /* Hmm... there is something special here. */
        string_list_t literals;
        split( literals, line, is_any_of( "&" ) );

        foreach( string_list_t, iter_l, literals ) {
          literal_t lt;

          if( !parse_literal( &lt, *iter_l, NULL ) ) {
            WHISPER( 1, "Line: " << line_no << endl );
            continue;
          }

          if( "set" == lt.predicate ) {
            the_mapper[ "$OPTIONS$" ].insert( g_vm.join( lt.arguments ) );
          
          } else if( "set_condition" == lt.predicate ) {

            static pcrecpp::RE regex_cond( "set_condition\\((.*?)\\)" );
            string predicate = g_vm.p2v( lt.arguments[0] ), args;
            predicate = predicate.substr( 0, predicate.find(":") );
            regex_cond.PartialMatch( line, &args );

            if( '/' == predicate[0] ) the_mapper[ "$PP-REGEX$" ].insert( args );
            else                      the_mapper[ "$PP$" + predicate ].insert( args );

          } else if( "set_disjoint" == lt.predicate ) {

            string str_id_disjoint = lexical_cast<string>( id_disjoint++ );
            int    id = 0;
            
            foreach( logvar_list_t, iter_var, lt.arguments ) {
              if( 0 == id++ ) {
                the_mapper[ "$DJ-"+ str_id_disjoint +"$" ].insert( g_vm.p2v( *iter_var ) );
              } else {
                the_mapper[ "$DJ$" + g_vm.p2v( *iter_var ) ].insert( str_id_disjoint );
              }
            }
            
          } else if( "allow_multiple_explaining" == lt.predicate ) {

            the_mapper[ "$IGNORE-M$" ].insert( g_vm.p2v( lt.arguments[0] ) );
            
          }
        }
    
        continue;
      }

      if( '<' != line[ pos_imp-1 ] ) _process_line( &the_mapper, line, line_no );
      else {
        /* Hmm... you are bi-directional. */
        size_t aid = line.find( "]" );
        string lhs = line.substr( string::npos == aid ? 0 : aid+1, pos_imp-3-(string::npos == aid ? 0 : aid) ), rhs = line.substr( pos_imp );
        pcrecpp::RE( ":[0-9.]+" ).GlobalReplace( "", &rhs );
        pcrecpp::RE( ":[0-9.]+" ).GlobalReplace( "", &lhs );      
        _process_line( &the_mapper, line.substr( 0, pos_imp-1 ) + rhs, line_no );
        _process_line( &the_mapper, (string::npos == aid ? line.substr( pos_imp+3 ) : (line.substr( 0, aid+1 ) + " " + line.substr( pos_imp+2 )) ) + " => " + lhs, line_no );
      }
    }

  }
  
  WHISPER( 1, "Printing the hash..." << endl );

  /* Now, we'll tell you the truth. */
  cdb_make cdbm;
  int      fd = "stdout" == fn_out ? 1 : open( fn_out.c_str(), O_RDWR|O_CREAT, S_IRUSR|S_IWUSR|S_IROTH|S_IRGRP );
  
  cdb_make_start( &cdbm, fd );
  
  for( sss_dict_t::iterator itr = the_mapper.begin(); itr != the_mapper.end(); ++itr ) {
    string val = join( itr->second, "\t" );
    cdb_make_add( &cdbm, itr->first.c_str(), itr->first.length(), val.c_str(), val.length() );
  }

  cdb_make_finish( &cdbm );
  
  if( 1 != fd ) close(fd);

  return true;
  
}

/*
  Literal parser.
 */
/*
  Literal parser.

  Accepts "pred(args)", "pred(args):cost", "pred(args):cost:name" and
  the inequality form "x != y"; fills *l_out accordingly.  When
  p_mapper is non-NULL, variables are substituted through the mapper
  (unbound ones get a fresh UNBOUND_VARIABLE_PREFIX name and are added
  to it).  Returns false on a syntax error or an unsupported form.
  NOTE: uses static buffers/strtok — not reentrant or thread-safe.
*/
bool kb::parse_literal( literal_t* l_out, const string &literal, vv_dict_t *p_mapper, bool f_number_required, const string &name_prefix ) {

  static pcrecpp::RE regex_pc(" *(.*?)\\((.*?)\\):([0-9.]+) *"), regex_pcn(" *(.*?)\\((.*?)\\):([0-9.]+):(.*?) *"), regex_p(" *(.*?)\\((.*?)\\)[^:]*"), regex_ineq(" *(.*?)!=(.*?) *");
  static int _counter_unbound = 0, _literal_no = 0;
  static string args_str;

  l_out->number = 0.0;

  /* Try the cost-less form first, then with cost, then cost+name;
     finally fall back to the inequality form. */
  if( !regex_p.FullMatch( literal, &l_out->predicate, &args_str ) )
    if( !regex_pc.FullMatch( literal, &l_out->predicate, &args_str, &l_out->number ) )
      if( !regex_pcn.FullMatch( literal, &l_out->predicate, &args_str, &l_out->number, &l_out->named ) ) {
        string var1, var2;
        if( !regex_ineq.FullMatch( literal, &var1, &var2 ) ) {
          WHISPER( 1, "Syntax Error: " << literal << endl );
          return false;
        }
      
        l_out->predicate = "!="; l_out->number = 0.0;

        if( NULL == p_mapper ) {
          l_out->arguments.push_back( g_vm.register_variable( trim_copy( var1 ) ) );
          l_out->arguments.push_back( g_vm.register_variable( trim_copy( var2 ) ) );
        } else 
          return false;/* Not support. */
      
        return true;
      }

  /* Anonymous literal: give it a sequential hexadecimal name. */
  if( 0 == l_out->named.length() ) {
    static char buffer[ 1024 ];
    sprintf( buffer, "0x%04x", ++_literal_no );
    l_out->named.assign( buffer );
  }

  static char buffer[1024]; strcpy( buffer, args_str.c_str() );
  static char *p_element; p_element = strtok( buffer, ", " );

  l_out->arguments.clear();

  /* Zero-arity literal (empty parens): strtok returned NULL and the
     loops below would have dereferenced it. */
  if( NULL == p_element ) return true;
  
  if( NULL == p_mapper ) {

    /* Push the arguments. */
    do {
      l_out->arguments.push_back( g_vm.register_variable( string( p_element ) ) );
    } while( NULL != (p_element = strtok( NULL, ", " )) );
    
  } else {
    
    do {

      /* Register this variable. */
      logical_variable_t var = g_vm.register_variable( string(p_element) );
      
      if( IS_CONST( var ) ) {

        l_out->arguments.push_back( var );
        (*p_mapper)[ var ] = var;
        
      } else {
        
        vv_dict_t::iterator itr_a = p_mapper->find( var );

        if( p_mapper->end() != itr_a )
          l_out->arguments.push_back( itr_a->second );
        else {
          /* Register the unbounded variable. */
          static char buffer[1024]; sprintf( buffer, "%s%d", UNBOUND_VARIABLE_PREFIX, ++_counter_unbound );
          logical_variable_t ubvar = g_vm.register_variable( string( buffer ) );
          (*p_mapper)[ var ] = ubvar;
          l_out->arguments.push_back( ubvar );
        }
        
      }
    } while( NULL != (p_element = strtok( NULL, ", " )) );
    
  }

  return true;
  
}

/* Unifies the axiom argument template `arguments` with the actual
   arguments of `prop`, accumulating bindings in *p_mapper.  Returns
   false on a conflicting binding or an arity mismatch. */
bool kb::match_arguments( vv_dict_t *p_mapper, const literal_t &prop, const logvar_list_t &arguments ) {

  /* Defensive: an arity mismatch would index prop.arguments out of
     bounds below; treat it as a failed match instead. */
  if( prop.arguments.size() < arguments.size() ) return false;

  for( size_t arg_index = 0; arg_index < arguments.size(); arg_index++ ) {

    vv_dict_t::iterator iter_am = p_mapper->find( arguments[ arg_index ] );
    
    if( p_mapper->end() != iter_am ) {
      /* Already bound: must agree with the existing binding. */
      if( iter_am->second != prop.arguments[ arg_index ] ) return false;
    } else
      (*p_mapper)[ arguments[ arg_index ] ] = prop.arguments[ arg_index ];
    
  }

  return true;
  
}

/* cdb */
/* Opens `filename` as a cdb database into *p_cdb.  Returns false if
   the file cannot be opened or is not a valid cdb. */
bool _cdb_load( cdb_t *p_cdb, const string &filename ) {
  p_cdb->fd = open( filename.c_str(), O_RDONLY );

  if( -1 == p_cdb->fd ) return false;

  /* cdb_init fails (< 0) on a corrupt/invalid file; the original
     ignored that and still marked the handle as loaded. */
  if( 0 > cdb_init( &p_cdb->db, p_cdb->fd ) ) {
    close( p_cdb->fd );
    p_cdb->fd = -1;
    return false;
  }

  p_cdb->is_loaded = true;

  return true;
}

/* Frees the cdb index and closes the underlying descriptor.  Resets
   is_loaded so repeated calls are harmless (the original would
   double-free / double-close on a second call). */
void _cdb_unload( cdb_t &db ) {
  if( !db.is_loaded ) return;
  cdb_free( &db.db );
  close( db.fd );
  db.is_loaded = false;
  db.fd        = -1;
}

/* Returns true iff `key` exists in the loaded database. */
bool _cdb_haskey( cdb_t &db, const string &key ) {
  if( !db.is_loaded ) return false;
  const int found = cdb_find( &db.db, (char *)key.c_str(), key.length() );
  return found > 0;
}

/* Looks up `key` and copies the stored record into *p_out.  Returns
   false if the database is not loaded, the key is absent, or the
   read fails. */
bool _cdb_get( cdb_t &db, string *p_out, const string &key ) {

  if( !db.is_loaded ) return false;
  if( 0 >= cdb_find( &db.db, (char *)key.c_str(), key.length() ) ) return false;

  unsigned int data_size = cdb_datalen( &db.db );

  if( 0 == data_size ) { p_out->clear(); return true; }

  /* vector owns the buffer on every path (RAII, no raw new/delete);
     length-preserving assign also fixes the original's truncation at
     an embedded '\0'. */
  std::vector<char> buffer( data_size );

  if( 0 > cdb_read( &db.db, &buffer[0], data_size, cdb_datapos( &db.db ) ) ) return false;

  /* Make the copy. */
  p_out->assign( buffer.begin(), buffer.end() );

  return true;

}
