<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/html;charset=UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=9"/>
<title>Reranker Framework (ReFr): hadoop-run.py Source File</title>

<link href="tabs.css" rel="stylesheet" type="text/css"/>
<link href="doxygen.css" rel="stylesheet" type="text/css" />

<link href="search/search.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="jquery.js"></script>
<script type="text/javascript" src="search/search.js"></script>
<script type="text/javascript">
  // Select the default search category (index 0) once the DOM is ready.
  // $(fn) is jQuery shorthand for $(document).ready(fn).
  $(function() { searchBox.OnSelectItem(0); });
</script>

</head>
<body>
<div id="top"><!-- do not remove this div! -->


<div id="titlearea">
<table cellspacing="0" cellpadding="0">
 <tbody>
 <tr style="height: 56px;">
  
  
  <td style="padding-left: 0.5em;">
   <div id="projectname">Reranker Framework (ReFr)
   
   </div>
   <div id="projectbrief">Reranking framework for structure prediction and discriminative language modeling</div>
  </td>
  
  
  
 </tr>
 </tbody>
</table>
</div>

<!-- Generated by Doxygen 1.7.6.1 -->
<script type="text/javascript">
// Global controller for the navigation-bar search widget (the #MSearchBox
// elements below reference it from their inline event handlers), so it must
// remain a top-level "var" to be reachable as a window property.
var searchBox = new SearchBox("searchBox", "search",false,'Search');
</script>
  <div id="navrow1" class="tabs">
    <ul class="tablist">
      <li><a href="index.html"><span>Main&#160;Page</span></a></li>
      <li><a href="namespaces.html"><span>Namespaces</span></a></li>
      <li><a href="annotated.html"><span>Classes</span></a></li>
      <li class="current"><a href="files.html"><span>Files</span></a></li>
      <li>
        <div id="MSearchBox" class="MSearchBoxInactive">
        <span class="left">
          <img id="MSearchSelect" src="search/mag_sel.png"
               onmouseover="return searchBox.OnSearchSelectShow()"
               onmouseout="return searchBox.OnSearchSelectHide()"
               alt=""/>
          <input type="text" id="MSearchField" value="Search" accesskey="S"
               onfocus="searchBox.OnSearchFieldFocus(true)" 
               onblur="searchBox.OnSearchFieldFocus(false)" 
               onkeyup="searchBox.OnSearchFieldChange(event)"/>
          </span><span class="right">
            <a id="MSearchClose" href="javascript:searchBox.CloseResultsWindow()"><img id="MSearchCloseImg" border="0" src="search/close.png" alt=""/></a>
          </span>
        </div>
      </li>
    </ul>
  </div>
  <div id="navrow2" class="tabs2">
    <ul class="tablist">
      <li><a href="files.html"><span>File&#160;List</span></a></li>
      <li><a href="globals.html"><span>File&#160;Members</span></a></li>
    </ul>
  </div>
</div>
<div class="header">
  <div class="headertitle">
<div class="title">hadoop-run.py</div>  </div>
</div><!--header-->
<div class="contents">
<a href="hadoop-run_8py.html">Go to the documentation of this file.</a><div class="fragment"><pre class="fragment"><a name="l00001"></a><a class="code" href="namespacehadoop-run.html">00001</a> <span class="comment">#!/usr/bin/python</span>
<a name="l00002"></a>00002 <span class="comment">#-----------------------------------------------------------------------</span>
<a name="l00003"></a>00003 <span class="comment"># Copyright 2012, Google Inc.</span>
<a name="l00004"></a>00004 <span class="comment"># All rights reserved.</span>
<a name="l00005"></a>00005 <span class="comment"># </span>
<a name="l00006"></a>00006 <span class="comment"># Redistribution and use in source and binary forms, with or without</span>
<a name="l00007"></a>00007 <span class="comment"># modification, are permitted provided that the following conditions are</span>
<a name="l00008"></a>00008 <span class="comment"># met:</span>
<a name="l00009"></a>00009 <span class="comment"># </span>
<a name="l00010"></a>00010 <span class="comment">#   * Redistributions of source code must retain the above copyright</span>
<a name="l00011"></a>00011 <span class="comment">#     notice, this list of conditions and the following disclaimer.</span>
<a name="l00012"></a>00012 <span class="comment">#   * Redistributions in binary form must reproduce the above</span>
<a name="l00013"></a>00013 <span class="comment">#     copyright notice, this list of conditions and the following disclaimer</span>
<a name="l00014"></a>00014 <span class="comment">#     in the documentation and/or other materials provided with the</span>
<a name="l00015"></a>00015 <span class="comment">#     distribution.</span>
<a name="l00016"></a>00016 <span class="comment">#   * Neither the name of Google Inc. nor the names of its</span>
<a name="l00017"></a>00017 <span class="comment">#     contributors may be used to endorse or promote products derived from</span>
<a name="l00018"></a>00018 <span class="comment">#     this software without specific prior written permission.</span>
<a name="l00019"></a>00019 <span class="comment">#</span>
<a name="l00020"></a>00020 <span class="comment"># THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS</span>
<a name="l00021"></a>00021 <span class="comment"># &quot;AS IS&quot; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT</span>
<a name="l00022"></a>00022 <span class="comment"># LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR</span>
<a name="l00023"></a>00023 <span class="comment"># A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT</span>
<a name="l00024"></a>00024 <span class="comment"># OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,</span>
<a name="l00025"></a>00025 <span class="comment"># SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT</span>
<a name="l00026"></a>00026 <span class="comment"># LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,           </span>
<a name="l00027"></a>00027 <span class="comment"># DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY           </span>
<a name="l00028"></a>00028 <span class="comment"># THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT</span>
<a name="l00029"></a>00029 <span class="comment"># (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE</span>
<a name="l00030"></a>00030 <span class="comment"># OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.</span>
<a name="l00031"></a>00031 <span class="comment"># -----------------------------------------------------------------------------</span>
<a name="l00032"></a>00032 <span class="comment">## @file hadoop-run.py</span>
<a name="l00033"></a>00033 <span class="comment">#  A python program which will train a reranking model on a Hadoop cluster using</span>
<a name="l00034"></a>00034 <span class="comment">#  the Iterative Parameter Mixtures perceptron training algorithm.</span>
<a name="l00035"></a>00035 <span class="comment">#</span>
<a name="l00036"></a>00036 <span class="comment">#  You must first have a Hadoop account configured.  In order to train, you will</span>
<a name="l00037"></a>00037 <span class="comment">#  need to have the following:</span>
<a name="l00038"></a>00038 <span class="comment">#   - Training data locally accessible (accessible by the script)</span>
<a name="l00039"></a>00039 <span class="comment">#   - A HadoopFS (HDFS) directory with enough space to store the input</span>
<a name="l00040"></a>00040 <span class="comment">#   training data, the intermediate models and the final model.</span>
<a name="l00041"></a>00041 <span class="comment">#</span>
<a name="l00042"></a>00042 <span class="comment">#  The program will attempt to locate the Hadoop binary and the</span>
<a name="l00043"></a>00043 <span class="comment">#  Hadoop streaming library.  If this fails, you can specify these </span>
<a name="l00044"></a>00044 <span class="comment">#  via command-line parameters (--hadooproot and --streamingloc).</span>
<a name="l00045"></a>00045 <span class="comment">#</span>
<a name="l00046"></a>00046 <span class="comment">#  Usage:</span>
<a name="l00047"></a>00047 <span class="comment">#   hadoop-run.py --input InputData --hdfsinputdir HDFSIndir \\</span>
<a name="l00048"></a>00048 <span class="comment">#                 --hdfsoutputdir HDFSOutDir --outputdir OutputDir</span>
<a name="l00049"></a>00049 <span class="comment">#</span>
<a name="l00050"></a>00050 <span class="comment">#   InputData - A comma-separated list of file globs containing the training data.</span>
<a name="l00051"></a>00051 <span class="comment">#               These must be accessible by script.</span>
<a name="l00052"></a>00052 <span class="comment">#   OutputDir - The local directory where the trained model(s) are written.  The</span>
<a name="l00053"></a>00053 <span class="comment">#               default model name is &#39;model&#39;.  You can change this using the</span>
<a name="l00054"></a>00054 <span class="comment">#               --modelname command-line parameter.</span>
<a name="l00055"></a>00055 <span class="comment">#   HDFSInDir - A directory on HDFS where the input data will be copied to.</span>
<a name="l00056"></a>00056 <span class="comment">#   HDFSOutDir - A directory on HDFS where the temporary data and output data</span>
<a name="l00057"></a>00057 <span class="comment">#                will be written to.</span>
<a name="l00058"></a>00058 <span class="comment">#                The final models are copied to the locally-accessible OutputDir.</span>
<a name="l00059"></a>00059 <span class="comment">#</span>
<a name="l00060"></a>00060 <span class="comment"># Check input command line options.</span>
<a name="l00061"></a>00061 <span class="comment">#  @author kbhall@google.com (Keith Hall)</span>
<a name="l00062"></a>00062 <span class="comment">#</span>
<a name="l00063"></a>00063 
<a name="l00064"></a>00064 <span class="keyword">from</span> optparse <span class="keyword">import</span> OptionParser
<a name="l00065"></a>00065 <span class="keyword">import</span> os, sys, re, gzip, glob, signal, atexit, operator, random
<a name="l00066"></a>00066 <span class="keyword">import</span> codecs
<a name="l00067"></a>00067 <span class="keyword">import</span> pyutil
<a name="l00068"></a>00068 <span class="keyword">import</span> hadooputil
<a name="l00069"></a>00069 <span class="keyword">import</span> defs
<a name="l00070"></a>00070 
<a name="l00071"></a>00071 <span class="comment">##</span>
<a name="l00072"></a>00072 <span class="comment">#  The following arguments are available to hadoop-run.py</span>
<a name="l00073"></a>00073 <span class="comment">#</span>
<a name="l00074"></a>00074 <span class="comment">#  @param[in] hadooproot Location of hadoop installation.</span>
<a name="l00075"></a>00075 <span class="comment">#  @param[in] refrbin Location of the Reranker Framework bin directory.</span>
<a name="l00076"></a>00076 <span class="comment">#  @param[in] develdata Location of development data.</span>
<a name="l00077"></a>00077 <span class="comment">#  @param[in] input Location of input data on local FS.</span>
<a name="l00078"></a>00078 <span class="comment">#  @param[in] hdfsinputdir Location of input data on HDFS.</span>
<a name="l00079"></a>00079 <span class="comment">#  @param[in] hdfsoutputdir Output directory (on HDFS) - will be removed before each iteration.</span>
<a name="l00080"></a>00080 <span class="comment">#  @param[in] outputdir Output directory.</span>
<a name="l00081"></a>00081 <span class="comment">#  @param[in] inputmodel Name of model to start with.</span>
<a name="l00082"></a>00082 <span class="comment">#  @param[in] inputmodeliter Iteration number of input model (will start with next iteration).</span>
<a name="l00083"></a>00083 <span class="comment">#  @param[in] modelname Name of model file (new models written to --outputdir).</span>
<a name="l00084"></a>00084 <span class="comment">#  @param[in] maxiter Maximum number of iterations to run.</span>
<a name="l00085"></a>00085 <span class="comment">#  @param[in] numreducer Number of reducers.</span>
<a name="l00086"></a>00086 <span class="comment">#  @param[in] streamingloc Location under hadooproot for streaming jar file.</span>
<a name="l00087"></a>00087 <span class="comment">#  @param[in] libpath Specify the LD_LIBRARY_PATH for jobs run on Hadoop.</span>
<a name="l00088"></a>00088 <span class="comment">#  @param[in] splitsize Min size of each data split.</span>
<a name="l00089"></a>00089 <span class="comment">#  @param[in] tasktimeout Amount of time (seconds) for task to run</span>
<a name="l00090"></a>00090 <span class="comment">#                         (e.g., loading mode) before processing the next input record.</span>
<a name="l00091"></a>00091 <span class="comment">#  @param[in] force Force all data processing even if files exist.</span>
<a name="l00092"></a>00092 <span class="comment">#  @param[in] forcecompile Force precompilation if applicable.</span>
<a name="l00093"></a>00093 <span class="comment">#  @param[in] compilefeatures Compile features before processing.</span>
<a name="l00094"></a>00094 <span class="comment">#  @param[in] maxdecline Number of iterations in decline before stopping</span>
<a name="l00095"></a>00095 <span class="comment">#  @param[in] model-config Model configuration file</span>
<a name="l00096"></a>00096 <span class="comment">#  @param[in] train-config Feature extractor configuration file for training</span>
<a name="l00097"></a>00097 <span class="comment">#  @param[in] dev-config Feature extractor configuration file for dev</span>
<a name="l00098"></a>00098 
<a name="l00099"></a><a class="code" href="namespacehadoop-run.html#a70a90c07b0d4bb4dff7d8912d00fc5cf">00099</a> optParse = OptionParser()
<a name="l00100"></a>00100 optParse.add_option(<span class="stringliteral">&quot;-H&quot;</span>, <span class="stringliteral">&quot;--hadooproot&quot;</span>, dest=<span class="stringliteral">&quot;hadooproot&quot;</span>,
<a name="l00101"></a><a class="code" href="namespacehadoop-run.html#a3935d70733fa64c4ad3b0526a8ced2b9">00101</a>                     help = <span class="stringliteral">&quot;Location of hadoop installation.  If not set, &quot;</span> +
<a name="l00102"></a>00102                            <span class="stringliteral">&quot;the script will attempt to find it.&quot;</span>,
<a name="l00103"></a><a class="code" href="namespacehadoop-run.html#ac6a679f329c1e5434af2aabdfbfa610a">00103</a>                     default = <span class="stringliteral">&quot;&quot;</span>)
<a name="l00104"></a>00104 optParse.add_option(<span class="stringliteral">&quot;--refrbin&quot;</span>, dest=<span class="stringliteral">&quot;refrbin&quot;</span>,
<a name="l00105"></a>00105                     help = <span class="stringliteral">&quot;Location of the Reranker Framework (ReFr) bin directory&quot;</span>,
<a name="l00106"></a>00106                     default = defs.refrbin + <span class="stringliteral">&quot;/&quot;</span>)
<a name="l00107"></a>00107 optParse.add_option(<span class="stringliteral">&quot;-d&quot;</span>, <span class="stringliteral">&quot;--develdata&quot;</span>, dest=<span class="stringliteral">&quot;develdata&quot;</span>,
<a name="l00108"></a>00108                     help = <span class="stringliteral">&quot;Location of development data&quot;</span>)
<a name="l00109"></a>00109 optParse.add_option(<span class="stringliteral">&quot;-i&quot;</span>, <span class="stringliteral">&quot;--input&quot;</span>, dest=<span class="stringliteral">&quot;inputlist&quot;</span>,
<a name="l00110"></a>00110                     help = <span class="stringliteral">&quot;Location of input data on local FS&quot;</span>,
<a name="l00111"></a><a class="code" href="namespacehadoop-run.html#ab12a73005bb25b5a9695d359571a3b3e">00111</a>                     action = <span class="stringliteral">&quot;append&quot;</span>)
<a name="l00112"></a>00112 optParse.add_option(<span class="stringliteral">&quot;-I&quot;</span>, <span class="stringliteral">&quot;--hdfsinputdir&quot;</span>, dest=<span class="stringliteral">&quot;hdfsinputdir&quot;</span>,
<a name="l00113"></a>00113                     help = <span class="stringliteral">&quot;Location of input data on HDFS&quot;</span>)
<a name="l00114"></a>00114 optParse.add_option(<span class="stringliteral">&quot;-O&quot;</span>, <span class="stringliteral">&quot;--hdfsoutputdir&quot;</span>, dest=<span class="stringliteral">&quot;hdfsoutputdir&quot;</span>,
<a name="l00115"></a>00115                     help = <span class="stringliteral">&quot;Output directory (on HDFS) - will be removed before each iteration&quot;</span>)
<a name="l00116"></a>00116 optParse.add_option(<span class="stringliteral">&quot;-o&quot;</span>, <span class="stringliteral">&quot;--outputdir&quot;</span>, dest=<span class="stringliteral">&quot;outputdir&quot;</span>, help = <span class="stringliteral">&quot;Output directory &quot;</span>)
<a name="l00117"></a>00117 optParse.add_option(<span class="stringliteral">&quot;-M&quot;</span>, <span class="stringliteral">&quot;--inputmodel&quot;</span>, dest=<span class="stringliteral">&quot;inputmodel&quot;</span>,
<a name="l00118"></a>00118                     help = <span class="stringliteral">&quot;name of model to start with&quot;</span>)
<a name="l00119"></a>00119 optParse.add_option(<span class="stringliteral">&quot;-S&quot;</span>, <span class="stringliteral">&quot;--inputmodeliter&quot;</span>, dest=<span class="stringliteral">&quot;startiter&quot;</span>,
<a name="l00120"></a>00120                     help = <span class="stringliteral">&quot;Iteration number of input model (will start with next iteration)&quot;</span>,
<a name="l00121"></a>00121                     default = 0)
<a name="l00122"></a>00122 optParse.add_option(<span class="stringliteral">&quot;-m&quot;</span>, <span class="stringliteral">&quot;--modelname&quot;</span>, dest=<span class="stringliteral">&quot;modelname&quot;</span>,
<a name="l00123"></a>00123                     help = <span class="stringliteral">&quot;name of model file (new models written to --outputdir)&quot;</span>,
<a name="l00124"></a>00124                     default = <span class="stringliteral">&quot;model&quot;</span>)
<a name="l00125"></a>00125 optParse.add_option(<span class="stringliteral">&quot;--maxiter&quot;</span>, dest=<span class="stringliteral">&quot;maxiter&quot;</span>,
<a name="l00126"></a>00126                     help = <span class="stringliteral">&quot;maximum number of iterations to run&quot;</span>, default = 100)
<a name="l00127"></a>00127 optParse.add_option(<span class="stringliteral">&quot;--numreducer&quot;</span>, dest=<span class="stringliteral">&quot;numreducer&quot;</span>,
<a name="l00128"></a>00128                     help = <span class="stringliteral">&quot;Number of reducers.&quot;</span>, default = 1)
<a name="l00129"></a>00129 optParse.add_option(<span class="stringliteral">&quot;--streamingloc&quot;</span>, dest=<span class="stringliteral">&quot;streamingloc&quot;</span>,
<a name="l00130"></a>00130                     help = <span class="stringliteral">&quot;Location streaming jar file.  &quot;</span> +
<a name="l00131"></a>00131                            <span class="stringliteral">&quot;An empty string will force the script to attempt to find the streaming jar file.&quot;</span>,
<a name="l00132"></a>00132                     default = <span class="stringliteral">&quot;&quot;</span>)
<a name="l00133"></a>00133 <span class="comment">#                    default = &quot;contrib/streaming/hadoop-0.20.2-streaming.jar&quot;)</span>
<a name="l00134"></a>00134 optParse.add_option(<span class="stringliteral">&quot;--libpath&quot;</span>, dest=<span class="stringliteral">&quot;libpath&quot;</span>,
<a name="l00135"></a>00135                     help = <span class="stringliteral">&quot;Specify the LD_LIBRARY_PATH&quot;</span>,
<a name="l00136"></a>00136                     default = <span class="stringliteral">&quot;/usr/local/lib:&quot;</span>)
<a name="l00137"></a>00137 optParse.add_option(<span class="stringliteral">&quot;--splitsize&quot;</span>, dest=<span class="stringliteral">&quot;minsplitsize&quot;</span>,
<a name="l00138"></a>00138                     help = <span class="stringliteral">&quot;Min size of each data split&quot;</span>,
<a name="l00139"></a>00139                     default = 0)
<a name="l00140"></a>00140 optParse.add_option(<span class="stringliteral">&quot;--tasktimeout&quot;</span>, dest=<span class="stringliteral">&quot;tasktimeout&quot;</span>,
<a name="l00141"></a>00141                     help = <span class="stringliteral">&quot;Amount of time (seconds) for task to run (e.g., loading mode) &quot;</span> +
<a name="l00142"></a>00142                            <span class="stringliteral">&quot; before processing the next input record&quot;</span>,
<a name="l00143"></a>00143                     default = 0)
<a name="l00144"></a>00144 optParse.add_option(<span class="stringliteral">&quot;--force&quot;</span>, dest=<span class="stringliteral">&quot;force&quot;</span>,
<a name="l00145"></a>00145                     help = <span class="stringliteral">&quot;Force all data processing even if files exist&quot;</span>,
<a name="l00146"></a>00146                     action = <span class="stringliteral">&quot;store_true&quot;</span>,
<a name="l00147"></a>00147                     default = <span class="keyword">False</span>)
<a name="l00148"></a>00148 optParse.add_option(<span class="stringliteral">&quot;--forcecompile&quot;</span>, dest=<span class="stringliteral">&quot;forcecompile&quot;</span>,
<a name="l00149"></a>00149                     help = <span class="stringliteral">&quot;Force precompilation if applicable&quot;</span>,
<a name="l00150"></a>00150                     action = <span class="stringliteral">&quot;store_true&quot;</span>,
<a name="l00151"></a>00151                     default = <span class="keyword">False</span>)
<a name="l00152"></a>00152 optParse.add_option(<span class="stringliteral">&quot;--compilefeatures&quot;</span>, dest=<span class="stringliteral">&quot;compilefeatures&quot;</span>,
<a name="l00153"></a>00153                     help = <span class="stringliteral">&quot;Compile features before processing&quot;</span>,
<a name="l00154"></a>00154                     action = <span class="stringliteral">&quot;store_true&quot;</span>,
<a name="l00155"></a>00155                     default = <span class="keyword">False</span>)
<a name="l00156"></a>00156 optParse.add_option(<span class="stringliteral">&quot;--maxdecline&quot;</span>, dest=<span class="stringliteral">&quot;max_num_in_decline&quot;</span>,
<a name="l00157"></a>00157                     help = <span class="stringliteral">&quot;Number of iterations of an increasing loss before we stop training&quot;</span>,
<a name="l00158"></a>00158                     default = 5)
<a name="l00159"></a>00159 optParse.add_option(<span class="stringliteral">&quot;-v&quot;</span>, <span class="stringliteral">&quot;--verbosity&quot;</span>, dest=<span class="stringliteral">&quot;verbosity&quot;</span>,
<a name="l00160"></a>00160                     help = <span class="stringliteral">&quot;Set the verbosity of the debugging output&quot;</span>,
<a name="l00161"></a>00161                     default = 0)
<a name="l00162"></a>00162 optParse.add_option(<span class="stringliteral">&quot;--no-weighted-loss&quot;</span>, dest=<span class="stringliteral">&quot;weightedloss&quot;</span>,
<a name="l00163"></a>00163                     help = <span class="stringliteral">&quot;Do not use a weighted loss (e.g., when there is no reference)&quot;</span>,
<a name="l00164"></a>00164                     action = <span class="stringliteral">&quot;store_false&quot;</span>,
<a name="l00165"></a>00165                     default = <span class="keyword">True</span>)
<a name="l00166"></a>00166 optParse.add_option(<span class="stringliteral">&quot;--model-config&quot;</span>, dest=<span class="stringliteral">&quot;modelconfig&quot;</span>,
<a name="l00167"></a>00167                     help = <span class="stringliteral">&quot;Specifies the model configuration file&quot;</span>)
<a name="l00168"></a>00168 optParse.add_option(<span class="stringliteral">&quot;--train-config&quot;</span>, dest=<span class="stringliteral">&quot;trainconfig&quot;</span>,
<a name="l00169"></a>00169                     help = <span class="stringliteral">&quot;Specifies the feature extractor configuration &quot;</span> +
<a name="l00170"></a>00170                            <span class="stringliteral">&quot;file for training instances&quot;</span>)
<a name="l00171"></a>00171 optParse.add_option(<span class="stringliteral">&quot;--dev-config&quot;</span>, dest=<span class="stringliteral">&quot;devconfig&quot;</span>,
<a name="l00172"></a>00172                     help = <span class="stringliteral">&quot;Specifies the feature extractor configuration &quot;</span> +
<a name="l00173"></a>00173                            <span class="stringliteral">&quot;file for devtest instances&quot;</span>)
<a name="l00174"></a>00174 optParse.add_option(<span class="stringliteral">&quot;--mapperfiles&quot;</span>, dest=<span class="stringliteral">&quot;mapperfiles&quot;</span>,
<a name="l00175"></a>00175                     help = <span class="stringliteral">&quot;A list of files to be passed to the training mapper&quot;</span>,
<a name="l00176"></a>00176                     action = <span class="stringliteral">&quot;append&quot;</span>)
<a name="l00177"></a>00177 
<a name="l00178"></a>00178 (options, args) = optParse.parse_args()
<a name="l00179"></a>00179 
<a name="l00180"></a>00180 <span class="comment"># Check  input command line options.</span>
<a name="l00181"></a>00181 <span class="keywordflow">if</span> (<span class="keywordflow">not</span> options.inputlist):
<a name="l00182"></a>00182   optParse.error(<span class="stringliteral">&quot;--input option is required&quot;</span>)
<a name="l00183"></a>00183 <span class="keywordflow">if</span> (<span class="keywordflow">not</span> options.hdfsinputdir):
<a name="l00184"></a>00184   optParse.error(<span class="stringliteral">&quot;--hdfsinputdir option is required&quot;</span>)
<a name="l00185"></a>00185 <span class="keywordflow">if</span> (<span class="keywordflow">not</span> options.hdfsoutputdir):
<a name="l00186"></a>00186   optParse.error(<span class="stringliteral">&quot;--hdfsoutputdir option is required&quot;</span>)
<a name="l00187"></a>00187 <span class="keywordflow">if</span> (<span class="keywordflow">not</span> options.outputdir):
<a name="l00188"></a>00188   optParse.error(<span class="stringliteral">&quot;--outputdir option is required&quot;</span>)
<a name="l00189"></a>00189 
<a name="l00190"></a>00190 pyutil.DEBUG = options.verbosity
<a name="l00191"></a>00191 
<a name="l00192"></a>00192 <span class="comment"># Attempt to find the hadoop installation.</span>
<a name="l00193"></a><a class="code" href="namespacehadoop-run.html#af439498523072fc119fa5ed248f399e9">00193</a> hadooproot = options.hadooproot
<a name="l00194"></a>00194 <span class="keywordflow">if</span> <span class="keywordflow">not</span> hadooproot:
<a name="l00195"></a>00195   <span class="keywordflow">if</span> os.path.isdir(<span class="stringliteral">&quot;/usr/lib/hadoop&quot;</span>):
<a name="l00196"></a>00196     hadooproot = <span class="stringliteral">&quot;/usr/lib/hadoop&quot;</span>
<a name="l00197"></a>00197   <span class="keywordflow">elif</span> os.path.isdir(<span class="stringliteral">&quot;/usr/local/lib/hadoop&quot;</span>):
<a name="l00198"></a>00198     hadooproot = <span class="stringliteral">&quot;/usr/local/lib/hadoop&quot;</span>
<a name="l00199"></a>00199   <span class="keywordflow">elif</span> os.path.isdir(<span class="stringliteral">&quot;/opt/lib/hadoop&quot;</span>):
<a name="l00200"></a>00200     hadooproot = <span class="stringliteral">&quot;/opt/lib/hadoop&quot;</span>
<a name="l00201"></a>00201   <span class="keywordflow">else</span>:
<a name="l00202"></a>00202     pyutil.printError(10, <span class="stringliteral">&quot;Unable to find the hadoop installation.  &quot;</span> +
<a name="l00203"></a>00203                       <span class="stringliteral">&quot;Please specify with --hadooproot.&quot;</span>)
<a name="l00204"></a>00204 
<a name="l00205"></a><a class="code" href="namespacehadoop-run.html#af780d860089b1ed86b289118d7bb8e09">00205</a> streamingloc = options.streamingloc
<a name="l00206"></a>00206 <span class="keywordflow">if</span> <span class="keywordflow">not</span> streamingloc:
<a name="l00207"></a>00207   <span class="keywordflow">if</span> os.path.exists(hadooproot + <span class="stringliteral">&quot;/hadoop-streaming.jar&quot;</span>):
<a name="l00208"></a>00208     streamingloc = hadooproot + <span class="stringliteral">&quot;/hadoop-streaming.jar&quot;</span>
<a name="l00209"></a>00209   <span class="keywordflow">else</span>:
<a name="l00210"></a><a class="code" href="namespacehadoop-run.html#a3bec14631fcfd1766537753d2de16d78">00210</a>     tmppath = hadooproot + <span class="stringliteral">&quot;/contrib/streaming&quot;</span>
<a name="l00211"></a>00211     <span class="keywordflow">if</span> <span class="keywordflow">not</span> os.path.isdir(tmppath):
<a name="l00212"></a>00212       pyutil.printError(10, hadooproot + <span class="stringliteral">&quot;/contrib/streaming does not exist.  &quot;</span> +
<a name="l00213"></a>00213                         <span class="stringliteral">&quot;Please specify location of hadoop streaming jar file with &quot;</span> +
<a name="l00214"></a>00214                         <span class="stringliteral">&quot;--streamingloc&quot;</span>)
<a name="l00215"></a><a class="code" href="namespacehadoop-run.html#a9c952cd08d17abdf2b70ff7db0124ca2">00215</a>     streamingjar = glob.glob(tmppath + <span class="stringliteral">&quot;/hadoop-streaming*.jar&quot;</span>)
<a name="l00216"></a>00216     <span class="keywordflow">if</span> len(streamingjar) != 1:
<a name="l00217"></a>00217       pyutil.printError(10, <span class="stringliteral">&quot;Unable to find streaming jar, please specify with --streamingloc&quot;</span>)
<a name="l00218"></a>00218     streamingloc = streamingjar[0]
<a name="l00219"></a>00219 
<a name="l00220"></a>00220 <span class="comment"># Sanity check of Directories.</span>
<a name="l00221"></a>00221 <span class="keywordflow">if</span> (<span class="keywordflow">not</span> os.path.isdir(hadooproot) <span class="keywordflow">or</span>
<a name="l00222"></a>00222     <span class="keywordflow">not</span> os.path.exists(hadooproot + <span class="stringliteral">&quot;/bin/hadoop&quot;</span>)):
<a name="l00223"></a>00223   optParse.error(<span class="stringliteral">&quot;--hadooproot must be the base directory of the &quot;</span> +
<a name="l00224"></a>00224                  <span class="stringliteral">&quot;hadoop installation&quot;</span>)
<a name="l00225"></a>00225 
<a name="l00226"></a>00226 <span class="keywordflow">if</span> (<span class="keywordflow">not</span> os.path.exists(streamingloc)):
<a name="l00227"></a>00227   optParse.error(<span class="stringliteral">&quot;--streamingloc does not specify a valid jar files for the &quot;</span> + 
<a name="l00228"></a>00228                  <span class="stringliteral">&quot;streaming interface (checked: &quot;</span> + streamingloc)
<a name="l00229"></a>00229 
<a name="l00230"></a>00230 <span class="keywordflow">if</span> (<span class="keywordflow">not</span> os.path.isdir(options.refrbin) <span class="keywordflow">or</span>
<a name="l00231"></a>00231     <span class="keywordflow">not</span> os.path.exists(options.refrbin + <span class="stringliteral">&quot;/run-model&quot;</span>)):
<a name="l00232"></a>00232   optParse.error(<span class="stringliteral">&quot;--refrbin directory must be the Reranker Framework bin &quot;</span> +
<a name="l00233"></a>00233                  <span class="stringliteral">&quot;directory.  Checked: &quot;</span> + options.refrbin)
<a name="l00234"></a>00234 
<a name="l00235"></a>00235 
<a name="l00236"></a>00236 <span class="comment">## Collect input filenames.</span>
<a name="l00237"></a><a class="code" href="namespacehadoop-run.html#a0cbddcac92897f1138fbf5874a467125">00237</a> filenames = []
<a name="l00238"></a>00238 <span class="keywordflow">for</span> inputstring <span class="keywordflow">in</span> options.inputlist:
<a name="l00239"></a>00239   <span class="keywordflow">for</span> tmpfile <span class="keywordflow">in</span> inputstring.split():
<a name="l00240"></a>00240     filenames += glob.glob(tmpfile)
<a name="l00241"></a>00241 
<a name="l00242"></a>00242 <span class="keywordflow">for</span> input <span class="keywordflow">in</span> filenames:
<a name="l00243"></a>00243   pyutil.printInfo(<span class="stringliteral">&quot;Input file: &quot;</span> + input)
<a name="l00244"></a>00244   <span class="keywordflow">if</span> (<span class="keywordflow">not</span> os.path.exists(input)):
<a name="l00245"></a>00245     pyutil.printError(130, <span class="stringliteral">&quot;Input file not found: &quot;</span> + input)
<a name="l00246"></a>00246 
<a name="l00247"></a>00247 <span class="keywordflow">if</span> (options.develdata <span class="keywordflow">and</span> <span class="keywordflow">not</span> os.path.exists(options.develdata)):
<a name="l00248"></a>00248   pyutil.printError(131, <span class="stringliteral">&quot;Specified devel data file not found: &quot;</span> + options.develdata)
<a name="l00249"></a>00249 
<a name="l00250"></a>00250 <span class="comment">## Create output directory if it does not exist.</span>
<a name="l00251"></a>00251 <span class="keywordflow">if</span> (<span class="keywordflow">not</span> os.path.isdir(options.outputdir)):
<a name="l00252"></a>00252   os.makedirs(options.outputdir)
<a name="l00253"></a>00253 
<a name="l00254"></a>00254 <span class="comment">## @var hdproc</span>
<a name="l00255"></a>00255 <span class="comment">#  HadoopInterface object used to process all Hadoop MR utils.</span>
<a name="l00256"></a><a class="code" href="namespacehadoop-run.html#a0147327a948ba744feacf4311d120624">00256</a> hdproc = hadooputil.HadoopInterface(hadooproot,
<a name="l00257"></a>00257                                     streamingloc,
<a name="l00258"></a>00258                                     options.minsplitsize,
<a name="l00259"></a>00259                                     options.tasktimeout, 
<a name="l00260"></a>00260                                     options.libpath)
<a name="l00261"></a>00261 
<a name="l00262"></a>00262 <span class="comment">## Configuration for training options</span>
<a name="l00263"></a>00263 <span class="comment"># @var train_map_options</span>
<a name="l00264"></a>00264 <span class="comment"># Options passed to the mapper binary.</span>
<a name="l00265"></a><a class="code" href="namespacehadoop-run.html#a9835c3ab1482d09301f4b658c28fe2cc">00265</a> train_map_options = <span class="stringliteral">&quot;&quot;</span>
<a name="l00266"></a>00266 <span class="comment"># @var train_files</span>
<a name="l00267"></a>00267 <span class="comment"># string containing &#39;-file filename&#39; for all external files.</span>
<a name="l00268"></a><a class="code" href="namespacehadoop-run.html#a7dad76e7434cf8a25ac583bd9606aa12">00268</a> train_files = <span class="stringliteral">&quot;&quot;</span>
<a name="l00269"></a>00269 <span class="keywordflow">if</span> (options.modelconfig):
<a name="l00270"></a>00270   train_map_options += <span class="stringliteral">&quot; --model-config ./&quot;</span> + os.path.basename(options.modelconfig)
<a name="l00271"></a>00271   train_files += <span class="stringliteral">&quot; -file &quot;</span> + options.modelconfig
<a name="l00272"></a>00272 <span class="keywordflow">if</span> (options.trainconfig):
<a name="l00273"></a>00273   train_map_options += <span class="stringliteral">&quot; --train-config ./&quot;</span> + os.path.basename(options.trainconfig)
<a name="l00274"></a>00274   train_files += <span class="stringliteral">&quot; -file &quot;</span> + options.trainconfig
<a name="l00275"></a><a class="code" href="namespacehadoop-run.html#a676e472b0b6770ad4b06a6582b24a5a0">00275</a> train_map = (<span class="stringliteral">&quot;&#39;&quot;</span> + options.refrbin + <span class="stringliteral">&quot;/run-model&quot;</span> + train_map_options +
<a name="l00276"></a>00276             <span class="stringliteral">&quot; --train - --mapper -m -&quot;</span>)
<a name="l00277"></a>00277 
<a name="l00278"></a>00278 
<a name="l00279"></a>00279 <span class="keywordflow">if</span> options.mapperfiles:
<a name="l00280"></a>00280   <span class="keywordflow">for</span> mapperfile <span class="keywordflow">in</span> options.mapperfiles:
<a name="l00281"></a>00281     train_files += <span class="stringliteral">&quot; -file &quot;</span> + mapperfile
<a name="l00282"></a>00282 
<a name="l00283"></a>00283 <span class="comment">## Shortcuts to command-line programs.</span>
<a name="l00284"></a><a class="code" href="namespacehadoop-run.html#a66112611af16f304a90d19eeb7807faf">00284</a> extractsym_map = <span class="stringliteral">&quot;&#39;&quot;</span> + options.refrbin + <span class="stringliteral">&quot;/compile-features -i -&#39;&quot;</span>
<a name="l00285"></a><a class="code" href="namespacehadoop-run.html#a9f3638a00a52bea919f962b4e3dcbf22">00285</a> compiledata_map = <span class="stringliteral">&quot;&#39;&quot;</span> + options.refrbin + <span class="stringliteral">&quot;/compile-features -i - --clear-raw --input-symbols &quot;</span>
<a name="l00286"></a><a class="code" href="namespacehadoop-run.html#a8dd932a372d394533574108c5a6891ff">00286</a> train_reduce = options.refrbin + <span class="stringliteral">&quot;/model-merge-reducer&quot;</span>
<a name="l00287"></a><a class="code" href="namespacehadoop-run.html#a466b974ea2319c752ee42a6c7fc26e7d">00287</a> train_recomb = options.refrbin + <span class="stringliteral">&quot;/model-combine-shards&quot;</span>
<a name="l00288"></a><a class="code" href="namespacehadoop-run.html#a7cd180be7125e1b036878f420c5f8a6b">00288</a> symbol_recomb = options.refrbin + <span class="stringliteral">&quot;/model-combine-symbols&quot;</span>
<a name="l00289"></a><a class="code" href="namespacehadoop-run.html#a50dc78a9f0ff850a3cc00a02f5584c43">00289</a> pipeeval_options = <span class="stringliteral">&quot;&quot;</span>
<a name="l00290"></a>00290 <span class="keywordflow">if</span> (options.devconfig):
<a name="l00291"></a>00291   pipeeval_options = <span class="stringliteral">&quot; --dev-config &quot;</span> + options.devconfig
<a name="l00292"></a><a class="code" href="namespacehadoop-run.html#a56e2a3b19a01270934cd4f828db159d0">00292</a> pipeeval = options.refrbin + <span class="stringliteral">&quot;/piped-model-evaluator&quot;</span> + pipeeval_options
<a name="l00293"></a>00293 
<a name="l00294"></a><a class="code" href="namespacehadoop-run.html#aac9abae550160bbf25c42395852a2730">00294</a> hadoop_inputfiles = <span class="stringliteral">&quot;&quot;</span>
<a name="l00295"></a>00295 <span class="keywordflow">for</span> inputfile <span class="keywordflow">in</span> filenames:
<a name="l00296"></a>00296   hadoop_inputfiles += hdproc.CheckInputFile(inputfile, options.hdfsinputdir,
<a name="l00297"></a>00297                                              options.outputdir, options.force,
<a name="l00298"></a>00298                                              <span class="keyword">True</span>)
<a name="l00299"></a>00299                                              <span class="comment">#not options.compilefeatures)</span>
<a name="l00300"></a>00300 
<a name="l00301"></a><a class="code" href="namespacehadoop-run.html#a9342c00affa15bcdc88429e2d4d69e83">00301</a> precompdevfile = options.develdata
<a name="l00302"></a>00302 
<a name="l00303"></a>00303 <span class="comment">## Precompilation of string features.</span>
<a name="l00304"></a>00304 <span class="comment">#  Optional - reduces the size of the models, but takes time to create initial precompiled data.</span>
<a name="l00305"></a>00305 <span class="comment">#</span>
<a name="l00306"></a>00306 <span class="keywordflow">if</span> (options.compilefeatures):
<a name="l00307"></a>00307   pyutil.printInfo(<span class="stringliteral">&quot;Precompiling feature indices&quot;</span>)
<a name="l00308"></a>00308   <span class="keywordflow">if</span> (options.develdata):
<a name="l00309"></a>00309     precompdevfile = options.outputdir + <span class="stringliteral">&quot;/&quot;</span>
<a name="l00310"></a>00310     precompdevfile += os.path.basename(options.develdata).replace(<span class="stringliteral">&quot;.gz&quot;</span>,<span class="stringliteral">&quot;&quot;</span>)
<a name="l00311"></a>00311     precompdevfile += <span class="stringliteral">&quot;.compiled.gz&quot;</span>
<a name="l00312"></a><a class="code" href="namespacehadoop-run.html#ae78a065b3eb608a5856211edb4ee7a1a">00312</a>   symbol_dir = options.hdfsinputdir + <span class="stringliteral">&quot;/Symbols/&quot;</span>
<a name="l00313"></a><a class="code" href="namespacehadoop-run.html#a2f9e77fd2ea474bf35a773b8bf469d87">00313</a>   precomp_dir = options.hdfsinputdir + <span class="stringliteral">&quot;/Precompiled/&quot;</span>
<a name="l00314"></a><a class="code" href="namespacehadoop-run.html#a82ea895544777ec4d6e3bc944fa0c137">00314</a>   precompdev_dir = options.hdfsinputdir + <span class="stringliteral">&quot;/PrecompiledDev/&quot;</span>
<a name="l00315"></a>00315 
<a name="l00316"></a>00316   <span class="comment"># Extract all features.</span>
<a name="l00317"></a>00317   <span class="keywordflow">if</span> (hdproc.CheckRemoveHDir(precomp_dir, (options.force <span class="keywordflow">or</span> options.forcecompile)) <span class="keywordflow">or</span>
<a name="l00318"></a>00318       options.forcecompile):
<a name="l00319"></a><a class="code" href="namespacehadoop-run.html#a36c4796df68329c6bd8fcdd6b45af5ff">00319</a>     addl_data = <span class="stringliteral">&quot;&quot;</span>
<a name="l00320"></a>00320     <span class="keywordflow">if</span> (options.develdata):
<a name="l00321"></a>00321       addl_data = hdproc.CheckInputFile(options.develdata, options.hdfsinputdir,
<a name="l00322"></a>00322                                         options.outputdir, options.force,
<a name="l00323"></a>00323                                         <span class="keyword">True</span>)
<a name="l00324"></a>00324       pyutil.printInfo(<span class="stringliteral">&quot;Dev data file: &quot;</span> + addl_data)
<a name="l00325"></a>00325     <span class="comment"># Copy data to HDFS</span>
<a name="l00326"></a><a class="code" href="namespacehadoop-run.html#a5564609b5b72f32ec12a12fc1540dae8">00326</a>     symfile_name = options.outputdir + <span class="stringliteral">&quot;/&quot;</span> + options.modelname + <span class="stringliteral">&quot;.symbols.gz&quot;</span>
<a name="l00327"></a>00327     <span class="keywordflow">if</span> (<span class="keywordflow">not</span> os.path.exists(symfile_name)):
<a name="l00328"></a>00328       hdproc.CheckRemoveHDir(symbol_dir, <span class="keyword">True</span>)
<a name="l00329"></a>00329       hdproc.RunMR(hadoop_inputfiles + addl_data, symbol_dir, 100,
<a name="l00330"></a>00330                    <span class="stringliteral">&quot;&#39;&quot;</span> + train_reduce +  <span class="stringliteral">&quot; -S&#39;&quot;</span>, extractsym_map, <span class="stringliteral">&quot;&quot;</span>)
<a name="l00331"></a>00331       <span class="comment"># Concatenate symbols to local symbol table.</span>
<a name="l00332"></a>00332       hdproc.CatPipe(symbol_dir + <span class="stringliteral">&quot;/part-*&quot;</span>, symbol_recomb + <span class="stringliteral">&quot; -o &quot;</span> + symfile_name)
<a name="l00333"></a>00333 
<a name="l00334"></a>00334     <span class="comment"># Convert the original input data.</span>
<a name="l00335"></a>00335     hdproc.RunMR(hadoop_inputfiles, precomp_dir, 0, <span class="stringliteral">&quot;&quot;</span>,
<a name="l00336"></a>00336                  compiledata_map + <span class="stringliteral">&quot;./&quot;</span> + os.path.basename(symfile_name) +
<a name="l00337"></a>00337                  <span class="stringliteral">&quot;&#39; -file &quot;</span> + symfile_name, <span class="stringliteral">&quot;&quot;</span>)
<a name="l00338"></a>00338     <span class="keywordflow">if</span> (options.develdata):
<a name="l00339"></a>00339       hdproc.CheckRemoveHDir(precompdev_dir, <span class="keyword">True</span>)
<a name="l00340"></a>00340       hdproc.RunMR(addl_data, precompdev_dir, 0, <span class="stringliteral">&quot;&quot;</span>,
<a name="l00341"></a>00341                    compiledata_map + <span class="stringliteral">&quot;./&quot;</span> + os.path.basename(symfile_name) +
<a name="l00342"></a>00342                    <span class="stringliteral">&quot;&#39; -file &quot;</span> + symfile_name, <span class="stringliteral">&quot;&quot;</span>)
<a name="l00343"></a>00343       hdproc.CatPipe(precompdev_dir + <span class="stringliteral">&quot;/part-*&quot;</span>, <span class="stringliteral">&quot; gzip -c &gt; &quot;</span> + precompdevfile)
<a name="l00344"></a>00344       hdproc.CheckRemoveHDir(precompdev_dir, <span class="keyword">True</span>)
<a name="l00345"></a>00345   hadoop_inputfiles = <span class="stringliteral">&quot; --input &quot;</span> + precomp_dir
<a name="l00346"></a>00346 
<a name="l00347"></a>00347 <span class="comment">#------------</span>
<a name="l00348"></a>00348 <span class="comment"># Run Hadoop Iterative MapReduce</span>
<a name="l00349"></a>00349 <span class="comment"># (Iterative Parameter Mixtures)</span>
<a name="l00350"></a>00350 <span class="comment">#------------</span>
<a name="l00351"></a><a class="code" href="namespacehadoop-run.html#a710f4968870f4e191ea575db56a192e7">00351</a> cur_model = options.inputmodel
<a name="l00352"></a><a class="code" href="namespacehadoop-run.html#a7ef5e1b38b72801a5fc43f89e933cc90">00352</a> converged = <span class="keyword">False</span>
<a name="l00353"></a>00353 
<a name="l00354"></a><a class="code" href="namespacehadoop-run.html#acfb0d4d3206b9770b9d7d3fc4040a401">00354</a> iteration = int(options.startiter)
<a name="l00355"></a><a class="code" href="namespacehadoop-run.html#a5e1715f08c5580daaa35b07acac7fd8f">00355</a> prev_loss = -9999
<a name="l00356"></a><a class="code" href="namespacehadoop-run.html#a0b1f35d422c2531b6aac501a4a0fbe34">00356</a> loss_history = []
<a name="l00357"></a><a class="code" href="namespacehadoop-run.html#af6f26eafc2026ce792a3037cc01146c8">00357</a> num_in_decline = 0
<a name="l00358"></a><a class="code" href="namespacehadoop-run.html#ae00c11b0ec784431694519d4ef5035d7">00358</a> best_loss_index = 0
<a name="l00359"></a>00359 <span class="keywordflow">if</span> (options.develdata):
<a name="l00360"></a><a class="code" href="namespacehadoop-run.html#a1bb9db73c46fb39ab3c03026a461d1c8">00360</a>   eval_cmd = pipeeval + <span class="stringliteral">&quot; -d &quot;</span> + precompdevfile
<a name="l00361"></a>00361   <span class="keywordflow">if</span> (<span class="keywordflow">not</span> options.weightedloss):
<a name="l00362"></a>00362     eval_cmd += <span class="stringliteral">&quot; --use-weighted-loss false&quot;</span>
<a name="l00363"></a><a class="code" href="namespacehadoop-run.html#a96482b5b1d7e80ee6061d559e622a402">00363</a>   evalio = pyutil.CommandIO(eval_cmd)
<a name="l00364"></a>00364 
<a name="l00365"></a>00365 <span class="keywordflow">while</span> (<span class="keywordflow">not</span> converged <span class="keywordflow">and</span> iteration &lt; int(options.maxiter)):
<a name="l00366"></a>00366   iteration += 1
<a name="l00367"></a>00367   pyutil.printInfo(<span class="stringliteral">&quot;Training iteration: &quot;</span> + str(iteration))
<a name="l00368"></a>00368   <span class="comment"># Make sure the output directory is </span>
<a name="l00369"></a>00369   <span class="comment"># Run the MapReducer Job</span>
<a name="l00370"></a>00370   hdproc.CheckRemoveHDir(options.hdfsoutputdir, <span class="keyword">True</span>)
<a name="l00371"></a>00371 
<a name="l00372"></a>00372   <span class="comment"># Create the MR string and run the MR</span>
<a name="l00373"></a><a class="code" href="namespacehadoop-run.html#a29c7856a0d1d420fa831801bd72aaac2">00373</a>   iter_str = <span class="stringliteral">&quot;&#39;&quot;</span>
<a name="l00374"></a>00374   <span class="keywordflow">if</span> (cur_model):
<a name="l00375"></a>00375     iter_str = <span class="stringliteral">&quot; -i ./&quot;</span> + os.path.basename(cur_model) + <span class="stringliteral">&quot;&#39; -file &quot;</span> + cur_model
<a name="l00376"></a>00376 
<a name="l00377"></a>00377   hdproc.RunMR(hadoop_inputfiles, options.hdfsoutputdir, options.numreducer,
<a name="l00378"></a>00378                train_reduce, train_map + iter_str + train_files, <span class="stringliteral">&quot;&quot;</span>)
<a name="l00379"></a>00379 
<a name="l00380"></a>00380   <span class="comment"># Copy data from the mapreduce to the local file-system</span>
<a name="l00381"></a><a class="code" href="namespacehadoop-run.html#a88eb645c6d5e451590df140f0a1ce8fd">00381</a>   model_output = options.outputdir + <span class="stringliteral">&quot;/&quot;</span> + options.modelname + <span class="stringliteral">&quot;_iter&quot;</span> + str(iteration) + <span class="stringliteral">&quot;.gz&quot;</span>
<a name="l00382"></a><a class="code" href="namespacehadoop-run.html#a696c38c00767bd4846d4a8f312f521ea">00382</a>   proc_cmd = train_recomb + <span class="stringliteral">&quot; -o &quot;</span> + model_output
<a name="l00383"></a>00383   hdproc.CatPipe(options.hdfsoutputdir + <span class="stringliteral">&quot;/part-*&quot;</span>, proc_cmd)
<a name="l00384"></a>00384 
<a name="l00385"></a><a class="code" href="namespacehadoop-run.html#ab913a16649b25375ec6afac29d20b21b">00385</a>   devtest_score = 0
<a name="l00386"></a>00386   <span class="keywordflow">if</span> (options.develdata):
<a name="l00387"></a>00387     devtest_score = evalio.sendreceive(model_output)
<a name="l00388"></a>00388 
<a name="l00389"></a><a class="code" href="namespacehadoop-run.html#acccc808b1c10830f49ff1b785c460ddd">00389</a>   loss = 0.0;
<a name="l00390"></a>00390   <span class="keywordflow">if</span> (devtest_score):
<a name="l00391"></a>00391     <span class="comment"># Get the score returned on STDOUT</span>
<a name="l00392"></a>00392     loss = float(devtest_score)
<a name="l00393"></a>00393   <span class="keywordflow">if</span> (<span class="keywordflow">not</span> loss_history):
<a name="l00394"></a>00394     loss_history.append(loss)
<a name="l00395"></a>00395     pyutil.printInfo(<span class="stringliteral">&quot;Loss for iteration &quot;</span> + str(iteration) + <span class="stringliteral">&quot;: &quot;</span> + str(loss))
<a name="l00396"></a>00396   <span class="keywordflow">else</span>:
<a name="l00397"></a><a class="code" href="namespacehadoop-run.html#a44e02ad404f4fa147620724c270dd9a6">00397</a>     diff = loss_history[-1] - loss
<a name="l00398"></a>00398     <span class="keywordflow">if</span> (loss &lt; loss_history[best_loss_index]):
<a name="l00399"></a>00399       <span class="comment"># Loss is appended below:</span>
<a name="l00400"></a>00400       best_loss_index = len(loss_history)
<a name="l00401"></a>00401     <span class="keywordflow">if</span> (loss &gt; loss_history[-1]):
<a name="l00402"></a>00402       num_in_decline += 1
<a name="l00403"></a>00403     <span class="keywordflow">else</span>:
<a name="l00404"></a>00404       num_in_decline = 0
<a name="l00405"></a>00405     <span class="comment"># Append loss to end of history.</span>
<a name="l00406"></a>00406     loss_history.append(loss)
<a name="l00407"></a>00407     pyutil.printInfo(<span class="stringliteral">&quot;Loss for iteration &quot;</span> + str(iteration) + <span class="stringliteral">&quot;: &quot;</span> + str(loss) +
<a name="l00408"></a>00408                      <span class="stringliteral">&quot; loss-delta: &quot;</span> + str(diff))
<a name="l00409"></a>00409     <span class="keywordflow">if</span> (num_in_decline &lt; options.max_num_in_decline):
<a name="l00410"></a>00410       pyutil.printInfo(<span class="stringliteral">&quot;Continuing to train as number epochs in decline is: &quot;</span> +
<a name="l00411"></a>00411                        str(num_in_decline) + <span class="stringliteral">&quot;, which is less than &quot;</span> +
<a name="l00412"></a>00412                        str(options.max_num_in_decline))
<a name="l00413"></a>00413   <span class="comment"># if not done...</span>
<a name="l00414"></a>00414   cur_model = model_output
<a name="l00415"></a>00415 pyutil.printInfo(<span class="stringliteral">&quot;Best model is from iteration: &quot;</span> + str(best_loss_index + 1) +
<a name="l00416"></a>00416                  <span class="stringliteral">&quot; with a devset loss of: &quot;</span> + str(loss_history[best_loss_index]))
</pre></div></div><!-- contents -->
<!-- window showing the filter options -->
<!-- Doxygen search widget: dropdown of symbol categories (All, Classes, Namespaces, Files,
     Functions, Variables, ...) used to filter search results. The mouse/key handlers delegate
     to the global "searchBox" object created in the page header from search/search.js. -->
<div id="MSearchSelectWindow"
     onmouseover="return searchBox.OnSearchSelectShow()"
     onmouseout="return searchBox.OnSearchSelectHide()"
     onkeydown="return searchBox.OnSearchSelectKey(event)">
<a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(0)"><span class="SelectionMark">&#160;</span>All</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(1)"><span class="SelectionMark">&#160;</span>Classes</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(2)"><span class="SelectionMark">&#160;</span>Namespaces</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(3)"><span class="SelectionMark">&#160;</span>Files</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(4)"><span class="SelectionMark">&#160;</span>Functions</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(5)"><span class="SelectionMark">&#160;</span>Variables</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(6)"><span class="SelectionMark">&#160;</span>Typedefs</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(7)"><span class="SelectionMark">&#160;</span>Enumerations</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(8)"><span class="SelectionMark">&#160;</span>Enumerator</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(9)"><span class="SelectionMark">&#160;</span>Friends</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(10)"><span class="SelectionMark">&#160;</span>Defines</a></div>

<!-- iframe showing the search results (closed by default) -->
<!-- Hidden results container; the named iframe is presumably the target that
     search/search.js fills with result pages when a query is entered (generated
     markup - NOTE(review): behavior defined in search.js, not visible here). -->
<div id="MSearchResultsWindow">
<iframe src="javascript:void(0)" frameborder="0" 
        name="MSearchResults" id="MSearchResults">
</iframe>
</div>



<!-- Page footer: Doxygen attribution with generation timestamp and generator version (1.7.6.1). -->
<hr class="footer"/><address class="footer"><small>
Generated on Tue Apr 9 2013 11:56:29 for Reranker Framework (ReFr) by &#160;<a href="http://www.doxygen.org/index.html">
<img class="footer" src="doxygen.png" alt="doxygen"/>
</a> 1.7.6.1
</small></address>

</body>
</html>
