/**
 *  Copyright 2009 Jean-Daniel Cryans
 *   
 *  Licensed under the Apache License, Version 2.0 (the "License"); 
 *  you may not use this file except in compliance with the License. 
 *  You may obtain a copy of the License at
 *   
 *    http://www.apache.org/licenses/LICENSE-2.0
 *     
 *  Unless required by applicable law or agreed to in writing, software 
 *  distributed under the License is distributed on an "AS IS" BASIS, 
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
 *  See the License for the specific language governing permissions and 
 *  limitations under the License. 
 */
package ca.etsmtl.lasi.hbasewikipedialoader;

import java.io.IOException;
import java.io.StringReader;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.io.BatchUpdate;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapred.TableOutputFormat;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.streaming.StreamInputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import org.w3c.dom.CharacterData;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;

import com.sun.org.apache.xerces.internal.parsers.DOMParser;

/**
 * Import a Wikipedia dump-formatted XML file into HBase.
 * 
 * @author jdcryans
 * 
 */
public class HBaseWikipediaLoader {

  public static final String NAME = "hbase-wikipedia-loader";
  public static final String TABLE = "wikipedia";

  public static enum Counters {
    MAPPED_WIKI_ARTICLES, FAILED_WIKI_ARTICLES
  };

  public static class Map extends MapReduceBase implements
      Mapper<Text, Text, ImmutableBytesWritable, BatchUpdate> {

    DOMParser parser = new DOMParser();

    public void map(Text key, Text val,
        OutputCollector<ImmutableBytesWritable, BatchUpdate> collector,
        Reporter rep) throws IOException {
      String xml = key.toString();
      try {
        parser.parse(new InputSource(new StringReader(xml)));
        Document doc = parser.getDocument();
        NodeList nodes = doc.getChildNodes();
        for (int i = 0; i < nodes.getLength(); i++) {
          Element element = (Element) nodes.item(i);

          NodeList titleNodes = element.getElementsByTagName("title");
          String title = getCharacterDataFromElement((Element) titleNodes
              .item(0));

          NodeList idNodes = element.getElementsByTagName("id");
          String id = getCharacterDataFromElement((Element) idNodes.item(0));

          NodeList textNodes = ((Element) element.getElementsByTagName(
              "revision").item(0)).getElementsByTagName("text");
          String text = getCharacterDataFromElement((Element) textNodes.item(0));

          BatchUpdate bu = new BatchUpdate(title);
          bu.put("info:id", Bytes.toBytes(id));
          bu.put("info:text", Bytes.toBytes(text));

          rep.incrCounter(Counters.MAPPED_WIKI_ARTICLES, 1);

          collector.collect(new ImmutableBytesWritable(Bytes.toBytes(title)),
              bu);
        }

      } catch (Exception e) {
        e.printStackTrace();
        rep.incrCounter(Counters.FAILED_WIKI_ARTICLES, 1);
      }
    }

    public static String getCharacterDataFromElement(Element e) {
      Node child = e.getFirstChild();
      if (child instanceof CharacterData) {
        CharacterData cd = (CharacterData) child;
        return cd.getData();
      }
      return "?";
    }

  }

  /**
   * Sets up the actual job.
   * 
   * @param conf
   *          The current configuration.
   * @param args
   *          The command line parameters.
   * @return The newly created job.
   * @throws IOException
   *           When setting up the job fails.
   */
  public static JobConf createSubmittableJob(HBaseConfiguration conf,
      String[] args) throws IOException {
    JobConf jobConf = new JobConf(conf, HBaseWikipediaLoader.class);
    jobConf.setJobName(NAME);

    // Stream stuff
    jobConf.set("stream.recordreader.class",
        "org.apache.hadoop.streaming.StreamXmlRecordReader");
    jobConf.set("stream.recordreader.begin", "<page>");
    jobConf.set("stream.recordreader.end", "</page>");

    jobConf.setSpeculativeExecution(false);

    jobConf.setMapOutputKeyClass(ImmutableBytesWritable.class);
    jobConf.setMapOutputValueClass(BatchUpdate.class);

    jobConf.setMapperClass(Map.class);
    
    jobConf.setNumReduceTasks(0);

    jobConf.setInputFormat(StreamInputFormat.class);
    jobConf.setOutputFormat(TableOutputFormat.class);
    jobConf.set(TableOutputFormat.OUTPUT_TABLE, TABLE);
    jobConf.setOutputKeyClass(ImmutableBytesWritable.class);
    jobConf.setOutputValueClass(BatchUpdate.class);

    StreamInputFormat.setInputPaths(jobConf, new Path(args[0]));
    FileOutputFormat.setOutputPath(jobConf, new Path("/tmp/" + NAME + "-"
        + System.currentTimeMillis()));

    return jobConf;

  }

  /**
   * Main entry point.
   * 
   * @param args
   *          The command line parameters.
   * @throws Exception
   *           When running the job fails.
   */
  public static void main(String[] args) throws Exception {
    HBaseConfiguration conf = new HBaseConfiguration();
    String[] otherArgs = new GenericOptionsParser(conf, args)
        .getRemainingArgs();
    if (otherArgs.length < 1) {
      System.err.println("ERROR: Wrong number of parameters: " + args.length);
      System.err.println("Usage: " + NAME + " <inputdir>");
      System.exit(-1);
    }
    JobConf jobConf = createSubmittableJob(conf, otherArgs);
    RunningJob job = JobClient.runJob(jobConf);
    job.waitForCompletion();
    System.exit(job.isSuccessful() ? 0 : 1);
  }

}
