package org.gbif.metrics.cube.occurrence.backfill;

import org.gbif.metrics.cube.mapred.OccurrenceWritable;

import java.io.IOException;

import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.io.IntWritable;

/**
 * Reads the occurrence HBase table, emitting each row as an {@link OccurrenceWritable}
 * key with a count of one, for downstream aggregation into the occurrence cube.
 */
public class TableReaderMapper extends TableMapper<OccurrenceWritable, IntWritable> {

  // Constant count of 1; static is safe since the value is only ever read, never mutated.
  private static final IntWritable ONE = new IntWritable(1);

  /**
   * Converts the HBase row into an {@link OccurrenceWritable} and emits it keyed on the
   * occurrence itself with a count of one.
   *
   * @param key the HBase row key (unused; the decoded occurrence becomes the output key)
   * @param row the HBase result holding the occurrence record
   * @param context the Hadoop context to report status to and write output through
   * @throws IOException if writing the output fails
   * @throws InterruptedException if the task is interrupted while writing
   */
  @Override
  protected void map(ImmutableBytesWritable key, Result row, Context context) throws IOException, InterruptedException {
    OccurrenceWritable o = OccurrenceWritable.newInstance(row);
    // NOTE(review): per-record setStatus incurs a string concatenation on every row;
    // harmless for diagnostics but could be sampled if profiling flags it.
    context.setStatus("Handling occurrence from dataset[" + o.getDatasetKey() + "]");
    context.write(o, ONE);
  }
}
