package hbase.chap13;

import hbase.chap13.common.NcdcRecordParser;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import java.io.IOException;

/**
 * Map-only MapReduce job that imports NCDC weather records into the HBase
 * "observations" table. Each mapper parses its input lines and writes valid
 * temperature readings directly via a {@link Table}; the job itself produces
 * no MapReduce output (zero reducers, {@code NullOutputFormat}).
 */
public class HBaseTemperatureImporter extends Configured implements Tool {

  /**
   * Mapper that parses one NCDC record per input line and, when the record
   * carries a valid temperature, puts a single cell into the "observations"
   * table keyed by {@code RowKeyConverter.makeObservationRowKey}. Emits no
   * map output, so the output key/value type parameters {@code K}/{@code V}
   * are never bound to concrete writes.
   */
  static class HBaseTemperatureMapper<K, V> extends Mapper<LongWritable, Text, K, V> {
    private final NcdcRecordParser parser = new NcdcRecordParser();
    // Held for the lifetime of the task so it can be closed in cleanup();
    // creating a Connection is expensive (ZooKeeper sessions, threads).
    private Connection connection;
    private Table table;

    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
      // Create the HBase connection and table client once up-front and keep
      // them around rather than creating them on each map invocation.
      // Let IOException propagate: setup() already declares it, so wrapping
      // it in RuntimeException would only hide the checked contract.
      connection = ConnectionFactory.createConnection(context.getConfiguration());
      table = connection.getTable(TableName.valueOf("observations"));
    }

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
      parser.parse(value.toString());
      if (parser.isValidTemperature()) {
        byte[] rowKey = RowKeyConverter.makeObservationRowKey(parser.getStationId(),
          parser.getObservationDate().getTime());
        Put p = new Put(rowKey);
        p.addColumn(HBaseTemperatureCli.DATA_COLUMNFAMILY,
          HBaseTemperatureCli.AIRTEMP_QUALIFIER,
          Bytes.toBytes(parser.getAirTemperature()));
        table.put(p);
      }
    }

    @Override
    protected void cleanup(Context context) throws IOException, InterruptedException {
      // Close the table first, then the connection that owns it. The
      // connection close is in a finally block so it runs even if closing
      // the table throws; previously the connection was never closed at all.
      try {
        if (table != null) {
          table.close();
        }
      } finally {
        if (connection != null) {
          connection.close();
        }
      }
    }
  }

  /**
   * Configures and submits the import job.
   *
   * @param args exactly one argument: the HDFS input path of NCDC records
   * @return 0 on success, non-zero on usage error or job failure
   */
  @Override
  public int run(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
    if (args.length != 1) {
      System.err.println("Usage: HBaseTemperatureImporter <input>");
      return -1;
    }

    Job job = Job.getInstance(getConf(), "temperature importer");
    job.setJarByClass(HBaseTemperatureImporter.class);
    // NOTE(review): this hard-coded path overrides setJarByClass above and
    // only resolves when the job is launched from the project root — confirm
    // it is still required before removing.
    job.setJar("./target/hadoop-test-1.0-SNAPSHOT.jar");

    FileInputFormat.addInputPath(job, new Path(args[0]));
    job.setMapperClass(HBaseTemperatureMapper.class);
    job.setNumReduceTasks(0); // map-only: mappers write straight to HBase
    job.setOutputFormatClass(NullOutputFormat.class);
    return job.waitForCompletion(true) ? 0 : 1;
  }

  public static void main(String[] args) throws Exception {
    int exitCode = ToolRunner.run(new HBaseTemperatureImporter(), args);
    System.exit(exitCode);
  }
}