package com.fengye.hbase.example;

import java.io.IOException;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class TableCopy extends Configured implements Tool {

    /**
     * Map-only copier: reads each row {@code Result} from the source table and
     * re-emits its cells wrapped in a {@link Put} keyed by the original row key.
     */
    static class CopyMapper extends TableMapper<ImmutableBytesWritable, Put> {

        @Override
        protected void map(ImmutableBytesWritable key, Result value,
                           Context context) throws IOException, InterruptedException {
            // Collect all cells of the current row.
            List<KeyValue> kvs = value.list();
            if (kvs == null || kvs.isEmpty()) {
                // Nothing to copy for this row (empty/filtered Result) — avoids
                // the NPE the original for-each would hit on a null list.
                return;
            }
            Put p = new Put(key.get());
            for (KeyValue kv : kvs) {
                // Re-add each cell verbatim (family/qualifier/timestamp/value
                // preserved). The per-cell System.out.println was removed: it
                // floods task logs and dominates runtime on a full-table copy.
                p.add(kv);
            }
            // Emit the Put; with zero reducers it flows straight to TableOutputFormat.
            context.write(key, p);
        }

    }

    /**
     * Builds (but does not submit) the table-copy job.
     *
     * @param conf cluster configuration (must carry the HBase connection settings)
     * @param args {@code {jobName, sourceTable, destinationTable}}
     * @return a fully configured map-only Job
     * @throws IOException if job creation or table-mapper initialization fails
     * @throws IllegalArgumentException if fewer than three arguments are supplied
     */
    public static Job createSubmittableJob(Configuration conf, String[] args) throws IOException {
        if (args == null || args.length < 3) {
            throw new IllegalArgumentException(
                    "Usage: TableCopy <jobName> <srcTable> <dstTable>");
        }
        String jobName = args[0];
        String srcTable = args[1];
        String dstTable = args[2];
        Scan sc = new Scan();
        // Large scanner caching + no block cache: standard settings for a
        // full-table MapReduce scan (don't pollute the region server cache).
        sc.setCaching(10000);
        sc.setCacheBlocks(false);
        Job job = new Job(conf, jobName);
        job.setJarByClass(TableCopy.class);
        // Map-only job: Puts are written directly by TableOutputFormat.
        job.setNumReduceTasks(0);
        // BUG FIX: the mapper's output value type is Put, so the declared output
        // value class must be Put.class. The original passed Result.class, which
        // fails at runtime with a "wrong value class" error.
        TableMapReduceUtil.initTableMapperJob(srcTable, sc, CopyMapper.class,
                ImmutableBytesWritable.class, Put.class, job);
        // Configures TableOutputFormat for the destination table; no reducer class.
        TableMapReduceUtil.initTableReducerJob(dstTable, null, job);
        return job;
    }

    /**
     * Tool entry point: wires the quorum into the configuration, builds the job
     * and waits for completion.
     *
     * @return 0 on success, 1 on job failure
     */
    @Override
    public int run(String[] args) throws Exception {
        Configuration conf = getConf();
        // NOTE(review): hard-coded ZooKeeper quorum host; consider supplying it
        // via -Dhbase.zookeeper.quorum=... instead of baking it in.
        conf.set("hbase.zookeeper.quorum", "hadoop");
        Job job = createSubmittableJob(conf, args);
        return job.waitForCompletion(true) ? 0 : 1;
    }

    public static void main(String[] args) throws Exception {
        // BUG FIX: the original unconditionally overwrote the command-line
        // arguments with demo values, making the CLI unusable. Fall back to the
        // demo arguments only when none were supplied.
        if (args == null || args.length == 0) {
            args = new String[]{"copy-job", "travel", "test"};
        }
        // Propagate the job's exit status to the shell (was silently dropped).
        System.exit(ToolRunner.run(new TableCopy(), args));
    }

}