package com.shujia.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;

import java.io.IOException;

public class Demo10MapreducceToIndex {


    /**
     * Mapper over the source table ("shujia:student"): for every row it emits a
     * single key of the form "<rowkey>_<clazz>" with a NullWritable value, so the
     * reducer can build a secondary index keyed by clazz.
     */
    public static class IndexMapper extends TableMapper<Text, NullWritable> {

        // Bytes.toBytes encodes as UTF-8 (HBase convention); String.getBytes()
        // would silently use the platform default charset.
        private static final byte[] FAMILY = Bytes.toBytes("info");
        private static final byte[] CLAZZ_QUALIFIER = Bytes.toBytes("clazz");

        @Override
        protected void map(ImmutableBytesWritable key, Result value, Context context) throws IOException, InterruptedException {

            String id = Bytes.toString(key.get());

            byte[] clazzBytes = value.getValue(FAMILY, CLAZZ_QUALIFIER);
            // A row without an info:clazz cell returns null here, and
            // Bytes.toString(null) would throw an NPE — skip such rows.
            if (clazzBytes == null) {
                return;
            }
            String clazz = Bytes.toString(clazzBytes);

            context.write(new Text(id + "_" + clazz), NullWritable.get());
        }
    }

    /**
     * Reducer that writes the secondary index table: the clazz value becomes the
     * rowkey and each student id becomes a column qualifier under "info".
     */
    public static class IndexReduce extends TableReducer<Text, NullWritable, NullWritable> {

        private static final byte[] FAMILY = Bytes.toBytes("info");

        @Override
        protected void reduce(Text key, Iterable<NullWritable> values, Context context) throws IOException, InterruptedException {

            // Split on the FIRST '_' only (limit 2), so a clazz value that itself
            // contains underscores is not truncated at its first underscore.
            String[] parts = key.toString().split("_", 2);
            if (parts.length < 2) {
                // Malformed key (no '_' separator) — nothing sensible to index.
                return;
            }
            String id = parts[0];
            String clazz = parts[1];

            // clazz is the index rowkey; the student id is the column name.
            Put put = new Put(Bytes.toBytes(clazz));

            // addColumn replaces the deprecated Put.add; null value means an
            // empty cell — only the qualifier (student id) carries information.
            put.addColumn(FAMILY, Bytes.toBytes(id), null);

            context.write(NullWritable.get(), put);
        }
    }

    /**
     * Driver: reads "shujia:student", builds a clazz→student-id secondary index,
     * and writes it to "shujia:student_index_clazz".
     *
     * @param args unused
     * @throws Exception if job setup or execution fails
     */
    public static void main(String[] args) throws Exception {

        Configuration conf = HBaseConfiguration.create();

        Job job = Job.getInstance(conf);

        // Name the job after what it actually does (the previous name
        // "hdfstoHfile" was a copy-paste leftover from another job).
        job.setJobName("Demo10MapreducceToIndex");
        job.setJarByClass(Demo10MapreducceToIndex.class);

        // Full-table scan tuning recommended for MapReduce over HBase:
        // fetch rows in batches and keep the scan out of the block cache.
        Scan scan = new Scan();
        scan.setCaching(500);
        scan.setCacheBlocks(false);

        // Mapper: which table to read and the map output key/value types.
        TableMapReduceUtil.initTableMapperJob(
                "shujia:student",
                scan,
                IndexMapper.class,
                Text.class,
                NullWritable.class,
                job
        );

        // Reducer: which table to write. The index table must already exist:
        //   create 'shujia:student_index_clazz','info'
        TableMapReduceUtil.initTableReducerJob(
                "shujia:student_index_clazz",
                IndexReduce.class,
                job
        );

        // Run the job and surface its outcome as the process exit code
        // instead of discarding the boolean result.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }

}
