package com.shujia.MR;


import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

/**
 * Reads student data from HBase, counts the number of students per class
 * with MapReduce, and writes the result to HDFS.
 * <p>
 * Note: running this as an MR job requires the following dependencies:
 * 1. hadoop-client
 * 2. hbase-server
 * <p>
 * An MR job is split into three parts: Driver, Map, and Reduce.
 * A custom mapper that reads from HBase must extend TableMapper&lt;KEYOUT, VALUEOUT&gt;
 * and declare the types of the Key and Value it emits.
 * <p>
 * Processing logic:
 * The Map side reads HBase data row by row (keyed by RowKey); for each row it
 * extracts the class (clazz) as the Key and the constant 1 as the Value, and
 * emits the pair to the Reduce side.
 * <p>
 * The Reduce side sums the counts per class, just like a standard word-count job.
 */
public class Hbase2HDFS {

    /** HBase column family holding the student attributes. */
    private static final byte[] FAMILY = Bytes.toBytes("info2");
    /** Qualifier of the class ("clazz") column inside {@link #FAMILY}. */
    private static final byte[] CLAZZ_QUALIFIER = Bytes.toBytes("clazz");

    /**
     * Driver: configures and submits the job that scans HBase table
     * {@code api:tb1}, counts students per class, and writes the result
     * to {@code output/hbase2hdfs} on HDFS.
     *
     * @param args unused
     * @throws IOException            on job-setup or HDFS errors
     * @throws InterruptedException   if the submitting thread is interrupted
     * @throws ClassNotFoundException if a job class cannot be resolved
     */
    public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {

        Configuration conf = new Configuration();
        // ZooKeeper quorum used by the HBase client to locate the cluster.
        conf.set("hbase.zookeeper.quorum", "node1,node2,master");

        Job job = Job.getInstance(conf);
        job.setJarByClass(Hbase2HDFS.class);
        job.setJobName("Hbase2HDFS");

        // Mapper: wires the HBase table, scan, mapper class, and the
        // map-output key/value types into the job in one call.
        TableMapReduceUtil.initTableMapperJob(
                "api:tb1",
                new Scan(),
                MyMapper.class,
                Text.class,
                IntWritable.class,
                job
        );

        // Combiner: summation is associative and commutative, so the reducer
        // can safely pre-aggregate on the map side and shrink the shuffle.
        job.setCombinerClass(MyReducer.class);

        // Reducer
        job.setReducerClass(MyReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        FileOutputFormat.setOutputPath(job, new Path("output/hbase2hdfs"));

        // Propagate job success/failure to the caller via the exit code;
        // previously the result of waitForCompletion was silently dropped.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }

    /**
     * TableMapper&lt;KEYOUT, VALUEOUT&gt;
     * KEYOUT:   class name (Text)
     * VALUEOUT: the constant 1 (IntWritable)
     */
    public static class MyMapper extends TableMapper<Text, IntWritable> {

        /** Shared constant "1"; immutable, safe to reuse across rows. */
        private static final IntWritable ONE = new IntWritable(1);
        // Reused output key to avoid allocating a fresh Text per row.
        private final Text outKey = new Text();

        /**
         * Emits (clazz, 1) for one HBase row.
         *
         * @param key     the RowKey of the row being read
         * @param value   the full row contents
         * @param context MR context used to emit the pair
         * @throws IOException          on emit failure
         * @throws InterruptedException if the task is interrupted
         */
        @Override
        protected void map(ImmutableBytesWritable key, Result value, Mapper<ImmutableBytesWritable, Result, Text, IntWritable>.Context context) throws IOException, InterruptedException {

            byte[] rawClazz = value.getValue(FAMILY, CLAZZ_QUALIFIER);
            if (rawClazz == null) {
                // Row has no info2:clazz cell. The original code would NPE here
                // (new Text(null)); instead skip the row and record it in a counter.
                context.getCounter("Hbase2HDFS", "ROWS_MISSING_CLAZZ").increment(1L);
                return;
            }
            outKey.set(Bytes.toString(rawClazz));
            context.write(outKey, ONE);
        }
    }

    /**
     * Sums the 1s emitted per class; also used as the combiner since its
     * input and output types match (Text, IntWritable).
     */
    public static class MyReducer extends Reducer<Text, IntWritable, Text, IntWritable> {

        // Reused output value to avoid allocating an IntWritable per key.
        private final IntWritable result = new IntWritable();

        /**
         * @param key     the class name
         * @param values  partial counts for that class
         * @param context MR context used to emit the total
         */
        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Reducer<Text, IntWritable, Text, IntWritable>.Context context) throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable value : values) {
                sum += value.get();
            }
            result.set(sum);
            context.write(key, result);
        }
    }

}
