package com.snbc.hbase.migration.hadoop;

import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * MapReduce job that exports rows from an HBase table ("info" column family)
 * to delimited text files on HDFS.
 *
 * @author shichenguang
 * @date 2019-09-06 18:00
 */
public class HbaseToHdfs {
    /**
     * Configures and runs the export job, then exits with the job's status.
     *
     * @param args optional overrides: args[0] = source HBase table name
     *             (default "student"), args[1] = HDFS output directory
     *             (default "/test/wordCount/outPut8/"). Defaults preserve the
     *             original hard-coded behavior when no arguments are given.
     * @throws IOException            on HDFS/HBase access failure
     * @throws ClassNotFoundException if a job class cannot be resolved
     * @throws InterruptedException   if the job is interrupted while waiting
     */
    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {

        String tablename = args.length > 0 ? args[0] : "student";// source table name
        String outputpath = args.length > 1 ? args[1] : "/test/wordCount/outPut8/";// HDFS output directory
        Configuration conf = HBaseConfiguration.create();

        Job job = Job.getInstance(conf, "HbaseToHdfs");
        job.setJarByClass(HbaseToHdfs.class);
        Scan scan = new Scan();
        // The mapper only reads the "info" family; restricting the scan avoids
        // shipping unrelated column families over the wire.
        scan.addFamily(Bytes.toBytes("info"));
        TableMapReduceUtil.initTableMapperJob(tablename, scan, doMapper.class, Text.class, Text.class, job);
        job.setReducerClass(WordCountHbaseReaderReduce.class);
        // Declare the reducer's output types explicitly: the MapReduce defaults
        // (LongWritable key) do not match the Text/Text pairs the reducer emits.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        FileOutputFormat.setOutputPath(job, new Path(outputpath));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }

    public static class doMapper extends TableMapper<Text, Text> {
        protected void map(ImmutableBytesWritable key, Result value, Context context)
                throws IOException, InterruptedException {
            String name= Bytes.toString(value.getValue(Bytes.toBytes("info"),Bytes.toBytes("name")));
            String id=Bytes.toString(value.getValue(Bytes.toBytes("info"),Bytes.toBytes("id")));
            String age=Bytes.toString(value.getValue(Bytes.toBytes("info"),Bytes.toBytes("age")));

            context.write(new Text(id+""),new Text(id+"_"+name+"_"+age+""));

        }
    }

    public static class WordCountHbaseReaderReduce extends Reducer<Text, Text, Text, Text> {
        private Text result = new Text();

        protected void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            for (Text val : values) {
                String s = val.toString()+ ",";
                result.set(new Text(s));
                context.write(key, result);
            }

//            Iterator<Text> itr = values.iterator();
//            String[] item = null;
//            String name = "";
//            String age = "";
//            String id = "";
//            StringBuffer insertBuild = new StringBuffer();
//            while (itr.hasNext()){
//                item = itr.next().toString().split("_");
//                name = item[0];
//                age = item[1];
//                id  = item[0];
//            }
//            insertBuild.append(name).append(" ").append(age).append(" ").append(id);
//            result.set(new Text(insertBuild.toString()));
//            context.write(key,result);
        }
    }
}
