package cn.hdax.test1;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;
import java.util.List;

/**
 * MapReduce job that scans the HBase table {@code student} (columns
 * {@code info:name}, {@code info:age}) and writes one formatted text line
 * per cell to an HDFS output directory. Map-only output (single reducer,
 * identity reduce).
 */
public class HbaseMr {

    /** Default HDFS output directory, used when no argument is supplied. */
    private static final String DEFAULT_OUTPUT = "/t174/out2";

    /**
     * Mapper over HBase rows: emits one record per cell of the scanned row.
     * Output key is the constant marker {@code "hi--"}; output value is a
     * human-readable description of the cell.
     */
    static class MyMapper extends TableMapper<Text, Text> {
        /**
         * Called once per row returned by the scan (e.g. a 2-row table
         * triggers 2 invocations).
         *
         * @param key     the row key of the current HBase row
         * @param value   the scan {@link Result} holding the row's cells
         * @param context MapReduce context used to emit output records
         * @throws IOException          on HBase/HDFS I/O failure
         * @throws InterruptedException if the task is interrupted
         */
        @Override
        protected void map(ImmutableBytesWritable key, Result value,
                           Context context) throws IOException, InterruptedException {
            List<Cell> cells = value.listCells();
            for (Cell cell : cells) {
                String outValue = String.format("RowKey:%s    Family:%s    Qualifier:%s    cellValue:%s  ",
                        Bytes.toString(key.get()),
                        Bytes.toString(CellUtil.cloneFamily(cell)),
                        Bytes.toString(CellUtil.cloneQualifier(cell)),
                        Bytes.toString(CellUtil.cloneValue(cell))
                );

                System.out.println(outValue);

                // Constant marker key so all cells group under one reduce key;
                // the informative payload is the formatted value.
                context.write(
                        new Text("hi--"),
                        new Text(outValue));
            }
        }
    }


    /**
     * Configures and submits the job, then exits with 0 on success, 1 on failure.
     *
     * @param args optional: {@code args[0]} overrides the output directory
     *             (defaults to {@value #DEFAULT_OUTPUT})
     * @throws Exception on configuration or job-submission failure
     */
    public static void main(String[] args) throws Exception {
        // Reads hbase-site.xml from the classpath by default.
        Configuration conf = HBaseConfiguration.create();
        Job job = Job.getInstance(conf, "hbase-mapreduce-api");

        // Basic MapReduce job configuration.
        job.setJarByClass(HbaseMr.class);

        // Allow the output path to be supplied on the command line;
        // fall back to the historical default for backward compatibility.
        Path output = new Path(args.length > 0 ? args[0] : DEFAULT_OUTPUT);

        // Obtain the FileSystem once and close it deterministically
        // (the original fetched it twice and never closed it).
        try (FileSystem fs = FileSystem.get(conf)) {
            if (fs.exists(output)) {
                fs.delete(output, true);
            }
        }
        FileOutputFormat.setOutputPath(job, output);
        job.setNumReduceTasks(1);

        // Restrict the scan to the two columns the mapper formats.
        Scan scan = new Scan();
        scan.addColumn(Bytes.toBytes("info"), Bytes.toBytes("name"));
        scan.addColumn(Bytes.toBytes("info"), Bytes.toBytes("age"));

        // HBase helper wires the scan + mapper into the job;
        // addDependencyJars=false assumes HBase jars are already on the cluster classpath.
        TableMapReduceUtil.initTableMapperJob("student", scan, MyMapper.class, Text.class, Text.class, job, false);

        // Submit and block until completion.
        boolean succeeded = job.waitForCompletion(true);
        System.exit(succeeded ? 0 : 1);
    }


}
