package com.kkb.hbase2hdfs;


import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;
import java.util.List;

/**
 * Reducer that writes HBase scan output to HDFS as plain text, one line per cell.
 *
 * k2  ImmutableBytesWritable — HBase row key emitted by the mapper
 * v2  Result                 — the scanned cells for that row
 * k3  NullWritable           — no output key
 * v3  Text                   — formatted "RowKey=..., column=fam:qual, value=..." line
 */
public class HdfsWriteReducer extends Reducer<ImmutableBytesWritable, Result, NullWritable, Text> {

    // Reused for every output record, per standard MapReduce practice,
    // to avoid allocating a new Text per cell.
    private final Text text = new Text();

    /**
     * Formats each cell of each {@link Result} and writes it to the job output.
     *
     * @param key     the HBase row key (unused; every cell carries its own row)
     * @param values  the Results grouped under this key
     * @param context output sink for the formatted lines
     * @throws IOException          if the underlying record writer fails
     * @throws InterruptedException if the write is interrupted
     */
    @Override
    protected void reduce(ImmutableBytesWritable key, Iterable<Result> values, Context context)
            throws IOException, InterruptedException {
        for (Result result : values) {
            List<Cell> cells = result.listCells();
            if (cells == null) {
                // Result carried no cells — nothing to emit for this row.
                continue;
            }
            // Plain for-loop (not forEach with a lambda) so IOException and
            // InterruptedException propagate to the framework instead of being
            // swallowed by printStackTrace() inside the lambda.
            for (Cell cell : cells) {
                String rowKey = Bytes.toString(CellUtil.cloneRow(cell));
                String family = Bytes.toString(CellUtil.cloneFamily(cell));
                String qualifier = Bytes.toString(CellUtil.cloneQualifier(cell));
                String value = Bytes.toString(CellUtil.cloneValue(cell));

                text.set("RowKey=" + rowKey + ", column=" + family + ":" + qualifier + ", value=" + value);
                context.write(NullWritable.get(), text);
            }
        }
    }
}
