package com.doit.day01;

import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

public class ReduceTask extends TableReducer<Text, Text, ImmutableBytesWritable> {

    /** Column family every output column belongs to. Encoded once, not per record. */
    private static final byte[] FAMILY = Bytes.toBytes("f");
    private static final byte[] QUAL_NAME = Bytes.toBytes("name");
    private static final byte[] QUAL_GENDER = Bytes.toBytes("gender");

    /**
     * Writes one HBase row per reduce key.
     *
     * <p>The reduce key becomes the HBase row key. Only the first value is
     * consumed; it is expected to be a {@code "name:gender"} string, which is
     * split into the {@code f:name} and {@code f:gender} columns. Malformed
     * values (missing the {@code :} separator) are skipped rather than
     * failing the task.
     *
     * @param key     row key for the output table
     * @param values  map-side values; only the first element is used
     * @param context MapReduce context; the Put is emitted with a null key,
     *                which TableOutputFormat accepts (it reads the row key
     *                from the Put itself)
     * @throws IOException          if the write fails
     * @throws InterruptedException if the task is interrupted
     */
    @Override
    protected void reduce(Text key, Iterable<Text> values,
                          Reducer<Text, Text, ImmutableBytesWritable, Mutation>.Context context)
            throws IOException, InterruptedException {
        // Text.getBytes() exposes the internal buffer, which may be longer
        // than getLength() and would leave trailing garbage in the row key;
        // copyBytes() returns exactly the valid bytes.
        Put put = new Put(key.copyBytes());

        // Only the first value per key is used (assumes upstream emits one).
        String v = values.iterator().next().toString();

        // Expected format: "name:gender".
        String[] split = v.split(":");
        if (split.length < 2) {
            // Skip malformed input instead of dying with
            // ArrayIndexOutOfBoundsException and failing the whole task.
            return;
        }
        put.addColumn(FAMILY, QUAL_NAME, Bytes.toBytes(split[0]));
        put.addColumn(FAMILY, QUAL_GENDER, Bytes.toBytes(split[1]));

        // TableOutputFormat ignores the emitted key; the Put carries the row key.
        context.write(null, put);
    }
}
