package com.niit.Hbase.woedcount;

import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

import java.io.IOException;

/**
 * @author:Yan
 * @date: 2022年11月16日 13:06
 * @desc:
 */
/*
 KEYIN    ImmutableBytesWritable — the HBase row key, already wrapped by TableInputFormat
 VALUEIN  Result                 — all cells of the current row
 KEYOUT   Text                   — a single word
 VALUEOUT IntWritable            — the count 1 for each occurrence
 */
public class WordCountMapper extends TableMapper<Text, IntWritable> {

    /** Reused output key — avoids allocating a new Text per emitted word (standard Hadoop idiom). */
    private final Text outKey = new Text();

    /** Constant value 1 emitted for every word occurrence. */
    private static final IntWritable ONE = new IntWritable(1);

    /**
     * Called once per HBase row. Reads the cell at family {@code content},
     * qualifier {@code info}, tokenizes it on whitespace, and emits
     * {@code (word, 1)} for each token.
     *
     * @param key     the row key (already deserialized by the table input format)
     * @param value   all cells of the current row
     * @param context used to emit (Text, IntWritable) pairs to the reducer
     * @throws IOException          if the write to the context fails
     * @throws InterruptedException if the task is interrupted while writing
     */
    @Override
    protected void map(ImmutableBytesWritable key, Result value, Context context) throws IOException, InterruptedException {

        // Fetch the raw bytes of column content:info for this row.
        byte[] cell = value.getValue(Bytes.toBytes("content"), Bytes.toBytes("info"));

        // Guard: rows without this column return null; the original code would
        // have thrown a NullPointerException on split().
        if (cell == null) {
            return;
        }

        String line = Bytes.toString(cell);

        // Split on runs of whitespace ("\\s+" rather than "\\s") so that
        // consecutive spaces do not produce empty tokens counted as words.
        for (String word : line.split("\\s+")) {
            // A leading whitespace character still yields one empty first token — skip it.
            if (!word.isEmpty()) {
                outKey.set(word);
                context.write(outKey, ONE);
            }
        }
    }
}
