package com.itbaizhan.hbase2hbase;

import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

/**
 * Mapper for a word-count job reading from an HBase table.
 *
 * <p>For each row, reads the {@code cf:line} cell, splits its string value on
 * spaces, and emits {@code (word, 1)} pairs for the reducer to sum.
 */
public class Hbase2HbaseMapper extends TableMapper<Text, IntWritable> {
    // Column family and qualifier of the cell holding the text line.
    // Encoded once via Bytes.toBytes (UTF-8) instead of String.getBytes(),
    // which uses the platform-default charset.
    private static final byte[] CF = Bytes.toBytes("cf");
    private static final byte[] LINE_QUALIFIER = Bytes.toBytes("line");

    // Reusable output key/value objects (standard MapReduce object reuse).
    private final Text keyOut = new Text();
    private final IntWritable valOut = new IntWritable(1);

    /**
     * Emits {@code (word, 1)} for every space-separated word in the row's
     * {@code cf:line} cell.
     *
     * @param key     rowkey of the current row
     * @param value   Result object wrapping the current row's cells
     * @param context MapReduce context used to emit output pairs
     * @throws IOException          if emitting output fails
     * @throws InterruptedException if the task is interrupted
     */
    @Override
    protected void map(ImmutableBytesWritable key, Result value,
                       Context context) throws IOException, InterruptedException {
        // Fetch the cf:line value; getValue returns null when the cell is
        // absent, and Bytes.toString propagates that null.
        String line = Bytes.toString(value.getValue(CF, LINE_QUALIFIER));
        if (line == null) {
            // Row has no cf:line cell — nothing to count.
            return;
        }
        // Split on spaces and emit each non-empty word with a count of 1.
        for (String word : line.split(" ")) {
            if (word.isEmpty()) {
                // Consecutive spaces produce empty tokens; skip them.
                continue;
            }
            keyOut.set(word);
            context.write(keyOut, valOut);
        }
    }
}