package com.whoami.dataplatform.tag_combine.hbase.load;

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;
import java.nio.ByteBuffer;

/**
 * @author tzp
 * @since 2019/8/15
 */
public class HFileWriterMapper extends Mapper<BytesWritable, BytesWritable, ImmutableBytesWritable, Put> {
    /** Reused output key — avoids allocating a new writable per record. */
    private final ImmutableBytesWritable rowkey = new ImmutableBytesWritable();

    /**
     * Converts one (rowkey, serialized payload) pair into an HBase {@link Put}
     * for HFile bulk loading. The value payload is read as a single big-endian
     * {@code int} starting at offset 0 and written under column family
     * {@code HFileWriterJob.CF_DEFAULT} with an empty qualifier.
     *
     * @param key     row key bytes (logical length = {@code key.getLength()})
     * @param value   payload whose first 4 bytes encode the stored int
     * @param context MapReduce context receiving (rowkey, Put) pairs
     * @throws IOException          if the framework fails to write the output
     * @throws InterruptedException if the task is interrupted while writing
     */
    @Override
    protected void map(BytesWritable key, BytesWritable value, Context context)
            throws IOException, InterruptedException {
        // BytesWritable's backing array may be longer than the logical payload,
        // so always pass (buffer, 0, getLength()) rather than the raw array.
        rowkey.set(key.getBytes(), 0, key.getLength());

        // Bound the wrap to the logical length; the raw backing array can
        // contain stale bytes past getLength().
        ByteBuffer byteBuffer = ByteBuffer.wrap(value.getBytes(), 0, value.getLength());
        int num = byteBuffer.getInt();

        Put put = new Put(key.getBytes(), 0, key.getLength());
        // Bytes.toBytes(String) encodes UTF-8, unlike String.getBytes(), which
        // uses the platform default charset — keep the encoding deterministic.
        // A null qualifier is treated by HBase as the empty qualifier.
        put.addColumn(Bytes.toBytes(HFileWriterJob.CF_DEFAULT), null, Bytes.toBytes(num));

        context.write(rowkey, put);
    }
}
