package com.chenjj.bigdata.hbase.bulkload;

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;
import java.util.UUID;

/**
 * Parses lines of a comma-delimited text file and converts each line into an
 * HBase {@link Put} for bulk loading.
 *
 * <p>Column qualifiers are taken from the job configuration property
 * {@code "columns"} (a comma-separated list); every cell is written to the
 * {@code info} column family. Each input line gets a random UUID row key.
 */
public class BulkLoadMapper extends Mapper<LongWritable, Text, ImmutableBytesWritable, Put> {
    /** Field delimiter used both for input lines and the "columns" property. */
    private static final String SPLIT_CHAR = ",";
    /** Single column family all cells are written to. Encoded once, not per row. */
    private static final byte[] FAMILY = Bytes.toBytes("info");

    // Column qualifiers for this task, parsed once in setup().
    // Instance field (not static): avoids stale state when JVM reuse runs
    // multiple tasks in the same process.
    private String[] columnList;

    /**
     * Reads and validates the required {@code "columns"} configuration.
     *
     * @throws IOException if the {@code "columns"} property is missing or empty —
     *         failing fast here gives a clear error instead of a later NPE in map()
     */
    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
        String columns = context.getConfiguration().get("columns");
        if (columns == null || columns.isEmpty()) {
            throw new IOException("Required job configuration property 'columns' is not set");
        }
        columnList = columns.split(SPLIT_CHAR);
    }

    /**
     * Converts one text line into a {@link Put} keyed by a fresh random UUID.
     *
     * @param key     byte offset of the line in the input file (unused)
     * @param value   the raw input line
     * @param context used to emit the (row key, Put) pair
     */
    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // Split with a limit so the field count never exceeds the declared
        // column count (extra delimiters stay inside the last field).
        String[] fields = value.toString().split(SPLIT_CHAR, columnList.length);

        // NOTE(review): a random row key spreads writes evenly but makes rows
        // non-addressable by key — confirm this is intended for this table.
        // Bytes.toBytes() encodes as UTF-8, unlike String.getBytes() which
        // depends on the platform default charset; reuse the same bytes for
        // both the Put and the map output key.
        byte[] rowKey = Bytes.toBytes(UUID.randomUUID().toString());
        Put put = new Put(rowKey);

        for (int i = 0; i < fields.length; i++) {
            put.addColumn(FAMILY, Bytes.toBytes(columnList[i]), Bytes.toBytes(fields[i]));
        }

        context.write(new ImmutableBytesWritable(rowKey), put);
    }
}
