package zhanghao;



import java.io.IOException;

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class SourceAllFlowHbaseMap {
	/**
	 * Mapper used when bulk-loading into HBase via HFiles: each input line is
	 * converted into an ({@link ImmutableBytesWritable} row key, {@link KeyValue})
	 * pair suitable for {@code HFileOutputFormat}.
	 */
	public static class ConvertOutToHFileMapper extends Mapper<LongWritable, Text, ImmutableBytesWritable, KeyValue> 
	{
		/**
		 * Parses one tab-separated input line of the form
		 * {@code rowkey \t family \t qualifier \t value} and emits the
		 * corresponding HBase KeyValue. Lines with fewer than four columns are
		 * silently skipped; extra columns beyond the fourth are ignored.
		 *
		 * @param key     byte offset of the line in the input split (unused)
		 * @param value   one line of tab-separated text
		 * @param context task context used to emit the (rowkey, KeyValue) pair
		 * @throws IOException          if the write to the context fails
		 * @throws InterruptedException if the task is interrupted during write
		 */
		@Override 
		protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException 
		{ 
			// Split with limit -1 so a trailing empty column is preserved:
			// String.split(regex) drops trailing empty strings, which would
			// cause a line with an empty value ("rk\tf\tq\t") to be skipped.
			String[] fields = value.toString().split("\t", -1);
			if (fields.length < 4)
			{
				// Malformed line: not enough columns, skip it.
				return;
			}
			// Bytes.toBytes(String) encodes as UTF-8.
			byte[] rowKey = Bytes.toBytes(fields[0]);
			byte[] family = Bytes.toBytes(fields[1]);
			byte[] qualifier = Bytes.toBytes(fields[2]);
			byte[] hbaseValue = Bytes.toBytes(fields[3]);
			// KeyValue args: row key, column family, column qualifier, cell value.
			KeyValue keyValue = new KeyValue(rowKey, family, qualifier, hbaseValue);
			context.write(new ImmutableBytesWritable(rowKey), keyValue);
		}
	}
	
}
