package com.doit.day01;

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;
import java.nio.charset.StandardCharsets;

/**
 * Maps one CSV order line into HBase {@link KeyValue}s keyed by uid, for bulk loading
 * (e.g. via HFileOutputFormat). Expected input layout (comma-separated, 6 fields):
 * uid,name,age,gender,job,city — e.g. {@code rowkey_001,zss,18,male,chengxuyuan,beijing}.
 *
 * <p>Output per record: f1:age, f1:gender, f1:name, f2:city, f2:job — emitted in
 * (family, qualifier) sorted order, which HFile writing requires per row.
 *
 * <p>NOTE(review): the original in-line comment mentions enriching order data
 * (uid,order_id,price,goods_number) from redis (uid → name/age/gender); that lookup
 * is not implemented here — confirm whether it is still planned.
 */
public class MyMapper extends Mapper<LongWritable, Text, ImmutableBytesWritable, KeyValue> {

    // Family/qualifier byte arrays are constant per job — encode them once, not per record.
    private static final byte[] F1 = utf8("f1");
    private static final byte[] F2 = utf8("f2");
    private static final byte[] Q_AGE = utf8("age");
    private static final byte[] Q_GENDER = utf8("gender");
    private static final byte[] Q_NAME = utf8("name");
    private static final byte[] Q_CITY = utf8("city");
    private static final byte[] Q_JOB = utf8("job");

    /** Encodes a string as UTF-8 — explicit charset instead of the platform default. */
    private static byte[] utf8(String s) {
        return s.getBytes(StandardCharsets.UTF_8);
    }

    /**
     * Emits the five column KeyValues for one input line.
     *
     * @param key   byte offset of the line in the input split (unused)
     * @param value one CSV record: uid,name,age,gender,job,city
     * @throws IOException          if the framework fails to write output
     * @throws InterruptedException if the task is interrupted while writing
     */
    @Override
    protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, ImmutableBytesWritable, KeyValue>.Context context) throws IOException, InterruptedException {
        String[] fields = value.toString().split(",");

        // Skip malformed lines instead of failing the whole task with
        // ArrayIndexOutOfBoundsException; count them for visibility.
        if (fields.length < 6) {
            context.getCounter("MyMapper", "MALFORMED_LINES").increment(1);
            return;
        }

        // The uid is both the shuffle key and the HBase row key — encode it once.
        byte[] rowKey = utf8(fields[0]);
        ImmutableBytesWritable outKey = new ImmutableBytesWritable(rowKey);

        // Keep the original emission order: within each family, qualifiers ascending.
        context.write(outKey, new KeyValue(rowKey, F1, Q_AGE, utf8(fields[2])));
        context.write(outKey, new KeyValue(rowKey, F1, Q_GENDER, utf8(fields[3])));
        context.write(outKey, new KeyValue(rowKey, F1, Q_NAME, utf8(fields[1])));
        context.write(outKey, new KeyValue(rowKey, F2, Q_CITY, utf8(fields[5])));
        context.write(outKey, new KeyValue(rowKey, F2, Q_JOB, utf8(fields[4])));
    }
}
