package com.leadbank.bigdata.mapreduce.batch;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Mapper;

import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Date;

/**
 * Created by hp on 2018/5/14.
 */
public class BatchImportMapper extends
        Mapper<LongWritable, Text, LongWritable, Text> {

    SimpleDateFormat dateformat1 = new SimpleDateFormat("yyyyMMddHHmmss");
    Text v2 = new Text();

    protected void map(LongWritable key, Text value, Context context)
            throws java.io.IOException, InterruptedException {
        final String[] splited = value.toString().split("\t");
        try {
            final Date date = new Date(Long.parseLong(splited[0].trim()));
            final String dateFormat = dateformat1.format(date);
            String rowKey = splited[1] + ":" + dateFormat;
            v2.set(rowKey + "\t" + value.toString());
            context.write(key, v2);
        } catch (NumberFormatException e) {
            final Counter counter = context.getCounter("BatchImportJob",
                    "ErrorFormat");
            counter.increment(1L);
            System.out.println("出错了" + splited[0] + " " + e.getMessage());
        }
    };
}