package org.hyf.inspur.LessonDesin.clear.count;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.hyf.inspur.LessonDesin.clear.tools.StringComparator;
import org.hyf.inspur.LessonDesin.clear.tools.TProperties;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.TreeMap;

/**
 * Mapper for the PV (page-view) counting stage.
 *
 * <p>Each cleaned input line is expected to have at least two fields separated by the
 * configured {@code fileoutsplit} delimiter: a numeric code in field 0 and a count/value
 * in field 1. The first six characters of field 0 are kept and padded with twelve zeros
 * to form a bucket key; the emitted key and value are both the string
 * {@code bucket + outfilesplit + field1 + outfilesplit}.
 *
 * <p>Malformed records (fewer than two fields, or a first field shorter than six
 * characters) are silently skipped rather than failing the task — appropriate for a
 * data-cleaning pipeline. NOTE(review): confirm skipping (vs. counting via a Counter)
 * is the desired policy.
 */
public class PvMapper extends Mapper<LongWritable, Text, Text, Text> {

    // NOTE(review): never read or written by this class; package-visible, so left in
    // place in case another class in this package touches it — confirm and remove.
    int sum = 0;

    // Reused writable to avoid allocating a new Text per record (standard Hadoop idiom).
    private final Text outText = new Text();

    // Delimiters cached once per task attempt so the properties file is not re-read
    // for every input record.
    private String inputSplit;
    private String outputSplit;

    /**
     * Loads the field delimiters from the task configuration properties once,
     * before any records are processed.
     */
    @Override
    protected void setup(Mapper<LongWritable, Text, Text, Text>.Context context)
            throws IOException, InterruptedException {
        inputSplit = TProperties.getValue("fileoutsplit");
        outputSplit = TProperties.getValue("outfilesplit");
    }

    /**
     * Parses one input line and emits the zero-padded six-character bucket record.
     *
     * @param key     byte offset of the line in the input split (unused)
     * @param value   one delimited record from the cleaned data
     * @param context Hadoop context used to emit the (key, value) pair
     */
    @Override
    protected void map(LongWritable key, Text value,
                       Mapper<LongWritable, Text, Text, Text>.Context context)
            throws IOException, InterruptedException {
        String[] fields = value.toString().split(inputSplit);
        // Guard against short/blank lines: the original code would throw
        // ArrayIndexOutOfBounds / StringIndexOutOfBounds and kill the task here.
        if (fields.length < 2 || fields[0].length() < 6) {
            return;
        }
        // Keep the 6-character prefix and pad to a fixed-width 18-character bucket key.
        String bucket = fields[0].substring(0, 6) + "000000000000";
        String record = bucket + outputSplit + fields[1] + outputSplit;
        outText.set(record);
        // Key and value are intentionally identical; context.write serializes
        // immediately, so reusing the same Text instance for both is safe.
        context.write(outText, outText);
    }

}
