package data_manipulate;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

/*
 Sample text input stored in HDFS (whitespace-separated: IP address, then a word):
    192.168.1.1 aaa
    192.168.1.1 bbb
    192.168.1.1 ccc
    192.168.5.5 fff
    192.168.1.1 ddd
    192.168.34.36 ccc
    192.168.34.36 ddd
    192.168.1.1 eee
    192.168.5.5 fff
    192.168.34.36 eee
    192.168.34.36 fff
    192.168.6.7 aaa
    192.168.3.4 bbb
    192.168.5.5 fff
    192.168.3.4 ccc
    192.168.3.4 ddd
    192.168.3.4 eee
    192.168.3.4 aaa
    192.168.5.5 fff
    192.168.101.12 aaa
    192.168.102.12 aaa
    192.168.34.36 aaa
    192.168.111.1 aaa
    192.168.222.2 ccc
    192.168.122.3 ddd
    192.168.121.3 ddd
    192.168.3.30 ddd
    192.168.5.5 fff
 */
/**
 * MapReduce components for counting occurrences of the first field (an IP
 * address) in whitespace-separated text lines stored in HDFS.
 */
public class Sort {

    /** HDFS location of the input data set. */
    public String inputPath = "hdfs://localhost:9000/sort/sort1";

    /**
     * Emits {@code (ip, 1)} for every input line. The input key is the byte
     * offset of the line within the file; the value is the line text,
     * e.g. {@code "192.168.1.1 aaa"}.
     */
    public static class countMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

        // Reused across map() calls to avoid per-record allocations, per
        // standard Hadoop practice.
        private final IntWritable one = new IntWritable(1);
        private final Text word = new Text();

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            // The sample data in the file header is separated by spaces, not
            // tabs; splitting on "\t" would leave the whole line as the key.
            // Split on any run of whitespace so both separators work.
            String line = value.toString().trim();
            if (line.isEmpty()) {
                return; // skip blank lines rather than emitting an empty key
            }
            String[] fields = line.split("\\s+");
            word.set(fields[0]);
            context.write(word, one);
        }
    }
}
