package cn.lzd.mr.case1;

import cn.lzd.mr.PathUtil;

import org.apache.commons.collections.map.HashedMap;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Partitioner;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

/**
 * Created by lzd on 2018/5/31.
 * Demo of a custom {@link org.apache.hadoop.mapreduce.Partitioner}: routes map
 * output keys to reducers by a 3-character key prefix. (The original intent was
 * to compute per-phone upstream/downstream and total traffic; that logic is not
 * active in this demo.)
 */
public class TestPartition {


    /**
     * Demo mapper: ignores the content of each input record and emits a random
     * UUID as the key with a constant {@code "hello"} value, so the custom
     * partitioner below can be exercised with varied key prefixes.
     *
     * <p>NOTE(review): the commented-out phone/flow parsing logic was dead code
     * and has been removed; recover it from version control if needed.
     */
    public static class MDemo extends Mapper<LongWritable, Text, Text, Text> {

        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            // One fresh UUID key per input record; the value is a fixed marker.
            context.write(new Text(UUID.randomUUID().toString()), new Text("hello"));
        }
    }

    /**
     * No-op reducer: consumes every (key, values) group without writing any
     * output, so the job's result files only reflect how keys were partitioned.
     *
     * <p>NOTE(review): the commented-out aggregation iterated {@code Bean}
     * values, but this reducer receives {@code Iterable<Text>} — that code could
     * never have compiled as written and has been removed as dead code.
     */
    public static class RDemo extends Reducer<Text, Text, NullWritable, Bean> {

        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
            // Intentionally empty: this demo only exercises the partitioner.
        }
    }

    /**
     * Routes keys to reducers by their first three characters: the known phone
     * prefixes 135–138 map to partitions 0–3, and every other key (including
     * keys shorter than three characters) goes to the fallback partition 4.
     * The job must therefore run with at least 5 reduce tasks.
     *
     * <p>Replaces the previous raw, deprecated commons-collections
     * {@code HashedMap} built via double-brace initialization (an anonymous
     * subclass anti-pattern) with a typed {@code java.util.HashMap}, and guards
     * the {@code substring(0, 3)} call that would have thrown on short keys.
     */
    public static class MyPartition extends Partitioner<Text, Text> {

        /** Fallback partition index for keys with an unknown prefix. */
        private static final int DEFAULT_PARTITION = 4;

        /** Phone-number prefix -&gt; partition index. */
        private static final Map<String, Integer> PREFIX_TO_PARTITION = new HashMap<>();

        static {
            PREFIX_TO_PARTITION.put("135", 0);
            PREFIX_TO_PARTITION.put("136", 1);
            PREFIX_TO_PARTITION.put("137", 2);
            PREFIX_TO_PARTITION.put("138", 3);
        }

        @Override
        public int getPartition(Text text, Text text2, int i) {
            String key = text.toString();
            // Guard: keys shorter than 3 characters fall through to the default.
            String prefix = key.length() >= 3 ? key.substring(0, 3) : key;
            return PREFIX_TO_PARTITION.getOrDefault(prefix, DEFAULT_PARTITION);
        }
    }

    /**
     * Configures and runs the partition demo job: {@link MDemo} emits random
     * UUID keys, {@link MyPartition} spreads them across 5 reducers, and
     * {@link RDemo} writes nothing.
     *
     * <p>BUG FIX: the mapper emits {@code Text} values, but the map output
     * value class was declared as {@code Bean.class}; Hadoop type-checks map
     * output at runtime and would fail the job with a value-class mismatch.
     *
     * @param args unused
     */
    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        String path = TestPartition.class.getResource("/case1/").getPath();

        Configuration configuration = new Configuration();
        // Clear any stale output directory so a re-run does not fail.
        PathUtil.handleOutPath(path + "output", configuration);
        Job job = Job.getInstance(configuration);
        job.setJarByClass(TestPartition.class);

        job.setMapperClass(MDemo.class);
        job.setReducerClass(RDemo.class);

        job.setMapOutputKeyClass(Text.class);
        // Must match the mapper's actual value type (Text), not Bean.
        job.setMapOutputValueClass(Text.class);

        job.setOutputKeyClass(NullWritable.class);
        job.setOutputValueClass(Bean.class);

        // Hand the keys to 5 reducers: partitions 0-3 for known phone
        // prefixes, partition 4 for everything else.
        job.setPartitionerClass(MyPartition.class);
        job.setNumReduceTasks(5);

        FileInputFormat.setInputPaths(job, new Path(path + "text"));
        FileOutputFormat.setOutputPath(job, new Path(path + "output"));

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
