package com.galeno.movie;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;
import java.util.Map;
import java.util.Random;

/**
 * Author:   Galenoliu
 * Date:     21/07/27
 * Description:
 */
public class Skew {

    static class SkewMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
        // Number of reduce tasks in this job; the salt range for key suffixes.
        private int reduceTaskCount = 0;
        private final Text outKey = new Text();
        private final IntWritable one = new IntWritable(1);
        private final Random random = new Random();

        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
            reduceTaskCount = context.getNumReduceTasks();
        }

        /**
         * Splits each input line on whitespace and emits ("word-&lt;r&gt;", 1), where r is a
         * random integer in [0, numReduceTasks). The random suffix spreads a hot key
         * (e.g. "a-0", "a-1") across all reducers to mitigate data skew.
         */
        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            for (String word : value.toString().split("\\s+")) {
                outKey.set(word + "-" + random.nextInt(reduceTaskCount));
                context.write(outKey, one);
            }
        }
    }

    static class SkewReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
        private final IntWritable outValue = new IntWritable();

        /**
         * Counts the occurrences of one salted key and emits (saltedWord, count).
         * These are partial counts; the second job merges them per real word.
         */
        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            int occurrences = 0;
            for (IntWritable ignored : values) { // every value is 1; only the count matters
                occurrences++;
            }
            outValue.set(occurrences);
            context.write(key, outValue);
        }
    }

    static class SkewMap2 extends Mapper<LongWritable, Text, Text, IntWritable> {
        Text k = new Text();
        IntWritable v = new IntWritable();

        /**
         * Re-keys stage-1 output by stripping the random "-&lt;r&gt;" salt.
         * Input lines look like "word-&lt;r&gt;\t&lt;count&gt;" (tab is TextOutputFormat's
         * default key/value separator). Emits (word, count).
         *
         * Fixes two defects in the original parsing:
         *  - split("-")[0] truncated any word that itself contains '-'
         *    (stage 1 split on whitespace, so hyphenated words are possible);
         *    we now cut at the LAST '-' so only the salt is removed.
         *  - the line was split twice with two different regexes; one tab
         *    split is both correct and cheaper, and malformed lines are
         *    skipped instead of throwing ArrayIndexOutOfBoundsException.
         */
        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            String line = value.toString();
            int tab = line.indexOf('\t');
            if (tab < 0) {
                return; // malformed line: no key/value separator — skip rather than crash
            }
            String saltedWord = line.substring(0, tab);
            int saltPos = saltedWord.lastIndexOf('-');
            String word = saltPos >= 0 ? saltedWord.substring(0, saltPos) : saltedWord;
            int count = Integer.parseInt(line.substring(tab + 1).trim());
            k.set(word);
            v.set(count);
            context.write(k, v);
        }
    }
    static class SkewRedu2 extends Reducer<Text, IntWritable, Text, IntWritable> {
        private final IntWritable total = new IntWritable();

        /**
         * Sums the partial counts produced by stage 1 for one real word and
         * writes the final (word, total) pair.
         */
        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable partial : values) {
                sum += partial.get();
            }
            total.set(sum);
            context.write(key, total);
        }
    }



    public static void main(String[] args) throws Exception {
        // Stage 1: count salted words, spreading skewed keys across reducers.
        init1();
        // Stage 2: strip the salt and merge the partial counts into final totals.
        init2();
    }

     /**
      * Stage 2: merges the salted partial counts from stage 1 into final
      * per-word totals. Reads "word-&lt;r&gt;\t&lt;count&gt;" lines from out01 and
      * writes "word\t&lt;total&gt;" to out02 using a single reducer.
      *
      * @throws IllegalStateException if the job completes unsuccessfully
      *         (the original silently ignored waitForCompletion's result)
      */
     static void init2() throws IOException, InterruptedException, ClassNotFoundException {
         Configuration conf = new Configuration();
         // Run against the local filesystem, not HDFS.
         conf.set("fs.defaultFS", "file:///");
         Job job = Job.getInstance(conf, "skew2");
         job.setMapperClass(SkewMap2.class);
         job.setReducerClass(SkewRedu2.class);
         // Mapper and reducer emit the same (Text, IntWritable) pair, so the
         // final output types cover the map output types as well.
         job.setOutputKeyClass(Text.class);
         job.setOutputValueClass(IntWritable.class);
         // One reducer yields a single globally merged result file.
         job.setNumReduceTasks(1);
         // Input: stage-1 output. Output: final totals.
         FileInputFormat.setInputPaths(job, new Path("D:\\mrdata\\skew\\out01"));
         FileOutputFormat.setOutputPath(job, new Path("D:\\mrdata\\skew\\out02"));
         // FIX: fail loudly instead of discarding the job's success flag.
         if (!job.waitForCompletion(true)) {
             throw new IllegalStateException("MapReduce job 'skew2' failed");
         }
     }


    /**
     * Stage 1: word count over salted keys. Each word is suffixed with a random
     * reducer index ("word-&lt;r&gt;") so a skewed hot key is spread over both
     * reducers; partial counts land in out01 for stage 2 to merge.
     *
     * @throws IllegalStateException if the job completes unsuccessfully — the
     *         original ignored waitForCompletion's result, so a failed stage 1
     *         would silently let stage 2 run on missing or stale output
     */
    static void init1() throws IOException, InterruptedException, ClassNotFoundException {
        Configuration conf = new Configuration();
        // Run against the local filesystem, not HDFS.
        conf.set("fs.defaultFS", "file:///");
        Job job = Job.getInstance(conf, "skew");
        job.setMapperClass(SkewMapper.class);
        job.setReducerClass(SkewReducer.class);
        // Mapper and reducer emit the same (Text, IntWritable) pair, so the
        // final output types cover the map output types as well.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        // Two reducers: the salt in SkewMapper ranges over [0, 2).
        job.setNumReduceTasks(2);
        // Input: raw text. Output: salted partial counts consumed by stage 2.
        FileInputFormat.setInputPaths(job, new Path("D:\\mrdata\\skew\\input"));
        FileOutputFormat.setOutputPath(job, new Path("D:\\mrdata\\skew\\out01"));
        // FIX: fail loudly instead of discarding the job's success flag.
        if (!job.waitForCompletion(true)) {
            throw new IllegalStateException("MapReduce job 'skew' failed");
        }
    }


}
