package MapReduce.Demo10_TemperatureByYear;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.partition.InputSampler;
import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;
import utils.JobSubmit;

import java.io.IOException;

/**
 * @Author lixinlei
 * @Date 2023/3/22 8:32
 */
public class TemperatureByYearApp {

    /**
     * Identity mapper: forwards each {@code (key, value)} IntWritable pair unchanged.
     * Presumably key = year and value = temperature (per the app name) — confirm
     * against the SequenceFile producer. Input arrives from a SequenceFile, so the
     * records are already deserialized IntWritables; no text parsing is needed.
     * The actual ordering work is done by the shuffle + TotalOrderPartitioner.
     */
    public static class TBYMapper extends Mapper<IntWritable, IntWritable, IntWritable, IntWritable> {

        @Override
        protected void map(IntWritable key, IntWritable value,
                           Mapper<IntWritable, IntWritable, IntWritable, IntWritable>.Context context)
                throws IOException, InterruptedException {
            // Pure pass-through; partitioning/sorting happens after the map phase.
            context.write(key, value);
        }
    }

    /**
     * Identity reducer: re-emits every value of a key group with its key,
     * preserving whatever order the framework delivers them in.
     */
    public static class TBYReducer extends Reducer<IntWritable, IntWritable, IntWritable, IntWritable> {

        @Override
        protected void reduce(IntWritable key, Iterable<IntWritable> values,
                              Reducer<IntWritable, IntWritable, IntWritable, IntWritable>.Context context)
                throws IOException, InterruptedException {
            // Write each value as it is iterated — Hadoop reuses the Writable
            // instance between iterations, so it must not be stashed for later.
            for (IntWritable temperature : values) {
                context.write(key, temperature);
            }
        }
    }

    /**
     * Driver for a totally-ordered sort of IntWritable pairs using
     * {@link TotalOrderPartitioner}, then two demo submissions through the
     * {@link JobSubmit} helper (one sampling-based, one from a pre-computed
     * partition file).
     *
     * @param args args[0] = input SequenceFile path,
     *             args[1] = output path (deleted first if it already exists)
     * @throws Exception on job-configuration or HDFS failures
     */
    public static void main(String[] args) throws Exception {

        // Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException.
        if (args.length < 2) {
            System.err.println("Usage: TemperatureByYearApp <input path> <output path>");
            System.exit(2);
        }

        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);

        // Map and final output types are all IntWritable, matching the SequenceFile records.
        job.setMapOutputKeyClass(IntWritable.class);
        job.setMapOutputValueClass(IntWritable.class);
        job.setOutputKeyClass(IntWritable.class);
        job.setOutputValueClass(IntWritable.class);

        job.setJarByClass(TemperatureByYearApp.class);
        job.setMapperClass(TBYMapper.class);
        job.setReducerClass(TBYReducer.class);

        // Remove a stale output directory so the job does not abort on startup.
        Path outputPath = new Path(args[1]);
        FileSystem fs = FileSystem.get(conf);
        if (fs.exists(outputPath)) {
            fs.delete(outputPath, true);
        }

        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, outputPath);

        // Input is a SequenceFile of <IntWritable, IntWritable> records.
        job.setInputFormatClass(SequenceFileInputFormat.class);

        job.setNumReduceTasks(4);

        // TotalOrderPartitioner (shipped with MapReduce) routes keys by range so
        // the concatenation of the 4 reducer outputs is globally sorted.
        job.setPartitionerClass(TotalOrderPartitioner.class);

        // Option A (sampling): derive the partition cut points from a random sample,
        // e.g. new InputSampler.RandomSampler<IntWritable, IntWritable>(0.5, 1000, 1)
        // followed by InputSampler.writePartitionFile(job, sampler).
        // NOTE(review): the previous code built the sampler with <Text, Text> type
        // parameters, which do not match this job's IntWritable input types and
        // would fail at sampling time if enabled; the unused local was removed.

        // Option B (pre-computed): point the partitioner at an existing cut-point file.
        TotalOrderPartitioner.setPartitionFile(job.getConfiguration(), new Path("file:///D://part.seq"));

        job.waitForCompletion(true);

        // Demo of the JobSubmit helper: first let it derive cut points by sampling
        // (freq = 0.5, numSamples = 1000, maxSplitsSampled = 1) ...
        Object[] samplerPara = {0.5, 1000, 1};
        JobSubmit.submitJobByCriticalPoint(
                TemperatureByYearApp.class,
                args,
                true,
                samplerPara,
                4,
                SequenceFileInputFormat.class
        );

        // ... then resubmit reusing the pre-computed partition file.
        JobSubmit.submitJobByCriticalPoint(
                TemperatureByYearApp.class,
                args,
                false,
                "file:///D://part.seq",
                4,
                SequenceFileInputFormat.class
        );
    }


}
