package com.example.springboot1.file;

/**
 * @author ckj
 * @description MapReduce job that computes aggregate statistics over comma-separated numbers read from a text file
 * @date 2023-03-14 14:21
 */


import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

import java.io.IOException;

public class FileProcessorV2 {
    /** Single reduce key every number is grouped under, so one reducer sees all values. */
    private static final String GROUP_KEY = "key";

    /**
     * Configures and submits the MapReduce job that computes the maximum,
     * minimum, sum and average of the numbers in the input file.
     *
     * @param args optional: {@code args[0]} = input file, {@code args[1]} = output
     *             directory; when absent the original hard-coded paths are used,
     *             keeping the old invocation backward-compatible
     */
    public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "FileProcessor");
        // Required on a real cluster so Hadoop knows which jar to ship to the nodes.
        job.setJarByClass(FileProcessorV2.class);

        // Plain text in, plain text out.
        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);

        job.setMapperClass(ProcessorMapper.class);
        job.setReducerClass(ProcessorReducer.class);

        // Map emits (Text, LongWritable); reduce emits (Text, Text).
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(LongWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        // Paths may be supplied on the command line; defaults preserve the old behavior.
        // NOTE(review): Hadoop treats the output path as a directory and fails the job
        // if it already exists — "E:/result.txt" works but is really a directory name.
        String input = args.length >= 1 ? args[0] : "E:/numbers.txt";
        String output = args.length >= 2 ? args[1] : "E:/result.txt";
        TextInputFormat.addInputPath(job, new Path(input));
        TextOutputFormat.setOutputPath(job, new Path(output));

        // Submit and block until done; exit code reflects job success.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }

    /**
     * Splits each comma-separated input line into numbers and emits every number
     * under the single shared key so the reducer can aggregate over all of them.
     */
    public static class ProcessorMapper extends Mapper<LongWritable, Text, Text, LongWritable> {
        // Reused across map() calls — standard Hadoop idiom to avoid allocating
        // a fresh Writable for every record.
        private final Text outKey = new Text(GROUP_KEY);
        private final LongWritable outValue = new LongWritable();

        @Override
        public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            for (String token : value.toString().split(",")) {
                String numStr = token.trim();
                // Skip blanks from trailing commas, stray whitespace, or empty lines
                // instead of killing the task with a NumberFormatException.
                if (numStr.isEmpty()) {
                    continue;
                }
                outValue.set(Long.parseLong(numStr));
                context.write(outKey, outValue);
            }
        }
    }

    /**
     * Aggregates all numbers emitted by the mapper and writes max, min, sum and
     * average. (The original computed sum/count/average but never emitted them;
     * they are now written alongside max and min.)
     */
    public static class ProcessorReducer extends Reducer<Text, LongWritable, Text, Text> {
        @Override
        public void reduce(Text key, Iterable<LongWritable> values, Context context) throws IOException, InterruptedException {
            long maxVal = Long.MIN_VALUE;
            long minVal = Long.MAX_VALUE;
            long totalSum = 0;
            long numCount = 0;

            // Single pass over the grouped values: track extremes, sum, and count.
            for (LongWritable val : values) {
                long num = val.get();
                maxVal = Math.max(maxVal, num);
                minVal = Math.min(minVal, num);
                totalSum += num;
                numCount++;
            }

            // Hadoop only calls reduce() for keys with at least one value, but guard
            // anyway so an empty group can never divide by zero.
            if (numCount == 0) {
                return;
            }
            double avgVal = (double) totalSum / numCount;

            context.write(new Text("max"), new Text(String.valueOf(maxVal)));
            context.write(new Text("min"), new Text(String.valueOf(minVal)));
            context.write(new Text("sum"), new Text(String.valueOf(totalSum)));
            context.write(new Text("avg"), new Text(String.valueOf(avgVal)));
        }
    }
}
