package my.io.hadoop.jobs.hdfs;

import cn.hutool.core.util.StrUtil;
import lombok.RequiredArgsConstructor;
import my.io.hadoop.Main;
import my.io.hadoop.jobs.JobComponent;
import my.io.hadoop.jobs.MapToReduce;
import my.io.hadoop.utils.DateUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.springframework.stereotype.Component;

import java.io.IOException;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.time.LocalDateTime;
import java.util.Arrays;

@Component
@RequiredArgsConstructor
public class HdfsJobByStrokeMan implements JobComponent {

    private final Configuration configuration;

    private final FileSystem fs;

    /** Expected columns of the input CSV (time, age, gender, then six cities). */
    public static final String[] INPUT_HEADER = new String[]{ "时间", "年龄", "性别","北京","上海","广州","深圳","武汉","西安"};
    public static final String[] OUTPUT_HEADER = INPUT_HEADER;

    /**
     * Configures and runs the "stroke count by gender" MapReduce job.
     *
     * @param jobName job name; also used as the HDFS output sub-directory and as the
     *                input sub-path fallback when {@code inPath} is blank
     * @param inPath  input sub-path under {@code HDFS_INPUT_BASE_PATH}; may be blank
     * @throws IOException            on HDFS access failure
     * @throws InterruptedException   if the job is interrupted while waiting
     * @throws ClassNotFoundException if the job classes cannot be resolved
     * @throws IllegalStateException  if the job runs but does not complete successfully
     */
    @Override
    public void runJob(String jobName, String inPath) throws IOException, InterruptedException, ClassNotFoundException {
        Job job = Job.getInstance(configuration, jobName);

        job.setJarByClass(Main.class);

        job.setMapperClass(JobMap.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(MapToReduce.class);

        job.setReducerClass(JobReducer.class);
        // BUG FIX: previously re-set the MAP output classes here; the reducer
        // emits <NullWritable, Text>, so the job's final output types were wrong.
        job.setOutputKeyClass(NullWritable.class);
        job.setOutputValueClass(Text.class);

        // 设置输入输出 — default the input sub-path to the job name when blank.
        inPath = StrUtil.blankToDefault(inPath, jobName);
        FileInputFormat.addInputPath(job, new Path(HDFS_INPUT_BASE_PATH, inPath));

        Path outPath = new Path(HDFS_OUT_BASE_PATH, jobName);
        if (fs.exists(outPath)) {
            // MapReduce refuses to start if the output directory already exists.
            fs.delete(outPath, true);
        }
        FileOutputFormat.setOutputPath(job, outPath);

        // 运行任务 — fail loudly instead of silently ignoring an unsuccessful run.
        if (!job.waitForCompletion(true)) {
            throw new IllegalStateException("MapReduce job did not complete successfully: " + jobName);
        }
    }

    /**
     * Mapper: for each valid CSV record, emits (stroke flag, {gender: 1}).
     * Skips the header line, blank lines, short rows, and records with
     * missing values ("Unknown" in column 10, "N/A" in column 9).
     */
    public static class JobMap extends Mapper<LongWritable, Text, Text, MapToReduce> {

        /** Highest column index read below is row[11], so a valid row needs 12 fields. */
        private static final int MIN_COLUMNS = 12;

        private final Text k = new Text();

        @Override
        protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, MapToReduce>.Context context) throws IOException, InterruptedException {
            // 读头文件 — byte offset 0 is the CSV header; also skip empty lines.
            if (key.get() == 0 || value.getLength() == 0) {
                return;
            }

            String[] row = value.toString().split(",");
            // BUG FIX: the old guard compared against INPUT_HEADER.length (9),
            // yet row[9], row[10] and row[11] are read below — rows with 9–11
            // fields caused ArrayIndexOutOfBoundsException.
            if (row.length < MIN_COLUMNS) {
                return;
            }

            // Drop records with missing values in the filtered columns.
            if (row[10].equals("Unknown") || row[9].equals("N/A")) {
                return;
            }

            // Group by the stroke flag; count one per recognized gender.
            k.set(row[11]);
            String gender = row[1];
            if (gender.equals("Male") || gender.equals("Female")) {
                MapToReduce entity = new MapToReduce();
                entity.set(gender, 1);
                context.write(k, entity);
            }
        }
    }

    /**
     * Reducer: sums the per-gender counts for each stroke-flag group and
     * writes CSV rows "strokeFlag,maleCount,femaleCount" under a header line.
     */
    public static class JobReducer extends Reducer<Text, MapToReduce, NullWritable, Text> {

        /** Output columns; index 0 is the group-key header, 1..2 are the summed fields. */
        private static final String[] HEADER = {"中风", "Male", "Female"};

        @Override
        protected void setup(Reducer<Text, MapToReduce, NullWritable, Text>.Context context) throws IOException, InterruptedException {
            // Emit the CSV header once per reducer task.
            context.write(null, new Text(StrUtil.join(",", "中风","Male","Female")));
        }

        @Override
        protected void reduce(Text key, Iterable<MapToReduce> values, Reducer<Text, MapToReduce, NullWritable, Text>.Context context) throws IOException, InterruptedException {
            BigDecimal[] sumValues = new BigDecimal[2];
            Arrays.fill(sumValues, BigDecimal.ZERO);

            for (MapToReduce value : values) {
                for (int i = 0; i < sumValues.length; i++) {
                    // BUG FIX: dropped the old `.equals("null")` clause — a BigDecimal
                    // can never equal a String, so it was dead code. Only a null check
                    // is meaningful here.
                    BigDecimal v = value.getBigDecimal(HEADER[i + 1]);
                    if (v != null) {
                        sumValues[i] = sumValues[i].add(v);
                    }
                }
            }

            context.write(null, new Text(StrUtil.join(",", key, sumValues)));
        }
    }
}
