package com.shujia.MR;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Objects;

public class MapReduce03_02 {
    // 自定义Writable类 进行自定义排序
    /**
     * Composite MapReduce key used for custom sorting.
     *
     * Sort order: xmlb (project category) ascending, then dkje (loan amount)
     * descending, then id ascending. The id tiebreaker matters: without it,
     * two distinct records sharing the same xmlb and dkje compare as equal,
     * and the framework's sort/group comparator would merge them — silently
     * dropping rows from the output.
     *
     * equals/hashCode are consistent with compareTo so that the default
     * HashPartitioner routes equal keys to the same reducer.
     */
    public static class MyWritable implements WritableComparable<MyWritable> {
        private String xmlb; // project category (项目类别)
        private String id;
        private Double dkje; // loan amount (贷款金额)

        // No-arg constructor required by Hadoop's reflection-based deserialization.
        public MyWritable() {

        }

        public MyWritable(String xmlb, String id, Double dkje) {
            this.xmlb = xmlb;
            this.id = id;
            this.dkje = dkje;
        }

        @Override
        public int compareTo(MyWritable o) {
            int i = this.xmlb.compareTo(o.xmlb);
            if (i != 0) {
                return i;
            }
            // Descending by amount: compare the other object's value first.
            int j = Double.compare(o.dkje, this.dkje);
            if (j != 0) {
                return j;
            }
            // Tiebreak on id so distinct records are never treated as equal.
            return this.id.compareTo(o.id);
        }

        /** Serializes the key fields in a fixed order for the shuffle. */
        @Override
        public void write(DataOutput dataOutput) throws IOException {
            dataOutput.writeUTF(this.xmlb);
            dataOutput.writeUTF(this.id);
            dataOutput.writeDouble(this.dkje);
        }

        /** Deserializes the key fields; must mirror the order used in write(). */
        @Override
        public void readFields(DataInput dataInput) throws IOException {
            this.xmlb = dataInput.readUTF();
            this.id = dataInput.readUTF();
            this.dkje = dataInput.readDouble();
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (!(obj instanceof MyWritable)) {
                return false;
            }
            MyWritable other = (MyWritable) obj;
            return Objects.equals(this.xmlb, other.xmlb)
                    && Objects.equals(this.id, other.id)
                    && Objects.equals(this.dkje, other.dkje);
        }

        @Override
        public int hashCode() {
            return Objects.hash(this.xmlb, this.id, this.dkje);
        }

        @Override
        public String toString() {
            return this.xmlb + "|" + this.id + "|" + this.dkje;
        }
    }

    /**
     * Parses each input line into a MyWritable composite key.
     *
     * Expected line format (output of the previous job): "xmlb,id\tdkje".
     * The key carries all the data, so the value is NullWritable; sorting is
     * done entirely by MyWritable.compareTo during the shuffle.
     */
    public static class MyMapper extends Mapper<LongWritable, Text, MyWritable, NullWritable> {
        @Override
        protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, MyWritable, NullWritable>.Context context) throws IOException, InterruptedException {
            // Split once and reuse — the original code re-split the line for
            // every field, tripling the string work per record.
            String[] columns = value.toString().split("\t");
            if (columns.length < 2) {
                // Skip malformed lines instead of killing the task with an
                // ArrayIndexOutOfBoundsException.
                return;
            }
            String[] keyParts = columns[0].split(",");
            if (keyParts.length < 2) {
                return;
            }
            String xmlb = keyParts[0];
            String id = keyParts[1];
            double sum_dkje = Double.parseDouble(columns[1]);

            MyWritable myWritable = new MyWritable(xmlb, id, sum_dkje);

            context.write(myWritable, NullWritable.get());
        }
    }

//    public static class MyReducer extends Reducer<Text, MyWritable, Text, MyWritable> {
//        @Override
//        protected void reduce(Text key, Iterable<MyWritable> values, Reducer<Text, MyWritable, Text, MyWritable>.Context context) throws IOException, InterruptedException {
//            for (MyWritable value : values) {
//                context.write(key, value);
//            }
//        }
//    }

    /**
     * Job driver: sorts the output of job 3-1 by category asc / amount desc.
     *
     * @param args optional overrides: args[0] = input path, args[1] = output
     *             path; defaults preserve the original hard-coded locations.
     *             The process exit code reflects job success (0) or failure (1)
     *             so shell scripts and schedulers can detect failed runs.
     */
    public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
        // Paths are overridable from the command line, defaulting to the
        // original hard-coded locations for backward compatibility.
        String inputPath = args.length > 0 ? args[0] : "/data/MR/output3-1/";
        String outputPath = args.length > 1 ? args[1] : "/data/MR/output3-2";

        Configuration conf = new Configuration();

        Job job = Job.getInstance(conf);
        job.setJobName("MapReduce03_02");
        job.setJarByClass(MapReduce03_02.class);

        // Map side; no reducer is set, so the default identity reducer runs
        // and the shuffle sort (MyWritable.compareTo) orders the output.
        job.setMapperClass(MyMapper.class);
        job.setMapOutputKeyClass(MyWritable.class);
        job.setMapOutputValueClass(NullWritable.class);

        FileInputFormat.addInputPath(job, new Path(inputPath));

        // Delete a pre-existing output directory — MapReduce refuses to start
        // if the output path already exists.
        FileSystem fs = FileSystem.get(conf);
        Path path = new Path(outputPath);
        if (fs.exists(path)) {
            fs.delete(path, true);
        }

        FileOutputFormat.setOutputPath(job, path);

        // Propagate job success/failure as the process exit code; the
        // original always exited 0, hiding failed jobs from callers.
        // Usage: hadoop jar Competition2021-1.0.jar com.shujia.MR.MapReduce03_02
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
