
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.File;
import java.io.IOException;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
public class test {

    /** Maximum number of records emitted per order id by the reducer. */
    private static final int TOP_N = 3;

    /**
     * Mapper: parses CSV lines of the form
     * {@code orderId,userId,productName,price,quantity} and emits
     * (orderId, record) pairs, where the record's price field holds
     * price * quantity for that line.
     */
    public static class testMap extends Mapper<LongWritable, Text, Text, MapReduce> {
        // Reused output key — avoids allocating a Text per input record.
        Text text = new Text();

        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            String[] split = value.toString().split(",");
            // Guard against malformed lines so one bad row does not fail the task.
            if (split.length < 5) {
                return;
            }
            // Order id
            String id = split[0];
            // User id
            String ids = split[1];
            // Product name
            String name = split[2];
            // Line total = unit price * quantity
            Double price = Double.parseDouble(split[3]) * Double.parseDouble(split[4]);
            text.set(id);
            // context.write serializes immediately, so the reused Text can be
            // passed directly — no defensive copy needed.
            context.write(text, new MapReduce(id, ids, name, price));
        }
    }

    /**
     * Reducer: for each order id, sorts the grouped records (relies on
     * {@code MapReduce} implementing {@code Comparable}) and writes out at
     * most {@link #TOP_N} of them, with NullWritable values.
     */
    public static class testReduce extends Reducer<Text, MapReduce, MapReduce, NullWritable> {
        @Override
        protected void reduce(Text key, Iterable<MapReduce> values, Context context) throws IOException, InterruptedException {
            List<MapReduce> list = new ArrayList<MapReduce>();
            // Hadoop reuses the same value object across the iterator, so each
            // element MUST be deep-copied before being stored in the list.
            for (MapReduce value : values) {
                list.add(new MapReduce(value.getId(), value.getIds(), value.getName(), value.getPrice()));
            }
            Collections.sort(list);
            // Emit at most TOP_N records for this key.
            int limit = Math.min(TOP_N, list.size());
            for (int i = 0; i < limit; i++) {
                context.write(list.get(i), NullWritable.get());
            }
        }
    }

    /**
     * Configures and submits the job. Input and output paths may be supplied
     * as {@code args[0]} and {@code args[1]}; when absent, the original
     * hard-coded defaults are used (backward compatible). Exits 0 on job
     * success and 1 on failure or exception.
     */
    public static void main(String[] args) {
        // Generalized: take paths from the command line when given,
        // otherwise fall back to the original hard-coded locations.
        String input = args.length > 0 ? args[0] : "F:\\all\\a\\a.txt";
        String output = args.length > 1 ? args[1] : "F:\\all\\a\\b.txt";
        Configuration conf = new Configuration();
        try {
            Job job = Job.getInstance(conf);
            job.setJarByClass(test.class);

            job.setMapperClass(testMap.class);
            job.setReducerClass(testReduce.class);

            job.setMapOutputKeyClass(Text.class);
            job.setMapOutputValueClass(MapReduce.class);

            job.setOutputKeyClass(MapReduce.class);
            job.setOutputValueClass(NullWritable.class);
            job.setNumReduceTasks(1);

            // The job fails if the output directory already exists, so remove
            // any leftover from a previous run first.
            File file = new File(output);
            if (file.exists()) {
                FileUtils.deleteDirectory(file);
            }
            // Input data path for the MapReduce job.
            FileInputFormat.addInputPath(job, new Path(input));
            // Output directory written by the MapReduce job.
            FileOutputFormat.setOutputPath(job, new Path(output));
            // Exit with the job's completion status.
            System.exit(job.waitForCompletion(true) ? 0 : 1);
        } catch (Exception e) {
            e.printStackTrace();
            // Previously fell through and exited 0 on failure; report it.
            System.exit(1);
        }
    }
}


