package cn.com.coding.common.utils;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.springframework.stereotype.Component;

import java.io.IOException;

/**
 * Secondary data cleaning: re-sorts the output of a previous word-count job
 * by count. The mapper emits (count, item) so the MapReduce shuffle phase
 * sorts records by the {@link IntWritable} count key; the reducer writes the
 * sorted pairs back out unchanged.
 *
 * @author inke219223m
 */
@Component
public class OneSort {

    /**
     * Mapper: turns each input line of the form {@code "item\tcount"} into a
     * (count, item) pair so the framework sorts by count during the shuffle.
     */
    public static class Map extends Mapper<Object, Text, IntWritable, Text> {

        // Instance fields rather than static: Hadoop may run multiple mapper
        // instances in one JVM, and shared static mutable state between them
        // is unsafe. Reused per record to avoid per-call allocation.
        private final Text goods = new Text();

        private final IntWritable num = new IntWritable();

        /**
         * @param key     byte offset of the line within the split (unused)
         * @param value   one input line, expected form {@code "item\tcount"}
         * @param context output collector; receives (count, item)
         */
        @Override
        public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
            String line = value.toString();
            String[] fields = line.split("\t");
            // A line without a count column would previously throw
            // ArrayIndexOutOfBoundsException and kill the task; skip it.
            if (fields.length < 2) {
                return;
            }
            try {
                num.set(Integer.parseInt(fields[1]));
            } catch (NumberFormatException e) {
                // Non-numeric count: skip the malformed record instead of
                // failing the whole task.
                return;
            }
            goods.set(fields[0]);

            context.write(num, goods);
        }
    }

    /**
     * Reducer: identity pass-through. Writes each (count, item) pair in the
     * order delivered by the framework, which is sorted by count key.
     */
    public static class Reduce extends Reducer<IntWritable, Text, IntWritable, Text> {

        @Override
        public void reduce(IntWritable key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
            // Several items may share the same count; emit each one.
            for (Text val : values) {
                context.write(key, val);
            }
        }
    }

    /**
     * CLI entry point: runs the sort job and reports success.
     * (No throws clause needed: dataSort() handles all checked exceptions.)
     */
    public static void main(String[] args) {
        if (dataSort()) {
            System.out.println("操作成功");
        }
    }

    /**
     * Configures and submits the sort job, reading the previous job's output
     * from HDFS and writing to a timestamped output directory.
     *
     * @return {@code true} if the job completed successfully, {@code false}
     *         on configuration/submission failure or job failure
     */
    public static boolean dataSort() {
        try {
            System.setProperty("HADOOP_USER_NAME", "codingce");

            Configuration conf = new Configuration();
            conf.set("dfs.replication", "2");
            conf.set("dfs.client.socket-timeout", "300000");
            // Required so the client resolves datanodes by hostname (the
            // cluster is reached via a public address).
            conf.set("dfs.client.use.datanode.hostname", "true");

            Job job = Job.getInstance(conf, "OneSort");
            job.setJarByClass(OneSort.class);
            job.setMapperClass(Map.class);
            job.setReducerClass(Reduce.class);
            job.setOutputKeyClass(IntWritable.class);
            job.setOutputValueClass(Text.class);
            job.setInputFormatClass(TextInputFormat.class);
            job.setOutputFormatClass(TextOutputFormat.class);
            Path in = new Path("hdfs://8.130.17.56:8020/fly/out1/part-r-00000");
            // Timestamped output dir: MapReduce refuses to overwrite an
            // existing output path, so each run gets a fresh one.
            long startTs = System.currentTimeMillis();
            Path out = new Path("hdfs://8.130.17.56:8020/fly/sort_out/" + startTs);
            FileInputFormat.addInputPath(job, in);
            FileOutputFormat.setOutputPath(job, out);
            return job.waitForCompletion(true);
        } catch (IOException | ClassNotFoundException e) {
            e.printStackTrace();
            return false;
        } catch (InterruptedException e) {
            // Restore the interrupt status so callers can observe it.
            Thread.currentThread().interrupt();
            e.printStackTrace();
            return false;
        } finally {
            System.out.println("方法处理完毕");
        }
    }
}