package mxx.mr.job1;


import org.apache.commons.codec.digest.DigestUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

/**
 * MR1: first-stage job that aggregates per-day amounts for the
 * t_procedure_day_task table (product / workpiece / procedure / day).
 */
public class MR1 {

    public static class MR1Mapper extends Mapper<LongWritable, Text, Text, Text> {

        /** Minimum number of CSV columns a valid input line must have. */
        private static final int MIN_COLUMNS = 10;

        /**
         * Builds the composite grouping key "PARENT_SEQ_CODE_NODE_SEQ_CODE_ORDER_NUMBER_SAVE_DATE"
         * from one CSV line and emits the unchanged line as the value.
         *
         * Columns used: [0]=NODE_SEQ_CODE, [1]=PARENT_SEQ_CODE, [6]=SAVE_TIME, [9]=ORDER_NUMBER.
         */
        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {

            // limit -1 keeps trailing empty columns; plain split(",") would drop them
            // and shift/shorten the array
            String[] datas = value.toString().split(",", -1);

            // Skip malformed lines instead of failing the whole job with an
            // ArrayIndexOutOfBoundsException / StringIndexOutOfBoundsException.
            if (datas.length < MIN_COLUMNS || datas[6].length() < 10) {
                context.getCounter("MR1", "BAD_RECORDS").increment(1L);
                return;
            }

            String parentSeqCode = datas[1];
            String nodeSeqCode = datas[0];
            String orderNumber = datas[9];
            // keep only the date part (first 10 chars, yyyy-MM-dd) of SAVE_TIME
            String saveDate = datas[6].substring(0, 10);

            // composite key: product - workpiece - procedure - day
            Text outKey = new Text(parentSeqCode + "_" +
                    nodeSeqCode + "_" +
                    orderNumber + "_" +
                    saveDate);

            // value stays the original input line
            context.write(outKey, value);
        }
    }

    public static class MR1Reducer extends Reducer<Text, Text, Text, Text> {

        /**
         * Sums column [3] (per-record amount) over all records sharing the same
         * composite key and writes one output row:
         *   key   = SHA-1 hex digest of the composite key (stable row id)
         *   value = "PARENT_SEQ_CODE,NODE_SEQ_CODE,ORDER_NUMBER,SAVE_DATE,DAY_AMOUNT"
         */
        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
            // long accumulator: a day's total can exceed Integer.MAX_VALUE
            // (this resolves the old "use long or BigDecimal?" TODO)
            long dayAmount = 0L;
            for (Text value : values) {
                String[] datas = value.toString().split(",");
                // parseInt returns a primitive and avoids the boxing of Integer.valueOf
                dayAmount += Integer.parseInt(datas[3].trim());
            }

            // rebuild the value as CSV: the 4 key parts plus the daily total
            Text outValue = new Text(key.toString().replace("_", ",") + "," + dayAmount);

            // sha1Hex replaces the deprecated shaHex (same algorithm, same output)
            Text outKey = new Text(DigestUtils.sha1Hex(key.toString()));

            context.write(outKey, outValue);
        }
    }

    /**
     * Job driver: reads the raw CSV from args[0], aggregates daily amounts per
     * (product, workpiece, procedure, day) and writes the result to args[1].
     * Exits 0 on job success, 1 on job failure, 2 on bad usage.
     */
    public static void main(String[] args) throws Exception {

        // fail fast with a usage message instead of an ArrayIndexOutOfBoundsException
        if (args.length < 2) {
            System.err.println("Usage: MR1 <input path> <output path>");
            System.exit(2);
        }

        Configuration configuration = new Configuration();
        // set both the legacy MRv1 key and the current MRv2 key so the ","
        // output separator takes effect on any Hadoop version
        configuration.set("mapred.textoutputformat.separator", ",");
        configuration.set("mapreduce.output.textoutputformat.separator", ",");

        Path inputPath = new Path(args[0]);   // e.g. data/mr1_input/sql2.csv
        Path outputPath = new Path(args[1]);  // e.g. data/mr1_output

        // delete a pre-existing output directory, otherwise job submission fails
        FileSystem fileSystem = FileSystem.get(configuration);
        if (fileSystem.exists(outputPath)) {
            fileSystem.delete(outputPath, true);
        }

        Job job = Job.getInstance(configuration, "MR1:t_procedure_day_task");
        job.setJarByClass(MR1.class);

        job.setMapperClass(MR1Mapper.class);
        job.setReducerClass(MR1Reducer.class);

        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        FileInputFormat.setInputPaths(job, inputPath);
        FileOutputFormat.setOutputPath(job, outputPath);

        // propagate the job result instead of ignoring it: previously the
        // driver printed success and exited 0 even when the job failed
        boolean ok = job.waitForCompletion(true);
        System.out.println("---MR1:t_procedure_day_task:" + (ok ? "完成" : "失败") + "---");
        System.exit(ok ? 0 : 1);
    }
}
