package com.lxl.testHd.etl.three;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
 * Reduce-side join of toll-station travel records with the car-type table,
 * keyed on the car-type id. The joined output is: typeName \t distance \t plate.
 *
 * @author e_lixilin
 * @date 2022/2/22 13:53
 */
public class CarMr {
    static int TRAVEL_FLAG =0;
    static int TYPE_FLAG =1;
    static class TollStationMapper extends Mapper<LongWritable, Text, Text, CarBean> {
        String fileName;
        Text k = new Text();

        /**
         * 此方法被MapReduce框架仅且执行一次，在执行Map任务前，进行相关变量或者资源的集中初始化工作。
         * 若是将资源初始化工作放在方法map()中，导致Mapper任务在解析每一行输入时都会进行资源初始化工作，导致重复，程序运行效率不高！
         * @param context
         * @throws IOException
         * @throws InterruptedException
         */
        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
            // 根据文件切片信息 判断是车型表还是行驶表
            FileSplit inputSplit = (FileSplit)context.getInputSplit();
             fileName = inputSplit.getPath().getName();
        }

        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            CarBean bean = new CarBean();
            String[] fields = value.toString().split("\t");
            String carTypeId ;
            if (fileName.startsWith("carInfo")) {
                // 2018-09-15.10:36:09	2018-09-15.12:27:36	131	3	川ES65R1
                carTypeId = fields[fields.length - 2];
                bean.setCarTypeId(Integer.parseInt(carTypeId));
                //行驶信息的字段，放入JavaBean (cartype.txt字段的值用""、0.0f等初始值占位) ，作为map的VALUEOUT
                bean.setBean(fields[0],fields[1],Long.parseLong(fields[2]),fields[4],"",0.0f,TRAVEL_FLAG);
            }else {
                // 1	小型	1.5
                carTypeId = fields[0];
                //车型信息的字段，放入JavaBean (行驶信息字段的值用""、0等初始值占位) ，作为map的VALUEOUT
                bean.setBean("","",0,"",fields[1],Float.parseFloat(fields[2]),TYPE_FLAG);
            }
            // 用 carTypeId为key 写出到reduce
            k.set(carTypeId);
            context.write(k,bean);
        }

        @Override
        protected void cleanup(Context context) throws IOException, InterruptedException {
            super.cleanup(context);
        }
    }

    static class TollStationReducer extends Reducer<Text, CarBean, Text, NullWritable> {
        Text outKey = new Text();
        StringBuilder sb = new StringBuilder();
        NullWritable outVal = NullWritable.get();
        @Override
        protected void reduce(Text key, Iterable<CarBean> values, Context context) throws IOException, InterruptedException {
            // 遍历values 区分出 车型信息和行驶信息
            CarBean typeBean = new CarBean();
            List<CarBean> beanList = new ArrayList<CarBean>();
            for (CarBean carBean : values) {
                int flag = carBean.getFlag();
                if (TRAVEL_FLAG == flag) {
                    CarBean car = new CarBean();
                    car.setTollStationBean(carBean.getStartTime(), carBean.getEndTime(), carBean.getDistance(), carBean.getCarPlate());
                    beanList.add(car);
                }else {
                    typeBean.setTypeName(carBean.getTypeName());
                    typeBean.setPl(carBean.getPl());
                }
            }
            // 遍历行驶信息list,设置对应的车型数据
            for (CarBean carBean : beanList) {
                sb.setLength(0);
                sb.append(typeBean.getTypeName());
                sb.append("\t");
                sb.append(carBean.getDistance());
                sb.append("\t");
                sb.append(carBean.getCarPlate());
                outKey.set(sb.toString());
                // 写出数据
                context.write(outKey,outVal);
            }
        }
    }


    public static void main(String[] args) throws Exception {
        args = new String[]{"D:\\big-data\\etl\\join", "D:\\big-data\\etl\\join\\output"};
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);
        job.setJarByClass(CarMr.class);
        job.setMapperClass(TollStationMapper.class);
        job.setReducerClass(TollStationReducer.class);

        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(CarBean.class);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(NullWritable.class);
        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        boolean res = job.waitForCompletion(true);
        System.exit(res?0:1);
    }
}
