package org.wolfengi.handler.main;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.db.DBOutputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.wolfengi.handler.common.Constants;
import org.wolfengi.handler.util.JDBCUtils;
import org.wolfengi.handler.entity.TableOperForShipDict;
import org.wolfengi.handler.mapred.FaultMapper;
import org.wolfengi.handler.mapred.FaultReducer;
import org.wolfengi.handler.util.HDFSUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.IOException;
import java.sql.SQLException;

/**
 * Driver class: uploads local source files to HDFS, clears the previous output,
 * and runs the fault-analysis MapReduce jobs that write their results to a database.
 *
 * @Author: wolfengi
 * @Date: Created in 10:29 2020/5/18
 * @Modified By:
 */
public class HandlerMain extends Configured implements Tool {
    private final static Logger log = LoggerFactory.getLogger(HandlerMain.class);

    public static void main(String[] args) {
        System.setProperty("HADOOP_USER_NAME", Constants.HDFSUSER);
        if (args.length == 2) {
            Constants.inputPath = args[0];
            Constants.ouputPath = args[1];
        }
        uploadFile();
        HDFSUtils.deleteRes(Constants.ouputPath);
        try {

            ToolRunner.run(new HandlerMain(), args);
            log.warn("[!] Map and Reducer task executed successfully.");
        } catch (Exception e) {
            log.error("[x] Map and Reducer task execute failed.", e);
        }
    }

    @Override
    public int run(String[] args) {
        try {
//            字典
            if (Constants.DICTLOAD) {
                jobTask(Text.class, IntWritable.class, TableOperForShipDict.class, NullWritable.class, FaultMapper.ShipDictMapper.class, FaultReducer.ShipDictReduce.class,
                        Constants.F_SHIP_DICT, Constants.F_SHIP_DICT_FIELDS);
            }
//            诊断故障船型
            jobTask(Text.class, IntWritable.class, Text.class, IntWritable.class, FaultMapper.ShipTypeCountMapper.class, FaultReducer.ShipTypeCountReduce.class,
                    Constants.F_SHIP_TYPE_COUNT, Constants.F_SHIP_TYPE_COUNT_FIELDS);
//            诊断故障原因
            jobTask(Text.class, IntWritable.class, Text.class, IntWritable.class, FaultMapper.ShipFaultCountMapper.class, FaultReducer.ShipFaultCountReduce.class,
                    Constants.F_SHIP_FAULT_COUNT, Constants.F_SHIP_FAULT_COUNT_FIELDS);
//            诊断船龄
            jobTask(Text.class, IntWritable.class, Text.class, IntWritable.class, FaultMapper.ShipAgeCountMapper.class, FaultReducer.ShipAgeCountReduce.class,
                    Constants.F_SHIP_AGE_COUNT, Constants.F_SHIP_AGE_COUNT_FIELDS);
//            诊断故障原因下的不同船型
            jobTask(Text.class, IntWritable.class, Text.class, IntWritable.class, FaultMapper.ShipFaultTypeCountMapper.class, FaultReducer.ShipFaultTypeCountReduce.class,
                    Constants.F_SHIP_FAULT_TYPE_COUNT, Constants.F_SHIP_FAULT_TYPE_COUNT_FIELDS);
//            诊断故障与状态
            jobTask(Text.class, IntWritable.class, Text.class, IntWritable.class, FaultMapper.ShipGroupCountMapper.class, FaultReducer.ShipGroupCountReduce.class,
                    Constants.F_SHIP_GROUP_COUNT, Constants.F_SHIP_GROUP_COUNT_FIELDS);
//            出行率
            jobTask(Text.class, IntWritable.class, Text.class, IntWritable.class, FaultMapper.ShipTravelRateCountMapper.class, FaultReducer.ShipTravelRateCountReduce.class, Constants.F_SHIP_TRAVEL_RATE_COUNT, Constants.F_SHIP_TRAVEL_RATE_COUNT_FIELDS);
            //概览
            jobTask(Text.class, IntWritable.class, Text.class, IntWritable.class, FaultMapper.ShipOverviewMapper.class, FaultReducer.ShipOverviewReduce.class, Constants.F_SHIP_OVERVIEW_COUNT, Constants.F_SHIP_OVERVIEW_COUNT_FIELDS);
            return 1;
        } catch (Exception e) {
            e.printStackTrace();
        }
        return 0;
    }


    private static int jobTask(Class<?> mapKeyOutClass, Class<?> mapValOutClass, Class<?> reduceKeyOutClass, Class<?> recuceValOutClass,
                               Class<? extends Mapper> mapClass, Class<? extends Reducer> reduceClass, String tablename, String[] fields) throws IOException, ClassNotFoundException, InterruptedException, SQLException {
        log.info("[-] executing " + mapClass.getTypeName());
        JDBCUtils.truncateTable(tablename);
        Configuration conf = new Configuration();
        /**
         * TODO: 在 windows 平台（开发）需要放开此此代码
         */
//        conf.set("mapreduce.app-submission.cross-platform", "true");
        DBConfiguration.configureDB(conf, Constants.DIRVERCLASS, Constants.URL, Constants.USERNAME, Constants.PASSWORD);

        Job job = Job.getInstance(conf);
        job.setJarByClass(HandlerMain.class);
        /**
         * TODO: 在编辑器（如：IDEA）中运行时，需要指定打包好的可执行jar包，否则无法找到自定义的 mapper 和 reduce 类
         */
//        job.setJar("D:/WorkSpace/IDEA_WorkSpace/faultForHadoop/out/artifacts/dataClean_jar/dataClean.jar");
        job.setMapperClass(mapClass);
        job.setMapOutputKeyClass(mapKeyOutClass);
        job.setMapOutputValueClass(mapValOutClass);

        job.setReducerClass(reduceClass);
        job.setOutputKeyClass(reduceKeyOutClass);
        job.setOutputValueClass(recuceValOutClass);

        FileInputFormat.setInputPaths(job, new Path(Constants.inputPath));
        FileOutputFormat.setOutputPath(job, new Path(Constants.ouputPath));

        DBOutputFormat.setOutput(job, tablename, fields);

        boolean b = job.waitForCompletion(true);
        return b ? 0 : 1;
    }

    private static void uploadFile() {
        File file = new File(Constants.LOCALPATH);
        File[] files = file.listFiles();
        for (File f : files) {
//            if (f.isFile() && f.getName().endsWith(".txt") && !HDFSUtils.hdfsFileIsExists(Constants.HDFSPATH + "/" + f.getName())) {
            if (f.isFile() && f.getName().endsWith(".txt") && Constants.ISLOAD) {
                HDFSUtils.uploadFileCharset(f.getPath(), Constants.HDFSPATH + "/" + f.getName(), "GBK");
            }
        }
    }
}
