package top.wintp.offlinedataanalysis.etl.mr;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;

import top.wintp.offlinedataanalysis.common.ConstantValues;
import top.wintp.offlinedataanalysis.common.EventLogConstant;
import top.wintp.offlinedataanalysis.util.StringUtils;
import top.wintp.offlinedataanalysis.util.TimeUtil;

/**
 * description:
 * <p>
 * @author:  upuptop
 * <p>
 * qq: 337081267
 * <p>
 * CSDN:   http://blog.csdn.net/pyfysf
 * <p>
 * cnblogs:   http://www.cnblogs.com/upuptop
 * <p>
 * blog:   http://wintp.top
 * <p>
 * email:  pyfysf@163.com
 * <p>
 * time: 2019/08/2019/8/13
 * <p>
 */
/**
 * MapReduce driver for the event-log ETL job: reads raw log files from HDFS
 * (path derived from a {@code -d yyyyMMdd} argument, defaulting to yesterday)
 * and writes {@link Put}s into the HBase event-log table via a map-only job.
 *
 * <p>Run through {@link ToolRunner} so {@link #setConf(Configuration)} is
 * invoked before {@link #run(String[])}.
 */
public class AnalyserETLTool implements Tool {
    private static final Logger logger = LoggerFactory.getLogger(AnalyserETLTool.class);

    private Configuration conf;

    /**
     * Configures and submits the ETL job.
     *
     * @param args command-line arguments; {@code -d yyyyMMdd} selects the log date
     * @return 0 on success, 1 on failure
     * @throws Exception if job setup or execution fails
     */
    @Override
    public int run(String[] args) throws Exception {
        initConf(args);

        Job job = Job.getInstance(conf);
        job.setJarByClass(AnalyserETLTool.class);

        job.setMapOutputKeyClass(NullWritable.class);
        job.setMapOutputValueClass(Put.class);

        job.setMapperClass(AnalyserETLMapper.class);

        // Map-only job: the mapper emits Puts directly; no reduce phase runs.
        job.setNumReduceTasks(0);

        // No reduce logic, but the HBase table output still has to be wired up
        // through the table-reducer initialization.
        TableMapReduceUtil.initTableReducerJob(EventLogConstant.HBASE_TABLE_NEME_EVENT_LOGS, null, job, null,
                null, null, null, false);

        this.setJobInputPath(job);

        return job.waitForCompletion(true) ? 0 : 1;
    }

    /**
     * Resolves the configured HDFS input path and registers it with the job.
     *
     * @param job the job to receive the input path
     * @throws IOException if the filesystem cannot be reached
     * @throws RuntimeException if the configured input path does not exist
     */
    public void setJobInputPath(Job job) throws IOException {
        String dateParam = this.conf.get(ConstantValues.LOG_INPUT_PATH_KEY);
        Path inputPath = new Path(dateParam);

        // NOTE: do NOT close this FileSystem. FileSystem.get(conf) returns a
        // cached instance shared across the JVM; closing it here would break
        // later HDFS access during job submission. Likewise, IOExceptions must
        // propagate (the method declares them) instead of being swallowed —
        // otherwise the job would run with no input path set.
        FileSystem fs = FileSystem.get(conf);
        if (!fs.exists(inputPath)) {
            throw new RuntimeException("文件路径不存在：" + dateParam);
        }
        FileInputFormat.setInputPaths(job, inputPath);

        // Input/output could also be supplied dynamically, e.g.:
        //FileSystem fs = FileSystem.get(conf);
        //FileInputFormat.setInputPaths(job, new Path("hdfs://node132:9000/log/*"));
        //FileOutputFormat.setOutputPath(job, new Path("hdfs://node132:9000/out/" + System.currentTimeMillis()));
    }


    /**
     * Applies environment-specific cluster addresses and derives the log input
     * path from the {@code -d yyyyMMdd} argument (yesterday when absent).
     *
     * @param args command-line arguments as passed to {@link #run(String[])}
     */
    private void initConf(String[] args) {
        // Select cluster endpoints by environment.
        if (ConstantValues.CURRENT_ENV_TYPE == ConstantValues.SALVER_ENV_TYPE) {
            // Secondary (dev) environment.
            conf.set("fs.defaultFS", "hdfs://node132:9000");
            //conf.set("yarn.resourcemanager.hostname", "hdfs://node132:9000");
            conf.set("hbase.zookeeper.quorum", "node132,node133,node134");

        } else {
            // Primary environment.
            conf.set("fs.defaultFS", "hdfs://hadoop137:9000");
            //conf.set("yarn.resourcemanager.hostname", "hdfs://node132:9000");
            conf.set("hbase.zookeeper.quorum", "hadoop137,hadoop138,hadoop139");
        }


        // Expected form: -d 20190802

        String dateParam = "";

        // Stop at length - 1: the flag needs a following value, and this guards
        // against ArrayIndexOutOfBoundsException when "-d" is the last argument.
        for (int i = 0; i < args.length - 1; i++) {
            if (ConstantValues.DATE_PARAM_SPLiT_TAG.equalsIgnoreCase(args[i])) {
                // The value follows the flag.
                dateParam = args[i + 1];
                break;
            }
        }

        logger.info("AnalyserETLTool  initConf()   参数日期为:   {}", dateParam);

        if (StringUtils.isEmpty(dateParam)) {
            // Default to yesterday's logs when no date was supplied.
            dateParam = TimeUtil.getYesterday("yyyyMMdd");
        }

        // Derive the HDFS input directory for the selected date.
        String inputPath = "/log/" + dateParam;

        this.conf.set(ConstantValues.LOG_INPUT_PATH_KEY, inputPath);
    }

    public static void main(String[] args) throws Exception {
        //System.setProperty("HADOOP_USER_NAME", "shaofei");

        int status = ToolRunner.run(new AnalyserETLTool(), args);
        System.exit(status);
    }

    /** Wraps the supplied configuration with HBase defaults; called by ToolRunner before run(). */
    @Override
    public void setConf(Configuration configuration) {
        this.conf = HBaseConfiguration.create(configuration);
    }

    @Override
    public Configuration getConf() {
        return this.conf;
    }


}
