package top.wintp.offlinedataanalysis.anlyser.mr.nu;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.MultipleColumnPrefixFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import top.wintp.offlinedataanalysis.anlyser.dim.StatsUserDimension;
import top.wintp.offlinedataanalysis.anlyser.value.map.TimeOutputValue;
import top.wintp.offlinedataanalysis.common.ConstantValues;
import top.wintp.offlinedataanalysis.common.EventLogConstant;
import top.wintp.offlinedataanalysis.util.StringUtils;
import top.wintp.offlinedataanalysis.util.TimeUtil;

/**
 * @author: upuptop
 * <p>
 * qq: 337081267
 * <p>
 * CSDN:   http://blog.csdn.net/pyfysf
 * <p>
 * cnblogs:   http://www.cnblogs.com/upuptop
 * <p>
 * blog:   http://wintp.top
 * <p>
 * email:  pyfysf@163.com
 * <p>
 * time: 2019/8/23
 * <p>
 */
public class NewInstallUserTools implements Tool {
    private static final Logger logger = LoggerFactory.getLogger(NewInstallUserTools.class);

    /** Hadoop/HBase configuration; created in {@link #setConf(Configuration)} by ToolRunner. */
    private Configuration conf;

    /**
     * Builds and submits the new-install-user MapReduce job: reads launch events for one day
     * from the HBase event-log table and writes aggregated results through
     * {@code MySqlOutputFormat}.
     *
     * @param args command-line arguments; may contain a date parameter (see {@link #parserArgs})
     * @return 0 on job success, 1 on job failure, -1 when no valid date parameter is available
     * @throws Exception if job setup or submission fails
     */
    @Override
    public int run(String[] args) throws Exception {
        // Apply cluster connection settings before anything reads this.conf.
        initConfig();
        // Resolve the run date (falls back to yesterday when no argument is given).
        parserArgs(args);

        String date = this.conf.get(ConstantValues.PARAM_DATE_KEY);

        // parserArgs leaves the key unset when the supplied date was invalid.
        if (StringUtils.isEmpty(date)) {
            logger.info("NewInstallUserTools  parserArgs()   没有时间参数 ");
            return -1;
        }

        Job job = Job.getInstance(this.conf);
        job.setJarByClass(NewInstallUserTools.class);

        // Scan restricted to one day's rows plus launch-event/column filters.
        Scan scan = getScan();
        // Wire the HBase table as the map-side input.
        TableMapReduceUtil.initTableMapperJob(EventLogConstant.HBASE_TABLE_NEME_EVENT_LOGS, scan,
                NewInstallUserMapper.class, StatsUserDimension.class, TimeOutputValue.class, job);

        job.setReducerClass(NewInstallUserReducer.class);
        job.setOutputKeyClass(StatsUserDimension.class);
        job.setOutputValueClass(TimeOutputValue.class);

        // Results go to MySQL instead of HDFS.
        job.setOutputFormatClass(MySqlOutputFormat.class);

        return job.waitForCompletion(true) ? 0 : 1;
    }

    /**
     * Builds the HBase scan for one day's worth of rows.
     * <p>
     * The start row is the configured date converted to epoch milliseconds (parsed with the
     * {@code "yyyyMMdd"} pattern — NOTE(review): surrounding comments mention {@code yyyy-MM-dd};
     * confirm which format {@code TimeUtil.parseString2Long} actually expects) and the stop row
     * is start + one day, so exactly one day of data is covered. The filter from
     * {@link #buildFilter()} is attached.
     *
     * @return a scan bounded to the configured day with event/column filters applied
     */
    private Scan getScan() {
        String startRow = this.conf.get(ConstantValues.PARAM_DATE_KEY);
        // Convert the date string to its epoch-millisecond representation.
        startRow = String.valueOf(TimeUtil.parseString2Long(startRow, "yyyyMMdd"));

        // Stop row = start milliseconds + one day of milliseconds (exclusive upper bound).
        String stopRow = String.valueOf(Long.parseLong(startRow) + ConstantValues.DAY_OF_MILLISECONDS);

        Scan scan = new Scan(Bytes.toBytes(startRow), Bytes.toBytes(stopRow));

        scan.setFilter(buildFilter());

        return scan;
    }

    /**
     * Builds the scan filter list:
     * <ul>
     *   <li>a {@link SingleColumnValueFilter} keeping only rows whose event column equals the
     *       LAUNCH event alias;</li>
     *   <li>a {@link MultipleColumnPrefixFilter} restricting the returned columns to the
     *       fields the mapper actually needs.</li>
     * </ul>
     *
     * @return the combined filter list for the day scan
     */
    private Filter buildFilter() {
        FilterList filterList = new FilterList();

        // Keep only LAUNCH events: family / qualifier / comparator / expected value.
        SingleColumnValueFilter singleColumnValueFilter = new SingleColumnValueFilter(Bytes.toBytes(EventLogConstant.EVENT_LOGS_FAMILY_NAME),
                Bytes.toBytes(EventLogConstant.LOG_COLUMN_NAME_EVENT),
                CompareFilter.CompareOp.EQUAL, Bytes.toBytes(EventLogConstant.EventEnum.LAUNCH.alias));
        filterList.addFilter(singleColumnValueFilter);

        // Only fetch the columns the downstream mapper reads.
        String[] columns = {
                EventLogConstant.LOG_COLUMN_NAME_SERVER_TIME,
                EventLogConstant.LOG_COLUMN_NAME_BROWSER_NAME,
                EventLogConstant.LOG_COLUMN_NAME_BROWSER_VERSION,
                EventLogConstant.LOG_COLUMN_NAME_EVENT,
                EventLogConstant.LOG_COLUMN_NAME_UUID,
                EventLogConstant.LOG_COLUMN_NAME_PLATFORM,
        };

        byte[][] prefixes = new byte[columns.length][];
        for (int i = 0; i < columns.length; i++) {
            prefixes[i] = Bytes.toBytes(columns[i]);
        }

        filterList.addFilter(new MultipleColumnPrefixFilter(prefixes));

        return filterList;
    }

    /**
     * Resolves the run-date parameter and stores it under
     * {@code ConstantValues.PARAM_DATE_KEY}.
     * <p>
     * With no arguments, yesterday's date is used. Otherwise the value following the
     * date-split tag is taken; if it fails {@code TimeUtil.isValidateRunningDate} the key is
     * left unset so {@link #run} can abort.
     *
     * @param args raw command-line arguments; may be {@code null} or empty
     */
    private void parserArgs(String[] args) {
        // No arguments: default to yesterday.
        if (args == null || args.length == 0) {
            this.conf.set(ConstantValues.PARAM_DATE_KEY, TimeUtil.getYesterday());
            return;
        }

        // Find the value that follows the date-split tag.
        // Bound by length - 1 so args[i + 1] can never go out of range
        // (the original looped to args.length and could throw when the tag was last).
        String date = "";
        for (int i = 0; i < args.length - 1; i++) {
            if (ConstantValues.DATE_PARAM_SPLiT_TAG.equalsIgnoreCase(args[i])) {
                date = args[i + 1];
                break;
            }
        }

        if (!TimeUtil.isValidateRunningDate(date)) {
            logger.info("NewInstallUserTools  parserArgs()   时间参数格式应为: yyyyMMdd   ");
            return;
        }

        logger.info("NewInstallUserTools  parserArgs()   输入的时间参数为:   {}", date);

        this.conf.set(ConstantValues.PARAM_DATE_KEY, date);
    }

    /**
     * Applies cluster connection settings (ZooKeeper quorum for HBase, default HDFS
     * filesystem). Defaults target the local cluster; when the environment type equals
     * {@code LEADER_ENV_TYPE} the values are overridden with the leader cluster's addresses.
     * NOTE(review): addresses are hard-coded — consider moving them to external configuration.
     */
    private void initConfig() {
        // Local/default cluster settings.
        this.conf.set("hbase.zookeeper.quorum", "node132,node133,node134");
        this.conf.set("fs.defaultFS", "hdfs://node132:9000");

        if (ConstantValues.CURRENT_ENV_TYPE == ConstantValues.LEADER_ENV_TYPE) {
            // Leader-environment cluster settings override the defaults.
            this.conf.set("hbase.zookeeper.quorum", "hadoop137,hadoop138,hadoop139");
            this.conf.set("fs.defaultFS", "hdfs://hadoop137:9000");
        }
    }

    /**
     * Stores an HBase-aware copy of the given configuration. Called by {@code ToolRunner}
     * before {@link #run}.
     */
    @Override
    public void setConf(Configuration conf) {
        this.conf = HBaseConfiguration.create(conf);
    }

    @Override
    public Configuration getConf() {
        return this.conf;
    }

    /**
     * Entry point. Propagates the tool's return code as the process exit status so a failed
     * job is visible to schedulers (the original discarded it, always exiting 0).
     */
    public static void main(String[] args) throws Exception {
        System.exit(ToolRunner.run(new NewInstallUserTools(), args));
    }

}
