package top.wintp.offlinedataanalysis.anlyser.mr.nu;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.MultipleColumnPrefixFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import cn.hutool.core.convert.Convert;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.util.ArrayUtil;
import cn.hutool.core.util.StrUtil;
import top.wintp.offlinedataanalysis.anlyser.dim.StatsUserDimension;
import top.wintp.offlinedataanalysis.anlyser.mr.MySqlOutputFormat;
import top.wintp.offlinedataanalysis.anlyser.value.map.TimeOutputValue;
import top.wintp.offlinedataanalysis.anlyser.value.reduce.MapWritableValue;
import top.wintp.offlinedataanalysis.common.ConstantValues;
import top.wintp.offlinedataanalysis.common.EventLogConstant;
import top.wintp.offlinedataanalysis.util.TimeUtil;

/**
 * 用户基本信息之新增用户分析：
 * 主要分析事件为launch的数据，launch事件是指第一次点击
 *
 * @author: pyfysf
 * <p>
 * @qq: 337081267
 * <p>
 * @CSDN: http://blog.csdn.net/pyfysf
 * <p>
 * @blog: http://wintp.top
 * <p>
 * @email: pyfysf@163.com
 * <p>
 * @time: 2019/9/5
 */
public class NewInstallUserRunner implements Tool {
    private static final Logger logger = LoggerFactory.getLogger(NewInstallUserRunner.class);

    /**
     * Expected format of the date argument, e.g. 20190905.
     */
    private static final String PARAM_DATE_PATTERN = "yyyyMMdd";

    /**
     * Exit/return code used when argument parsing or the job itself fails.
     */
    private static final int RUN_JOB_ERROR_CODE = -1;

    /**
     * Conf key under which the scan's startRow (millisecond timestamp, as a
     * string) is stored.
     */
    private static final String SCAN_START_ROW_KEY = "scan_start_row_key";

    /**
     * Conf key under which the scan's stopRow (millisecond timestamp, as a
     * string) is stored.
     */
    private static final String SCAN_STOP_ROW_KEY = "scan_stop_row_key";

    /**
     * HBase column family that holds the event-log columns.
     */
    private static final byte[] HBASE_TABLE_FAMILY_NAME = Bytes.toBytes(EventLogConstant.EVENT_LOGS_FAMILY_NAME);

    private Configuration conf;

    public static void main(String[] args) {
        try {
            int status = ToolRunner.run(new Configuration(), new NewInstallUserRunner(), args);
            System.exit(status);
        } catch (Exception e) {
            // Log at ERROR with the full stack trace (the original used
            // printStackTrace() plus an INFO message without the throwable),
            // and exit non-zero so schedulers can detect the failure — the
            // original fell through here and the JVM exited with status 0.
            logger.error("NewInstallUserRunner  main()   任务运行失败   ", e);
            System.exit(RUN_JOB_ERROR_CODE);
        }
    }

    /**
     * Builds and runs the new-install-user MapReduce job.
     *
     * @param args command-line arguments; may contain the analysis date
     *             following {@link ConstantValues#DATE_PARAM_SPLiT_TAG}
     * @return 0 on success, {@value #RUN_JOB_ERROR_CODE} on failure
     * @throws Exception if job construction or execution fails
     */
    @Override
    public int run(String[] args) throws Exception {
        try {
            // Parse the date argument and store the scan row range in conf.
            initArgs(args);
        } catch (Exception e) {
            logger.error("NewInstallUserRunner  run()   initArgs failed", e);
            return RUN_JOB_ERROR_CODE;
        }

        Job job = Job.getInstance(this.conf, "newInstallUserJob");
        job.setJarByClass(NewInstallUserRunner.class);

        // Mapper reads the event-log table through the filtered scan;
        // last argument 'false' disables dependency-jar shipping.
        TableMapReduceUtil.initTableMapperJob(EventLogConstant.HBASE_TABLE_NEME_EVENT_LOGS, getScan(),
                NewInstallUserMapper.class, StatsUserDimension.class, TimeOutputValue.class, job, false);

        job.setReducerClass(NewInstallUserReducer.class);
        job.setOutputKeyClass(StatsUserDimension.class);
        job.setOutputValueClass(MapWritableValue.class);

        // Results are written straight to MySQL.
        job.setOutputFormatClass(MySqlOutputFormat.class);

        // Use the error-code constant consistently (original returned a
        // hard-coded 1 here despite defining RUN_JOB_ERROR_CODE).
        return job.waitForCompletion(true) ? 0 : RUN_JOB_ERROR_CODE;
    }

    /**
     * Parses the analysis date from the command line and stores the
     * corresponding scan start/stop row timestamps in {@link #conf}.
     * Defaults to yesterday when no date argument is given.
     *
     * @param args command-line arguments
     * @throws Exception if args is empty or the date is not in
     *                   {@value #PARAM_DATE_PATTERN} format
     */
    private void initArgs(String[] args) throws Exception {
        if (ArrayUtil.isEmpty(args)) {
            throw new Exception("参数为空");
        }

        String date = null;
        // Stop at length - 1: the value must follow the flag, so a trailing
        // flag without a value no longer throws ArrayIndexOutOfBoundsException.
        for (int i = 0; i < args.length - 1; i++) {
            if (ConstantValues.DATE_PARAM_SPLiT_TAG.equalsIgnoreCase(args[i])) {
                date = args[i + 1];
                break;
            }
        }

        // No date supplied: default to analyzing yesterday's data.
        if (StrUtil.isBlank(date)) {
            date = DateUtil.yesterday().toString(PARAM_DATE_PATTERN);
        }

        // Validate the supplied/derived date string.
        if (!TimeUtil.isValidateRunningDate(date)) {
            throw new Exception(StrUtil.format("输入的参数不是规范的日期格式:{}", PARAM_DATE_PATTERN));
        }

        logger.info("NewInstallUserRunner  initArgs()   date:   " + date);

        // Start of the day, as epoch milliseconds.
        String startTime = Convert.toStr(DateUtil.parse(date).getTime());
        // Start of the following day — the exclusive scan upper bound.
        String endTime = Convert.toStr(DateUtil.offsetDay(DateUtil.parse(date), 1).getTime());

        this.conf.set(SCAN_START_ROW_KEY, startTime);
        this.conf.set(SCAN_STOP_ROW_KEY, endTime);
    }

    /**
     * Builds the HBase scan restricted to one day's rows, keeping only
     * 'launch' events and the columns the mapper needs.
     *
     * @return the configured scan
     */
    private Scan getScan() {
        // Row range was stored by initArgs().
        String startRow = this.conf.get(SCAN_START_ROW_KEY);
        String stopRow = this.conf.get(SCAN_STOP_ROW_KEY);

        Scan scan = new Scan(Bytes.toBytes(startRow), Bytes.toBytes(stopRow));

        // All filters must pass for a cell to be returned.
        FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ALL);

        // Only rows whose event column equals 'launch' (first visit).
        filterList.addFilter(new SingleColumnValueFilter(HBASE_TABLE_FAMILY_NAME,
                Bytes.toBytes(EventLogConstant.LOG_COLUMN_NAME_EVENT),
                CompareFilter.CompareOp.EQUAL, Bytes.toBytes(EventLogConstant.EventEnum.LAUNCH.alias)));

        // Restrict returned columns to the ones the mapper consumes.
        String[] multiColumn = {
                EventLogConstant.LOG_COLUMN_NAME_SERVER_TIME,
                EventLogConstant.LOG_COLUMN_NAME_EVENT,
                EventLogConstant.LOG_COLUMN_NAME_UUID,
                EventLogConstant.LOG_COLUMN_NAME_PLATFORM,
                EventLogConstant.LOG_COLUMN_NAME_BROWSER_NAME,
                EventLogConstant.LOG_COLUMN_NAME_VERSION,
        };
        filterList.addFilter(buildMultiColumnFilter(multiColumn));

        scan.setFilter(filterList);
        return scan;
    }

    /**
     * Builds a column-prefix filter matching any of the given column names.
     *
     * @param multiColumn column-name prefixes to keep
     * @return the prefix filter
     */
    private MultipleColumnPrefixFilter buildMultiColumnFilter(String[] multiColumn) {
        byte[][] prefixes = new byte[multiColumn.length][];
        for (int i = 0; i < multiColumn.length; i++) {
            prefixes[i] = Bytes.toBytes(multiColumn[i]);
        }
        return new MultipleColumnPrefixFilter(prefixes);
    }

    @Override
    public void setConf(Configuration conf) {
        // Wrap the Hadoop conf with HBase defaults, then apply env settings.
        this.conf = HBaseConfiguration.create(conf);
        initConf();
    }

    /**
     * Applies environment-specific (leader vs. backup cluster) ZooKeeper and
     * HDFS settings, then layers on the personal/production config files.
     */
    private void initConf() {
        if (ConstantValues.CURRENT_ENV_TYPE == ConstantValues.SALVER_ENV_TYPE) {
            // Backup cluster.
            this.conf.set("hbase.zookeeper.quorum", "node132,node133,node134");
            this.conf.set("hdfs.defaultFs", "hdfs://node132:9000");
        } else if (ConstantValues.CURRENT_ENV_TYPE == ConstantValues.LEADER_ENV_TYPE) {
            // Primary cluster.
            this.conf.set("hbase.zookeeper.quorum", "hadoop137,hadoop138,hadoop139");
            this.conf.set("hdfs.defaultFs", "hdfs://hadoop137:9000");
        }

        // Personal overrides are loaded last so they win.
        this.conf.addResource("person_conf.xml");
        this.conf.addResource("pro_env.xml");
    }

    @Override
    public Configuration getConf() {
        return this.conf;
    }
}
