package com.feidee.data.report.dao.impl;

import com.alibaba.fastjson.JSON;
import com.feidee.data.report.constant.Constant;
import com.feidee.data.report.exception.DataDaoException;
import com.feidee.data.report.exception.HiveSqlExecException;
import com.feidee.data.report.exception.MysqlSqlExecException;
import com.feidee.data.report.model.DbReturnModel;
import com.feidee.data.report.model.ReportConfiguration;
import com.feidee.data.report.model.ReportLogModel;
import com.feidee.data.report.model.ReportParam;
import com.feidee.data.report.process.AProcess;
import com.feidee.data.report.process.ProcessResult;
import com.feidee.data.report.util.*;
import com.feidee.data.report.util.SparkMonitor.CheckSqlStatus;
import com.feidee.fdcommon.configuration.CustomConfiguration;
import com.feidee.fdcommon.constant.CommonConstant;
import com.feidee.fdhadoop.logCollect.LogCollectService;
import org.apache.log4j.Logger;

import java.lang.reflect.Method;
import java.sql.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public abstract class BaseReportQueryDao {

    private static Logger logger = Logger.getLogger(BaseReportQueryDao.class);

    public String driverStr;
    public String urlStr;
    public String usernameStr;
    public String passwordStr;
    public String customUsernameStr;
    public String customPasswordStr;
    public String engineFlag;
    public Map<Integer, Map<String, String>> sparkConf = new HashMap<>();
    // spark环境轮询连接全局计数器
    private static AtomicInteger sparkConNum = new AtomicInteger(0);

    /**
     * Template method driving one full report run: setup -> execute the
     * engine-side SQL -> write the result set into MySQL -> post-processing
     * -> cleanup. The run flag is re-checked before each stage so a run
     * stopped by another actor exits early and reports a "stopped" log entry.
     *
     * @param reportconfiguration report definition (key, index, SQL text, timings)
     * @param rp                  per-run parameters (caller credentials, spark env, history flag)
     * @throws HiveSqlExecException  when the engine-side SQL fails
     * @throws MysqlSqlExecException when writing the result to MySQL fails
     */
    public void query(ReportConfiguration reportconfiguration, ReportParam rp) throws HiveSqlExecException,
            MysqlSqlExecException {

        Connection conn = null;
        Statement stmt = null;
        ResultSet rs = null;

        String reportKey = reportconfiguration.getReport_key();
        String reportIndex = reportconfiguration.getReport_index();
        long enqueueTime = reportconfiguration.getEnqueueTime();
        String runHistory = reportconfiguration.getRunHistory();
        // Log reporting: fill in the shared fields up front.
        ReportLogModel logs = new ReportLogModel();
        logs.setReport_key(reportKey);
        logs.setReport_index(reportIndex);
        logs.setEnqueueTime(ToolUtils.getTimeStr(enqueueTime));
        logs.setRunHistory(runHistory);

        try {
            // Preparation work (subclass hook).
            setup();

            // Execute the engine-side SQL (Hive/Spark/...).
            if (RunFlagController.checkRunFlag(reportKey, Thread.currentThread().getId())) {
                DbReturnModel dbModel = executeSql(reportconfiguration, logs, rp);
                conn = dbModel.getConn();
                stmt = dbModel.getStmt();
                rs = dbModel.getRs();
            }

            // Write the result set into MySQL.
            if (RunFlagController.checkRunFlag(reportKey, Thread.currentThread().getId())) {
                logger.info("准备将结果写入mysql...");
                writeResToMysql(reportconfiguration, rs, logs);
            } else {
                // Run was stopped externally: record cost time and ship a STOP entry.
                long curTimestamp = System.currentTimeMillis();
                String costTimeStr = ToolUtils.getCostTimeStr(curTimestamp - enqueueTime);
                logs.setCostTime(costTimeStr);
                logger.info("####### Thread " + Thread.currentThread().getId() + " --> Run KPI: "
                        + reportKey + "_" + reportIndex
                        + " Stop By Others! RunHistorySql:" + runHistory
                        + " ,ExecuteCostTime:" + costTimeStr + " #######");
                logCollectSend(logs, Constant.LOG_STOP, "Stopped by others");
            }

            // Post-processing (reflective process class, if configured).
            if (RunFlagController.checkRunFlag(reportKey, Thread.currentThread().getId())) {
                laterOperation(reportconfiguration);
            }

            // Cleanup work (subclass hook).
            cleanup();
        } finally {
            DbOperationUtil.cleanConnEnv(conn, stmt, rs);
        }
    }

    /**
     * Register a running Spark SQL under its environment slot and start the
     * elapsed-time monitor for it.
     *
     * @param runningKey     unique run key: reportKey_reportIndex_timestamp
     * @param curSparkConNum index of the Spark environment executing the SQL
     */
    private void recordSQLAndStartMonitor(String runningKey, int curSparkConNum) {
        String message = "运行环境: " + curSparkConNum + " 报表SQL: " + runningKey + " 记录运行标志成功！";
        Constant.RUNNING_SQL.get(curSparkConNum).add(runningKey);
        CheckSqlStatus.startSparkMonitor(curSparkConNum, runningKey);
        logger.info(message);
    }


    /**
     * Remove a Spark SQL's running flag from its environment slot once the
     * statement has finished (or failed).
     *
     * @param runningKey     unique run key: reportKey_reportIndex_timestamp
     * @param curSparkConNum index of the Spark environment that ran the SQL
     */
    private void removeSQLMonitorFlag(String runningKey, int curSparkConNum) {
        String message = "运行环境: " + curSparkConNum + " 报表SQL: " + runningKey + " 移除运行标志成功！";
        Constant.RUNNING_SQL.get(curSparkConNum).remove(runningKey);
        logger.info(message);
    }

    /**
     * Preparation hook invoked before the engine SQL runs; the default
     * implementation does nothing. Subclasses override this to initialize
     * engine-specific state.
     */
    public void setup() {

    }

    /**
     * Execute the report's engine-side SQL segments and hand back the live
     * JDBC objects so the caller can stream the final result set into MySQL.
     *
     * The configured SQL is split on Constant.SQL_SPLIT; every segment but
     * the last runs via execute(), the last via executeQuery() when the
     * report operation is "select". For the Spark engine each segment is
     * registered in a running-SQL registry and monitored while it runs.
     *
     * @param reportconfiguration report definition (SQL text, key, index, flags)
     * @param logs                shared log model; cost time is filled in on failure
     * @param rp                  per-run parameters (spark env index, history flag)
     * @return bundle of connection/statement/result set; caller must close them
     * @throws HiveSqlExecException when any segment fails
     */
    public DbReturnModel executeSql(ReportConfiguration reportconfiguration, ReportLogModel logs, ReportParam rp)
            throws HiveSqlExecException {
        Connection conn = null;
        Statement stmt = null;
        ResultSet rs = null;
        String isCustomed = reportconfiguration.getIsCustom(); // whether this is a custom report
        String[] sqlList = reportconfiguration.getHqlcontext().split(Constant.SQL_SPLIT);
        String isEnable = reportconfiguration.getIsEnable();
        String reportKey = reportconfiguration.getReport_key();
        String reportIndex = reportconfiguration.getReport_index();
        int k = 0; // index of the segment currently executing; identifies the broken one on failure
        try {
            // Obtain the engine connection.
            conn = getDbConnection(engineFlag, isCustomed, isEnable, rp);
            stmt = conn.createStatement();

            // NOTE(review): assumes rp != null here -- confirm all callers pass it.
            int curSparkConNum = rp.getCurSparkConNum();
            String history = rp.getHistory();
            // Per config-center switch, optionally disable Hive's MapJoin optimization.
            if (Constant.HIVE_ENG.equals(engineFlag) &&
                    "0".equals(CustomConfiguration.getString("hive.mapjoin.switch", "0"))) {
                stmt.execute("SET hive.auto.convert.join=FALSE");
            }

            // Force the MR execution engine for configured report keys, or for
            // report segments flagged for restart.
            if ((Constant.HIVE_ENG.equals(engineFlag) && CustomConfiguration.getString
                    ("hive.execution.engine.mr.reportkeys", "").contains(reportKey)
            || "true".equalsIgnoreCase(RunFlagController.reportIndexSQLIsRestartMap.get(reportKey+"_"+reportIndex)))) {
                logger.info("当前报表" + reportKey + "第" + reportIndex + "段正在以MR方式跑...");
                stmt.execute("SET hive.execution.engine=mr");
            }

            // Relax Hive strict-mode checks for configured report keys.
            if (Constant.HIVE_ENG.equals(engineFlag) &&
                    CustomConfiguration.getString("hive.strict.checks.reportkeys", "").contains(reportKey)) {
                logger.info("当前报表" + reportKey + "第" + reportIndex + "段正在开启非严格模式方式跑...");
                stmt.execute("SET hive.strict.checks.no.partition.filter=FALSE");
                stmt.execute("SET hive.mapred.mode=nonstrict");
            }

            for (k = 0; k < sqlList.length; k++) {
                String timeStr = ToolUtils.getTimeStr();
                String runningKey = reportKey + "_" + reportIndex + "_" + timeStr;
                try {
                    if (ToolUtils.isNotNull(sqlList[k])) {
                        // Prefix an identifying SQL comment, except for DROP
                        // statements which are run verbatim.
                        String runSql = "-- " + reportKey + "_" + reportIndex + "\n" + sqlList[k];
                        if (sqlList[k].trim().toLowerCase().startsWith("drop ")) {
                            runSql = sqlList[k];
                        }
                        // Record an execution flag and start the elapsed-time
                        // monitor; currently only tracked for the Spark engine.
                        if (Constant.SPARK_ENG.equals(engineFlag)) {
                            logger.info("报表: " + reportKey + "_" + reportIndex + " 连接到: " + curSparkConNum
                                    + " 号SPARK环境执行!");
                            recordSQLAndStartMonitor(runningKey, curSparkConNum);
                        } else if (Constant.HIVE_ENG.equals(engineFlag) && "0".equals(history)) {
                            CheckSqlStatus.startHiveMonitor(reportKey, reportIndex, conn);
                        }
                        if (k == sqlList.length - 1) {
                            // Last segment: produce the result set for "select" reports.
                            logger.info("####### Thread " + Thread.currentThread().getId() + " --> Run " + engineFlag
                                    + " SQL:" + runSql);
                            if ("select".equals(reportconfiguration.getOperation())) {
                                rs = stmt.executeQuery(runSql);
                            } else {
                                stmt.execute(runSql);
                            }
                        } else {
                            logger.info("####### Thread " + Thread.currentThread().getId() + " --> Run " + engineFlag
                                    + " SQL:" + runSql);
                            stmt.execute(runSql);
                        }
                        // Remove the execution flag; only tracked for Spark.
                        if (Constant.SPARK_ENG.equals(engineFlag)) {
                            removeSQLMonitorFlag(runningKey, curSparkConNum);
                        }
                        logger.info("run sql finished!runSql:" + runSql);
                    } else {
                        logger.info("####### Thread " + Thread.currentThread().getId() + " --> Run " + engineFlag
                                + " SQL is empty string!!!");
                    }
                } catch (SQLException e) {
                    // Remove the execution flag; only tracked for Spark.
                    if (Constant.SPARK_ENG.equals(engineFlag)) {
                        removeSQLMonitorFlag(runningKey, curSparkConNum);
                    }
                    logger.info("####### Run " + reportKey + "_" + reportIndex + " Failed", e);
                    // NOTE(review): only the message survives -- the SQLException
                    // cause is dropped, and the outer catch re-handles this below.
                    throw new HiveSqlExecException(e.getMessage());
                }
            }
            // k is declared at method scope so the catch block below can tell
            // which segment failed. After a fully successful loop k equals
            // sqlList.length; decrement it so a subsequent failure reported via
            // sqlList[k] cannot index past the end of the array.
            k -= 1;
        } catch (Exception e) {
            long curTimestamp = System.currentTimeMillis();
            String costTimeStr = ToolUtils.getCostTimeStr(curTimestamp - reportconfiguration.getEnqueueTime());
            logs.setCostTime(costTimeStr);
            logger.error("Run Kpi: " + reportKey + "_" + reportIndex
                    + " Failed! RunHistorySql:" + reportconfiguration.getRunHistory()
                    + " ,ExecuteCostTime:" + costTimeStr);
            logger.error("Run sql:" + sqlList[k] + " ERROR:", e);
            // Ship the failure to the central log-collect service.
            String errMsg = "Get a Exception," + engineFlag + "Dao Run hive sql:" + ToolUtils.replaceBlank(sqlList[k])
                    + " MSG:" + ToolUtils.replaceBlank(e.getMessage());
            logCollectSend(logs, Constant.LOG_EXCEPTION, errMsg);
            // Release the partially-created JDBC resources before rethrowing.
            DbOperationUtil.cleanConnEnv(conn, stmt, rs);
            throw new HiveSqlExecException(e.getMessage());
        }

        // Bundle the live JDBC objects for the caller.
        DbReturnModel dbModel = new DbReturnModel();
        dbModel.setConn(conn);
        dbModel.setStmt(stmt);
        dbModel.setRs(rs);
        logger.info("构造数据库相关信息对象返回");
        return dbModel;
    }

    /**
     * Drain the engine-side result set into the configured MySQL result
     * table, report the outcome to the log-collect service, and (for Spark
     * runs in the live environment) register zero-row writes for monitoring.
     *
     * @param reportconfiguration report definition (table type, MySQL statement, timings)
     * @param rs                  engine-side result set; may be null
     * @param logs                shared log model; status and cost time filled in here
     * @throws MysqlSqlExecException when the MySQL write fails
     */
    public void writeResToMysql(ReportConfiguration reportconfiguration, ResultSet rs, ReportLogModel logs)
            throws MysqlSqlExecException {
        try {
            String reportKey = reportconfiguration.getReport_key();
            String reportIndex = reportconfiguration.getReport_index();
            String engine = reportconfiguration.getEngine();
            // colN holds the column names of the engine SQL's result set.
            logger.info("获取mysql表列属性...");
            String[] colN = getColumnNameArray(reportconfiguration.getIsCustom(), rs);
            logger.info("Source Sql executed Metadata Info: " + JSON.toJSONString(colN));

            long writeCnt = 0L; // number of rows written into the MySQL result table
            if (("horizontal".equals(reportconfiguration.getTable_type())) && (colN != null)) {
                writeCnt = horizontalTblOpr(reportconfiguration, rs, colN);
            } else if (("vertical".equals(reportconfiguration.getTable_type())) && (colN != null)) {
                writeCnt = verticalTblOpr(reportconfiguration, rs, colN);
            }

            long curTimestamp = System.currentTimeMillis();
            String costTimeStr = ToolUtils.getCostTimeStr(curTimestamp - reportconfiguration.getEnqueueTime());
            logs.setCostTime(costTimeStr);
            logger.info("####### Thread " + Thread.currentThread().getId() + " --> Run KPI: "
                    + reportKey + " _" + reportIndex
                    + " Success! Write Count:" + writeCnt + " ,RunHistorySql:" + reportconfiguration.getRunHistory()
                    + " ,ExecuteCostTime:" + costTimeStr + " #######");
            // A zero-row Spark write in the live environment is suspicious:
            // register it for monitoring.
            if (writeCnt == 0 && Constant.SPARK_ENG.equalsIgnoreCase(engine) && Constant.ENV.equals(CommonConstant
                    .ENV_LIVE)) {
                addMonitorSql(reportKey, reportIndex);
            }
            // Ship the success entry to the log-collect service.
            logCollectSend(logs, Constant.LOG_SUCCESS, "", writeCnt);
        } catch (Exception e) {
            long curTimestamp = System.currentTimeMillis();
            String costTimeStr = ToolUtils.getCostTimeStr(curTimestamp - reportconfiguration.getEnqueueTime());
            logs.setCostTime(costTimeStr);
            logger.error("Run Kpi: " + reportconfiguration.getReport_key() + " _"
                    + reportconfiguration.getReport_index() + " Failed! RunHistorySql:"
                    + reportconfiguration.getRunHistory() + " ,ExecuteCostTime:" + costTimeStr);
            logger.error("Run Mysql: " + reportconfiguration.getSqlcontext() + " ERROR:", e);
            // Ship the failure entry to the log-collect service.
            String errMsg = "Get a Exception," + engineFlag + "Dao Run Mysql:"
                    + ToolUtils.replaceBlank(reportconfiguration.getSqlcontext())
                    + " MSG:" + ToolUtils.replaceBlank(e.getMessage());
            logCollectSend(logs, Constant.LOG_EXCEPTION, errMsg);
            throw new MysqlSqlExecException(e.getMessage());
        }
    }

    /**
     * Record a report segment whose Spark run wrote zero rows so monitoring
     * can pick it up (see Constant.MONITOR_SQL).
     *
     * @param reportKey   report identifier
     * @param reportIndex SQL segment index within the report
     */
    private void addMonitorSql(String reportKey, String reportIndex) {
        // computeIfAbsent replaces the check-then-put sequence, which also
        // narrows the race window when several report threads land here.
        // NOTE(review): if this runs concurrently, Constant.MONITOR_SQL may
        // still need to be a concurrent map -- confirm its declaration.
        Constant.MONITOR_SQL.computeIfAbsent(reportKey, key -> new ArrayList<>()).add(reportIndex);
    }

    /**
     * Post-ingestion hook: if the report configuration names a process class,
     * load it reflectively and invoke its
     * {@code process(Connection, ProcessResult)} method with a fresh MySQL
     * connection.
     *
     * Failures are logged but never propagated to the caller: a broken
     * post-process must not mark the report run itself as failed.
     *
     * @param reportconfiguration report definition carrying the process class name
     */
    public void laterOperation(ReportConfiguration reportconfiguration) {
        try {
            if (ToolUtils.isNotNull(reportconfiguration.getProcess_class())) {
                ProcessResult prs = new ProcessResult();
                String process_class = reportconfiguration.getProcess_class();
                String process_method = "process";
                logger.info("Run Process:" + process_class + "." + process_method);
                try {
                    Class<?> cls = Class.forName(process_class);
                    // getDeclaredConstructor().newInstance() replaces the
                    // deprecated Class.newInstance(), which rethrows checked
                    // constructor exceptions undeclared.
                    AProcess obj = (AProcess) cls.getDeclaredConstructor().newInstance();
                    Method method = cls.getMethod(process_method, Connection.class, ProcessResult.class);
                    method.invoke(obj, getDbConnection(Constant.MYSQL_ENG, "0", null), prs);
                } catch (ClassNotFoundException e) {
                    // Missing process class: log with the class name and stack.
                    logger.error("Process class not found: " + process_class, e);
                } catch (Exception e) {
                    logger.error("DataReporterServiceException kpi:" + reportconfiguration.getReport_key() + ": "
                            + e.getMessage(), e);
                    throw new DataDaoException(e.getMessage());
                }
            }
        } catch (Exception e) {
            // Swallowed on purpose (see javadoc); log for diagnosis.
            logger.error("Run Kpi: " + reportconfiguration.getReport_key() + "_"
                    + reportconfiguration.getReport_index() + " Failed! ");
            logger.error("Run Later Operation" + " ERROR:", e);
        }
    }

    /**
     * Cleanup hook invoked after a successful run; the default implementation
     * does nothing. Subclasses override this to tear down engine-specific
     * state.
     */
    public void cleanup() {

    }

    /**
     * Horizontal ("wide") table write: each result-set row becomes one
     * parameter set of the configured MySQL statement (update / replace /
     * upsert), buffered into JDBC batches and committed per batch.
     *
     * On a primary-key conflict the table's AUTO_INCREMENT is bumped to
     * MAX(id)+1 via alterAutoIncrement and the write is retried once.
     *
     * @param reportconfiguration report definition (MySQL statement, custom flag)
     * @param rs                  engine-side result set; may be null
     * @param colN                column names of the result set
     * @return number of rows written to the MySQL result table
     * @throws Exception on connection or SQL failure
     */
    public long horizontalTblOpr(ReportConfiguration reportconfiguration, ResultSet rs, String[] colN)
            throws Exception {
        Connection mysqlcon = null;
        PreparedStatement mysqlstmt = null;
        long writeCnt = 0L;
        String isCRFlag = reportconfiguration.getIsCustom(); // "1" marks a custom report
        try {
            String mysqlStr = reportconfiguration.getSqlcontext();
            if (ToolUtils.isNotNull(mysqlStr)) {
                mysqlcon = getDbConnection(Constant.MYSQL_ENG, isCRFlag, null);
                logger.info("Run Mysql: " + mysqlStr);
                mysqlstmt = mysqlcon.prepareStatement(mysqlStr);
                mysqlcon.setAutoCommit(false);
            }

            // NOTE(review): a null getSqlcontext() would NPE on this trim();
            // the isNotNull guard above implies empty is possible -- confirm
            // null never reaches here.
            String lowerMysqlStr = mysqlStr.trim().toLowerCase();
            // Build the column-name -> placeholder-index map:
            // - custom-report UPDATE statements: parse the statement to find each
            //   column's "?" index so the statement can be populated correctly;
            // - otherwise: index the result-set columns in order, skipping (for
            //   custom reports only) columns the statement does not mention.
            //   [The contains-check is disabled for non-custom reports because a
            //   REPLACE over all columns may legitimately omit the column list.]
            Map<String, Integer> indexOfColumnMap = null;
            if ("1".equals(isCRFlag) && lowerMysqlStr.startsWith("update")) {
                indexOfColumnMap = getIndexOfColumn(mysqlStr, colN);
            } else {
                indexOfColumnMap = new HashMap<String, Integer>();
                int index = 1;
                for (int i = 0; i < colN.length; i++) {
                    if ("1".equals(isCRFlag) && !(mysqlStr.contains("`" + colN[i] + "`"))) {
                        continue;
                    }
                    indexOfColumnMap.put(colN[i], index);
                    index++;
                }
            }
            logger.info("Result Metadata Index Info:" + JSON.toJSONString(indexOfColumnMap));

            // Non-update statements were migrated from REPLACE INTO to upsert,
            // so each value must be bound twice (INSERT part + UPDATE part).
            Set<Map.Entry<String, Integer>> entries = indexOfColumnMap.entrySet();
            int count = indexOfColumnMap.size();
            int replaceRetry = 2;
            for (int i = 0; i < replaceRetry; i++) {
                try {
                    long flag = 0;
                    logger.info("开始遍历写入...");
                    // NOTE(review): on the retry pass rs has already been partly
                    // consumed, so only the remaining rows are re-attempted --
                    // confirm that is the intended recovery semantics.
                    while (rs != null && rs.next()) {
                        flag++;
                        for (Map.Entry<String, Integer> colEntry : entries) {
                            if (ToolUtils.isNotNull(mysqlStr)) {
                                // Buffer the row; the batch is flushed once more
                                // than 200 rows have accumulated.
                                mysqlstmt.setString(colEntry.getValue(), rs.getString(colEntry.getKey()));
                                // Upsert statements need every value bound a second time.
                                if (!lowerMysqlStr.startsWith("update") && !lowerMysqlStr.startsWith("replace")) {
                                    mysqlstmt.setString(count + colEntry.getValue(), rs.getString(colEntry.getKey()));
                                }
                            }
                        }

                        if (ToolUtils.isNotNull(mysqlStr)) {
                            mysqlstmt.addBatch();
                            printResult(mysqlstmt, reportconfiguration);
                            if (flag > 200) {
                                mysqlstmt.executeBatch();
                                mysqlcon.commit();
                                writeCnt += flag;
                                flag = 0;
                            }
                        }
                    }
                    // Flush the final partial batch.
                    if (ToolUtils.isNotNull(mysqlStr)) {
                        mysqlstmt.executeBatch();
                        mysqlcon.commit();
                        writeCnt += flag;
                    }
                    break;
                } catch (Exception e) {
                    // On a primary-key conflict, move AUTO_INCREMENT past MAX(id)
                    // and let the loop retry the REPLACE once.
                    if (e.getMessage().contains("PRIMARY")) {
                        if (alterAutoIncrement(mysqlcon, reportconfiguration.getReport_key())) {
                            logger.info("Alter AUTO_INCREMENT To MAX(id) Success!");
                        } else {
                            throw e;
                        }
                    } else {
                        throw e;
                    }
                }
            }
        } finally {
            DbOperationUtil.cleanConnEnv(mysqlcon, mysqlstmt);
        }

        return writeCnt;
    }

    /**
     * Debug aid: when the report key/index is registered in
     * Constant.PRINT_RESULT_SQL (by index or wildcard "*"), log the populated
     * PreparedStatement so the effective update can be inspected. Never
     * propagates a failure.
     */
    private void printResult(PreparedStatement mysqlstmt, ReportConfiguration reportconfiguration) {
        String reportKey = reportconfiguration.getReport_key();
        String reportIndex = reportconfiguration.getReport_index();
        try {
            if (!Constant.PRINT_RESULT_SQL.containsKey(reportKey)) {
                return;
            }
            Set<String> indexSet = Constant.PRINT_RESULT_SQL.get(reportKey);
            boolean shouldPrint = indexSet.contains(reportIndex) || indexSet.contains("*");
            if (shouldPrint) {
                logger.info("报表: " + reportKey + "[" + reportIndex + "]的更新结果为: " + mysqlstmt);
            }
        } catch (Exception e) {
            logger.error("报表: " + reportKey + "[" + reportIndex + "]打印结果异常！", e);
        }
    }

    /**
     * Vertical ("tall") table write: every non-key column of each source row
     * is written as one (keys..., quota=columnName, value=columnValue) row
     * via REPLACE INTO, buffered into JDBC batches of ~200 and committed per
     * batch.
     *
     * @param reportconfiguration report definition (MySQL keys and target table)
     * @param rs                  engine-side result set; may be null
     * @param colN                column names of the result set
     * @return number of rows written to the MySQL result table
     * @throws Exception on connection or SQL failure
     */
    public long verticalTblOpr(ReportConfiguration reportconfiguration, ResultSet rs, String[] colN) throws Exception {
        Connection mysqlcon = null;
        PreparedStatement mysqlstmt = null;
        long writeCnt = 0;
        try {
            String mysql_keys = reportconfiguration.getMysql_keys();
            if (ToolUtils.isNotNull(mysql_keys)) {
                mysqlcon = getDbConnection(Constant.MYSQL_ENG, "0", null);
                // Build: REPLACE INTO table(k1,...,kn,quota,value)VALUES(?,...,?,?,?)
                String[] keys = mysql_keys.split(",");
                StringBuilder sqlBuf = new StringBuilder("REPLACE INTO ");
                sqlBuf.append(reportconfiguration.getMysql_table()).append("(");
                for (int i = 0; i < keys.length; i++) {
                    sqlBuf.append(keys[i]).append(i == keys.length - 1 ? ",quota,value)VALUES(" : ",");
                }
                for (int i = 0; i < keys.length; i++) {
                    sqlBuf.append(i == keys.length - 1 ? "?,?,?)" : "?,");
                }
                mysqlstmt = mysqlcon.prepareStatement(sqlBuf.toString());
                mysqlcon.setAutoCommit(false);

                int flag = 0; // rows buffered in the current batch, not yet counted in writeCnt
                while (rs != null && rs.next()) {
                    // Snapshot the key values of this source row.
                    Map<String, String> keyMap = new HashMap<String, String>();
                    for (String key : keys) {
                        keyMap.put(key, rs.getString(key));
                    }

                    // Each non-key column becomes one vertical row.
                    for (int i = 0; i < colN.length; i++) {
                        boolean isInkey = false;
                        for (String key : keys) {
                            if (colN[i].equals(key)) {
                                isInkey = true;
                                break;
                            }
                        }
                        if (!isInkey) {
                            flag++;
                            int j = 1;
                            for (; j <= keys.length; j++) {
                                String keyValue = keyMap.get(keys[j - 1]);
                                mysqlstmt.setString(j, keyValue == null ? "" : keyValue);
                            }
                            mysqlstmt.setString(j, colN[i]);
                            mysqlstmt.setString(j + 1, rs.getString(colN[i]));
                            mysqlstmt.addBatch();
                            if (flag > 200) {
                                mysqlstmt.executeBatch();
                                mysqlcon.commit();
                                writeCnt += flag;
                                flag = 0;
                            }
                        }
                    }
                }
                // BUGFIX: the final flush used to sit inside the row loop,
                // committing once per source row and re-adding the unreset
                // batch counter to writeCnt on every iteration, which
                // over-counted the written rows (e.g. two rows with one value
                // each reported 3). Flushing once after the loop mirrors
                // horizontalTblOpr and keeps writeCnt accurate.
                mysqlstmt.executeBatch();
                mysqlcon.commit();
                writeCnt += flag;
            }
        } finally {
            DbOperationUtil.cleanConnEnv(mysqlcon, mysqlstmt);
        }

        return writeCnt;
    }

    /**
     * Derive the column-name array from the result set's metadata.
     *
     * Engine-specific rules: MySQL uses column labels (lower-cased for custom
     * reports so upper-case front-end aliases still match); SnappyData uses
     * lower-cased column names; every other engine uses the raw column name.
     * A leading "table." qualifier is stripped.
     *
     * @param isCustom "1" marks a custom report
     * @param rs       result set to inspect; may be null
     * @return column names, or null when rs is null
     * @throws SQLException on metadata access failure
     */
    public String[] getColumnNameArray(String isCustom, ResultSet rs) throws SQLException {
        if (rs == null) {
            return null;
        }
        ResultSetMetaData meta = rs.getMetaData();
        int columnCount = meta.getColumnCount();
        String[] names = new String[columnCount];
        for (int i = 0; i < columnCount; i++) {
            String name;
            if (Constant.MYSQL_ENG.equals(engineFlag)) {
                // Custom-report aliases are lower-cased for front-end compatibility.
                name = meta.getColumnLabel(i + 1);
                if (isCustom.equals("1")) {
                    name = name.toLowerCase();
                }
            } else if (Constant.SNAPPYDATA_ENG.equalsIgnoreCase(engineFlag)) {
                name = meta.getColumnName(i + 1).toLowerCase();
            } else {
                name = meta.getColumnName(i + 1);
            }
            // Strip a "table." qualifier when present.
            String[] parts = name.split("\\.", -1);
            names[i] = parts.length >= 2 ? parts[1] : name;
        }

        return names;
    }

    /**
     * Log-reporting overload for entries without a write count; delegates to
     * the four-argument variant with writeCnt = 0 (which renders as "0",
     * identical to the previous inline behavior).
     *
     * @param logs      shared log model (report key/index/timings already set)
     * @param resStatus result status constant (Constant.LOG_*)
     * @param errMsg    error/status message; empty on success
     */
    public void logCollectSend(ReportLogModel logs, String resStatus, String errMsg) {
        // Delegation removes the duplicated field-setting/send logic.
        logCollectSend(logs, resStatus, errMsg, 0L);
    }

    /**
     * Populate the result fields of the log model and ship it to the central
     * log-collect service.
     *
     * @param logs      shared log model (report key/index/timings already set)
     * @param resStatus result status constant (Constant.LOG_*)
     * @param errMsg    error/status message; empty on success
     * @param writeCnt  number of rows written to MySQL
     */
    public void logCollectSend(ReportLogModel logs, String resStatus, String errMsg, long writeCnt) {
        logs.setTs(TimeUtil.getNow(Constant.LOG_TIME_FORMAT));
        logs.setStatus(resStatus);
        logs.setWriteCnt(String.valueOf(writeCnt));
        logs.setMsg(errMsg);
        String apt = CustomConfiguration.getString(Constant.LOG_APT);
        String bus = CustomConfiguration.getString(Constant.LOG_BUS);
        LogCollectService.sendLog(apt, bus, logs);
    }

    /**
     * For a custom-report UPDATE statement, map each result-set column name to
     * the 1-based index of its "?" placeholder in the statement.
     *
     * Only the part after SET is scanned; " where " and " and " are collapsed
     * to spaces so condition columns are matched the same way as assignment
     * columns. Backticks and semicolons are stripped before matching.
     *
     * @param updateMysql the UPDATE statement (placeholders written as "col=?")
     * @param colN        column names present in the source result set
     * @return column name -> placeholder index map
     */
    public Map<String, Integer> getIndexOfColumn(String updateMysql, String[] colN) {
        Map<String, Integer> indexOfColumnMap = new HashMap<String, Integer>();
        updateMysql = updateMysql.toLowerCase();
        updateMysql = updateMysql.substring(updateMysql.indexOf("set") + 3).replace(" where ", " ").replace(" and ",
                " ");
        // NOTE(review): inside a character class '|' is a literal pipe, not
        // alternation -- "[,| ]" matches ',', '|' or ' '. Works for typical
        // input but probably not what was intended.
        Pattern tagPattern = Pattern.compile("[,| ]?(.*?)=\\?[,| ]?");
        Matcher tagMatcher = tagPattern.matcher(updateMysql.replace(";", "").replace("`", "").toLowerCase());
        int index = 1;
        boolean flag = false; // whether the matched column exists in colN
        while (tagMatcher.find()) {
            flag = false;
            String columnName = tagMatcher.group(1).trim();
            for (String col : colN) {
                if (col.equalsIgnoreCase(columnName)) {
                    flag = true;
                    break;
                }
            }
            // NOTE(review): a placeholder whose column is NOT in colN does not
            // advance 'index', yet it still occupies a "?" position in the
            // prepared statement -- confirm colN always covers every
            // placeholder column, otherwise later indexes are shifted.
            if (flag) {
                indexOfColumnMap.put(columnName, index);
                index += 1;
            }
        }

        return indexOfColumnMap;
    }

    /**
     * Convenience overload of
     * {@link #getDbConnection(String, String, String, ReportParam)} for calls
     * that carry no per-run report parameters.
     *
     * @param engine   engine type: Hive, Impala, Mysql, Spark
     * @param isCRFlag for MySQL: selects the sensitive vs. non-sensitive DB
     * @param isEnable "0" with Hive selects the fdbd_rep_custom resource pool
     * @return an open JDBC connection
     * @throws Exception when no connection can be established
     */
    public Connection getDbConnection(String engine, String isCRFlag, String isEnable) throws Exception {
        return getDbConnection(engine, isCRFlag, isEnable, null);
    }

    /**
     * Obtain a database connection for the requested engine.
     *
     * Hive: externally-triggered runs (internal scheduling uses apt=123 /
     * key=136AD6D79A084FE7) authenticate with the caller's credentials;
     * custom reports (isEnable == "0") use the dedicated custom-report
     * account. Spark: environments are selected round-robin, falling back to
     * probing each one in order when the selected environment is unreachable.
     *
     * @param engine   engine type: Hive, Impala, Mysql, Spark
     * @param isCRFlag for MySQL: selects the sensitive vs. non-sensitive DB
     * @param isEnable "0" with Hive selects the fdbd_rep_custom resource pool
     * @param rp       per-run parameters; carries caller credentials and
     *                 receives the chosen Spark environment index
     * @return an open JDBC connection
     * @throws Exception when no connection can be established
     */
    public Connection getDbConnection(String engine, String isCRFlag, String isEnable, ReportParam rp) throws
            Exception {
        if (Constant.MYSQL_ENG.equals(engine)) {
            return DbOperationUtil.getMysqlConntion(isCRFlag);
        } else {
            String dbUserName = usernameStr;
            String dbPassword = passwordStr;

            // Hive run triggered externally: use the business caller's credentials.
            if (Constant.HIVE_ENG.equals(engine) && (null != rp) && (null != rp.getApt()) &&
                    (null != rp.getKey()) && (!"123".equals(rp.getApt())) &&
                    (!"136AD6D79A084FE7".equals(rp.getKey()))) {
                dbUserName = rp.getApt();
                dbPassword = rp.getKey();
            } else if (Constant.HIVE_ENG.equals(engine) && "0".equals(isEnable)) { // custom reports use the dedicated account
                dbUserName = customUsernameStr;
                dbPassword = customPasswordStr;
            }

            if (Constant.SPARK_ENG.equals(engine)) {
                int sparkUrlNum = CustomConfiguration.getInt("spark.url.num");
                // First try the round-robin-selected Spark environment.
                // NOTE(review): assumes rp != null for Spark runs -- confirm.
                int curSparkConNum = sparkConNum.getAndIncrement() % sparkUrlNum + 1;
                try {
                    Connection sparkConnection = getSparkConnection(curSparkConNum);
                    rp.setCurSparkConNum(curSparkConNum);
                    return sparkConnection;
                } catch (Exception e) {
                    // Selected environment failed: probe every environment in
                    // order until one connects.
                    logger.error("Spark 当前轮询到的连接: " + curSparkConNum + " 异常，进行逐个尝试获取连接…………");
                    for (int i = 1; i <= sparkUrlNum; i++) {
                        try {
                            Connection sparkConnection = getSparkConnection(i);
                            rp.setCurSparkConNum(i);
                            return sparkConnection;
                        } catch (Exception e1) {
                            logger.info(engineFlag + "第: " + i + " 个库连接失败！");
                            if (i == sparkUrlNum) {
                                logger.info(engineFlag + "所有环境连接失败…………");
                                throw new Exception("Database connection failed", e1);
                            }
                        }
                    }
                }

                // Unreachable in practice (the loop either returns or throws);
                // kept to satisfy the compiler.
                return JdbcManager.getConnection(driverStr, urlStr, dbUserName, dbPassword);
            }

            // SECURITY FIX: never log the raw password.
            logger.info(engineFlag + "库连接信息,Driver: " + driverStr + " ,Url: " + urlStr + " ,User: " + dbUserName
                    + " ,Password: ******");
            return JdbcManager.getConnection(driverStr, urlStr, dbUserName, dbPassword);
        }
    }

    /**
     * Open a JDBC connection to the i-th configured Spark environment.
     *
     * @param i 1-based index into the sparkConf environment map
     * @return an open connection to that environment
     * @throws DataDaoException when the connection cannot be established
     */
    private Connection getSparkConnection(int i) throws DataDaoException {
        Map<String, String> sparkEnvConf = sparkConf.get(i);
        String url = sparkEnvConf.get("url");
        String username = sparkEnvConf.get("username");
        String password = sparkEnvConf.get("password");
        Connection connection = JdbcManager.getConnection(driverStr, url, username, password);
        // SECURITY FIX: never log the raw password.
        logger.info(engineFlag + "第: " + i + " 个库连接信息,Driver: " + driverStr + " ,Url: " + url +
                " ,User: " + username + " ,Password: ******");
        return connection;
    }

    /**
     * Bump the table's AUTO_INCREMENT to MAX(id) + 1 so a REPLACE that
     * collided on the primary key can be retried.
     *
     * @param connection open MySQL connection (not closed here)
     * @param tableName  result table name (equals the report key)
     * @return true when the ALTER succeeded, false on any SQL error
     */
    private boolean alterAutoIncrement(Connection connection, String tableName) {
        Statement statement = null;
        ResultSet resultSet = null;
        boolean altered = false;
        try {
            statement = connection.createStatement();
            resultSet = statement.executeQuery("SELECT MAX(id) FROM `" + tableName + "`");
            resultSet.next();
            long maxId = resultSet.getLong(1);
            logger.info(tableName + "'s MAX(id) Is:" + maxId);
            statement.execute("ALTER TABLE `" + tableName + "` AUTO_INCREMENT = " + (maxId + 1));
            altered = true;
        } catch (SQLException e) {
            logger.error("Alter AUTO_INCREMENT To MAX(id) Failed!", e);
        } finally {
            try {
                JdbcManager.destroy(null, statement, resultSet);
            } catch (DataDaoException e) {
                logger.error("释放JDBC资源失败!", e);
            }
        }
        return altered;
    }

}
