package org.jeecg.desen.run.component.hive;

import cn.hutool.core.date.DateField;
import cn.hutool.core.date.DatePattern;
import cn.hutool.core.date.DateTime;
import cn.hutool.core.date.DateUtil;
import com.alibaba.fastjson.JSON;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.StringUtils;
import org.jeecg.common.api.vo.Result;
import org.jeecg.desen.cretab.service.DesensHiveCreateTableService;
import org.jeecg.desen.error.AbstractErrorService;
import org.jeecg.desen.file.DFSFileMigrate;
import org.jeecg.desen.run.component.DscConversionService;
import org.jeecg.desen.run.entity.DscSscFieldDetail;
import org.jeecg.desen.run.entity.DscSscSource;
import org.jeecg.desen.run.entity.DscSscTableDetail;
import org.jeecg.modules.meta.entity.SysTableinfo;
import org.jeecg.modules.meta.service.ISysTableinfoService;
import org.jeecg.modules.pubmgr.entity.Paramcode;
import org.jeecg.modules.pubmgr.service.IParamcodeService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;

import javax.sql.DataSource;
import java.io.File;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.ParseException;
import java.util.*;

@Slf4j
public class AbstractDscHiveConversionService extends AbstractErrorService implements DscHiveConversionService {

    Date startDate;
    Date endDate;
    String pro_num;

    @Autowired
    ISysTableinfoService iSysTableinfoService;

    @Autowired
    IRuleService iRuleService;

    @Autowired
    @Qualifier("hiveDruidDataSource")
    DataSource druidDataSource;

    @Autowired
    IParamcodeService iParamcodeService;

    private final static String DES_PATH = "des_path";//路径分类
    private final static String SHELL_PATH = "shell_path";//sheill脚本路径
    private final static String DES_DFS_PATH = "des_dfs_path";//脱敏库dfs文件路径
    private final static String TARGET_DFS_PATH = "target_dfs_path";//目标库dfs文件路径


    /**
     * Binds the project number and a single run date. endDate stays unset,
     * so getLoadDtList will process exactly this one partition date.
     *
     * @return this, for fluent chaining (bindParam(...).conversionDB(...))
     */
    @Override
    public DscConversionService bindParam(String pro_num, Date date) {
        this.pro_num = pro_num;
        this.startDate = date;
        return this;
    }

    /**
     * Binds the project number and an inclusive [startDate, endDate] range;
     * every partition date inside the range is processed.
     *
     * @return this, for fluent chaining
     */
    @Override
    public DscConversionService bindParam(String pro_num, Date startDate, Date endDate) {
        this.pro_num = pro_num;
        this.startDate = startDate;
        this.endDate = endDate;
        return this;
    }

    /**
     * Entry point for desensitizing one Hive table: validates the request,
     * expands the bound date range into partition dates, then executes the
     * Hive SQL and file migration for each partition.
     *
     * @return Result.ok(true) on success, Result.error(message) otherwise
     */
    @Override
    public Result conversionDB(DscSscTableDetail dscSscTableDetail) throws Exception {
        log.info("脱敏表-hive开始执行:" + JSON.toJSONString(dscSscTableDetail));

        // A non-empty message means the table cannot be desensitized.
        final String validationError = verification(dscSscTableDetail);
        if (StringUtils.isNotEmpty(validationError)) {
            return Result.error(validationError);
        }

        // Expand the date range per partition type, then run partition by partition.
        return runLoadDTList(dscSscTableDetail, getLoadDtList(dscSscTableDetail.getFdType()));
    }

    /**
     * Runs the Hive statement and the DFS file migration for every partition
     * date, stopping at the first failure.
     *
     * @return Result.ok(true) when all partitions succeed, otherwise an error Result
     */
    private Result runLoadDTList(DscSscTableDetail dscSscTableDetail, List<Date> loadDtList) {
        for (Date partitionDate : loadDtList) {
            if (!runHive(dscSscTableDetail, partitionDate)) {
                return Result.error("执行hivesql语句异常！");
            }
            // File migration for this table/schema/partition; non-empty result = failure message.
            String migrateError = dfsMigrate(dscSscTableDetail, partitionDate);
            if (StringUtils.isNotEmpty(migrateError)) {
                return Result.error(migrateError);
            }
        }
        return Result.ok(true);
    }

    /**
     * Builds the Hive insert statement for one partition date and executes it,
     * recording an error entry when execution fails.
     *
     * @return true when the statement executed without a SQLException
     */
    private boolean runHive(DscSscTableDetail dscSscTableDetail, Date date) {
        String hiveSql = getHiveSql(dscSscTableDetail, date);
        log.info("hive - sql=" + hiveSql);

        boolean succeeded = executeHive(hiveSql);
        log.info("hive - 执行完成，结果=" + succeeded);

        if (!succeeded) {
            recordError("执行错误:hive - sql", hiveSql);
        }
        return succeeded;
    }

    /**
     * Builds the list of partition dates to process: a single date when
     * endDate is unset or equals startDate (day precision), otherwise the
     * range expanded according to the partition type.
     */
    private List<Date> getLoadDtList(String type) {
        // endDate null-check must come first to short-circuit the format call.
        boolean singleDate = (null == endDate)
                || DateUtil.formatDate(endDate).equals(DateUtil.formatDate(startDate));
        if (singleDate) {
            List<Date> onlyStart = new ArrayList<>();
            onlyStart.add(startDate);
            return onlyStart;
        }
        return getDateFieldType(type);
    }

    /**
     * Expands [startDate, endDate] into partition dates by partition type:
     * "1"/"5" daily, "2"/"6" monthly, "3"/"7" yearly. Any other type
     * (e.g. "4", non-partitioned) yields an empty list, which makes the
     * caller process no partitions.
     */
    private List<Date> getDateFieldType(String type) {
        try {
            if ("1".equals(type) || "5".equals(type)) {
                return getDayBetween();
            } else if ("2".equals(type) || "6".equals(type)) {
                return getMonthBetween(startDate, endDate);
            } else if ("3".equals(type) || "7".equals(type)) {
                return getYearBetween(startDate, endDate);
            }
        } catch (ParseException e) {
            // Fix: was log.info — a failure here silently skips every partition,
            // so it must surface at error level.
            log.error("执行日期区间获取异常:", e);
        }
        return new ArrayList<>();
    }

    /**
     * Lists every day in [startDate, endDate], inclusive.
     */
    private List<Date> getDayBetween() {
        // rangeToList yields hutool DateTime (a java.util.Date subclass);
        // copy into a plain List<Date>.
        List<DateTime> days = DateUtil.rangeToList(startDate, endDate, DateField.DAY_OF_YEAR);
        return new ArrayList<>(days);
    }

    /**
     * Lists one date per month in [minDate, maxDate], inclusive of both end
     * months. Each returned date is the 1st of a month (carrying minDate's
     * time-of-day fields).
     *
     * The upper bound is set to day 2 of maxDate's month so that the cursor,
     * which steps from day 1, still emits the final month before stopping.
     */
    private static List<Date> getMonthBetween(Date minDate, Date maxDate) throws ParseException {
        Calendar cursor = Calendar.getInstance();
        cursor.setTime(minDate);
        cursor.set(cursor.get(Calendar.YEAR), cursor.get(Calendar.MONTH), 1);

        Calendar bound = Calendar.getInstance();
        bound.setTime(maxDate);
        bound.set(bound.get(Calendar.YEAR), bound.get(Calendar.MONTH), 2);

        List<Date> months = new ArrayList<>();
        for (; cursor.before(bound); cursor.add(Calendar.MONTH, 1)) {
            months.add(cursor.getTime());
        }
        return months;
    }

    /**
     * Lists one date per year in [minDate, maxDate]: for each distinct year,
     * the first month-date produced by getMonthBetween.
     *
     * Fix: the original used a HashMap&lt;String,String&gt; with dummy values as a
     * set; a HashSet expresses the dedupe directly (Set.add returns false on
     * duplicates).
     *
     * @throws ParseException propagated from getMonthBetween (kept for
     *         signature compatibility; not actually thrown by the visible code)
     */
    private static List<Date> getYearBetween(Date minDate, Date maxDate) throws ParseException {
        List<Date> yearDates = new ArrayList<>();
        Set<String> seenYears = new HashSet<>();
        for (Date date : getMonthBetween(minDate, maxDate)) {
            String year = DateUtil.formatDate(date).substring(0, 4);
            if (seenYears.add(year)) {
                yearDates.add(date);
            }
        }
        return yearDates;
    }

    /**
     * Full pre-run validation: entity completeness first, then the
     * database-association checks.
     *
     * @return an error message, or null when the table may be desensitized
     */
    private String verification(DscSscTableDetail dscSscTableDetail) {
        String error = verificationEntity(dscSscTableDetail);
        if (StringUtils.isEmpty(error)) {
            error = verificationAssDB(dscSscTableDetail);
        }
        return StringUtils.isEmpty(error) ? null : error;
    }

    /**
     * Checks that the desensitization entity itself is complete: the detail
     * must exist and carry at least one field definition.
     *
     * @return an error message, or null when the entity is valid
     */
    private String verificationEntity(DscSscTableDetail dscSscTableDetail) {
        if (null == dscSscTableDetail) {
            return recordError("验证-脱敏数据为空!");
        }
        List<DscSscFieldDetail> fields = dscSscTableDetail.getFieldDetailList();
        if (null == fields || fields.isEmpty()) {
            return recordError("验证-脱敏表对应的字段为空ssid=" + dscSscTableDetail.getSscid() + ",表id!" + dscSscTableDetail.getFdTableId());
        }
        return null;
    }

    /**
     * Checks associated database data: resolves the physical table
     * name/schema by id, then the Hive function behind each field's masking
     * rule (fields without a rule are left untouched).
     *
     * @return an error message, or null when every association resolves
     */
    private String verificationAssDB(DscSscTableDetail dscSscTableDetail) {
        // Resolve and store the physical table name and schema.
        String tableError = verificationAssDBTbaleName(dscSscTableDetail);
        if (StringUtils.isNotEmpty(tableError)) {
            return recordError(tableError);
        }

        // Resolve the Hive masking function for each ruled field.
        String ruleError = verificationAssDBFieldRule(dscSscTableDetail);
        return StringUtils.isNotEmpty(ruleError) ? recordError(ruleError) : null;
    }

    /**
     * Resolves the table metadata by id and copies the physical table name
     * and schema onto the detail entity for later SQL generation.
     *
     * @return an error message, or null on success
     */
    private String verificationAssDBTbaleName(DscSscTableDetail dscSscTableDetail) {
        String tableId = dscSscTableDetail.getFdTableId();

        SysTableinfo tableInfo = getSysTableinfoById(tableId);
        if (tableInfo == null) {
            return recordError("验证-表id不存在,表id=" + tableId);
        }

        String tableName = tableInfo.getFdTablename();
        if (StringUtils.isEmpty(tableName)) {
            return recordError("验证-表名称不存在,表id=" + tableId);
        }
        dscSscTableDetail.setFdTableName(tableName);

        String schema = tableInfo.getFdSchema();
        if (StringUtils.isEmpty(schema)) {
            return recordError("验证-表schema不存在,表id=" + tableId);
        }
        dscSscTableDetail.setFdSchema(schema);

        return null;
    }

    /**
     * Looks up the Hive masking function for every field that declares both a
     * rule name and a rule level, storing it on the field. Fields without a
     * complete rule are skipped (left unmasked).
     *
     * @return an error message when a rule has no matching function, else null
     */
    private String verificationAssDBFieldRule(DscSscTableDetail dscSscTableDetail) {
        for (DscSscFieldDetail field : dscSscTableDetail.getFieldDetailList()) {
            boolean hasRule = StringUtils.isNotEmpty(field.getFdRulename())
                    && StringUtils.isNotEmpty(field.getFdRulelevel());
            if (!hasRule) {
                continue;
            }
            String algorithm = iRuleService.getAlgorithmByRuleType(field.getFdRulename(), field.getFdRulelevel());
            if (StringUtils.isEmpty(algorithm)) {
                // Stop at the first unresolved rule, like the original break.
                return recordError("验证-表字段脱敏规则对应的数据库函数不存在=" + dscSscTableDetail.getFdTableId());
            }
            // Remember the resolved masking function for SQL generation.
            field.setFdAlgorithm(algorithm);
        }
        return null;
    }


    /**
     * Assembles the full "insert overwrite ... select ... from ... where ..."
     * Hive statement for one partition date. Masked fields use their resolved
     * function expression; all others are selected verbatim.
     */
    private String getHiveSql(DscSscTableDetail dscSscTableDetail, Date date) {
        String targetTable = DesensHiveCreateTableService.schema + "." + dscSscTableDetail.getFdSchema() + "_" + dscSscTableDetail.getFdTableName();

        StringBuilder sql = new StringBuilder(" insert overwrite table ").append(targetTable);

        // Partition clause only when a partition column is configured.
        if (StringUtils.isNotEmpty(dscSscTableDetail.getFdLoaddt())) {
            sql.append(getPartitionInsertHiveSql(dscSscTableDetail, date));
        }

        // Select list: masking expression when the field has a function, raw column otherwise.
        StringJoiner selectList = new StringJoiner(",");
        for (DscSscFieldDetail field : dscSscTableDetail.getFieldDetailList()) {
            if (StringUtils.isNotEmpty(field.getFdAlgorithm())) {
                selectList.add(getHiveAlgorithm(field.getFdAlgorithm(), field.getFdFieldname()));
            } else {
                selectList.add(field.getFdFieldname());
            }
        }
        sql.append(" select ").append(selectList);

        // Two-level partition types also select the secondary partition column.
        sql.append(getPartitionField(dscSscTableDetail));

        sql.append(" from ").append(dscSscTableDetail.getFdSchema()).append(".").append(dscSscTableDetail.getFdTableName());

        sql.append(" where 1=1 ");
        if (StringUtils.isNotEmpty(dscSscTableDetail.getFdWhere())) {
            sql.append(dscSscTableDetail.getFdWhere());
        }
        sql.append(getPartitionWhereHiveSql(dscSscTableDetail, date));

        return sql.toString();
    }

    /**
     * Returns the secondary partition column (with leading comma) for
     * two-level partition types "5"/"6"/"7"; a single space otherwise.
     */
    private String getPartitionField(DscSscTableDetail dscSscTableDetail) {
        switch (dscSscTableDetail.getFdType()) {
            case "5":
            case "6":
            case "7":
                return "," + dscSscTableDetail.getFdLoaddt2();
            default:
                return " ";
        }
    }

    /**
     * Builds the "PARTITION (...)" clause of the insert. Empty for
     * non-partitioned tables (type "4"); two-level types ("5"/"6"/"7") also
     * list the secondary partition column.
     */
    private String getPartitionInsertHiveSql(DscSscTableDetail dscSscTableDetail, Date date) {
        String fdType = dscSscTableDetail.getFdType();
        if ("4".equals(fdType)) { // non-partitioned table: no clause
            return "";
        }

        String partitionValue = getPartitionOneValue(fdType, date);
        StringBuilder clause = new StringBuilder(" PARTITION (")
                .append(dscSscTableDetail.getFdLoaddt()).append("='").append(partitionValue).append("'");
        if ("5".equals(fdType) || "6".equals(fdType) || "7".equals(fdType)) {
            clause.append(",").append(dscSscTableDetail.getFdLoaddt2());
        }
        return clause.append(")").toString();
    }

    /**
     * Builds the " and &lt;col&gt;='&lt;value&gt;'" partition filter for the source
     * query; empty for non-partitioned tables (type "4").
     */
    private String getPartitionWhereHiveSql(DscSscTableDetail dscSscTableDetail, Date date) {
        String fdType = dscSscTableDetail.getFdType();
        if ("4".equals(fdType)) { // non-partitioned table: no filter
            return "";
        }
        return " and " + dscSscTableDetail.getFdLoaddt() + "='" + getPartitionOneValue(fdType, date) + "'";
    }

    /**
     * Formats the partition value for a date by partition type:
     * yyyyMMdd for daily ("1"/"5"), yyyyMM for monthly ("2"/"6"),
     * yyyy for yearly ("3"/"7"); unknown types keep the full yyyyMMdd.
     */
    private String getPartitionOneValue(String fdType, Date date) {
        String yyyyMMdd = DateUtil.format(date, DatePattern.PURE_DATE_PATTERN);
        switch (fdType) {
            case "2": // monthly partition
            case "6": // monthly, two-level
                return yyyyMMdd.substring(0, 6);
            case "3": // yearly partition
            case "7": // yearly, two-level
                return yyyyMMdd.substring(0, 4);
            default:  // "1"/"5" daily, or any other type
                return yyyyMMdd;
        }
    }

    /**
     * Substitutes the column name into the masking-function template,
     * replacing every "#F" placeholder.
     *
     * Fix: replaceAll treats the replacement as a regex replacement string —
     * a column name containing '$' or '\' would be interpreted as a group
     * reference/escape and corrupt the generated SQL (or throw). String.replace
     * performs a literal substitution and is identical for ordinary names.
     */
    private String getHiveAlgorithm(String algorithm, String fieldName) {
        return algorithm.replace("#F", fieldName);
    }

    /**
     * Looks up table metadata by id via the metadata service.
     * May return null for an unknown id — callers must check.
     */
    private SysTableinfo getSysTableinfoById(String tableId) {
        return iSysTableinfoService.getTableInfoById(tableId);
    }

    /**
     * Executes one Hive statement on the pooled data source.
     *
     * Fix: the original never closed the Connection or Statement
     * ({@code druidDataSource.getConnection().createStatement().execute(sql)}),
     * leaking a pooled connection per call; try-with-resources now closes both
     * on every path, including the exception path.
     *
     * @return true on success, false when a SQLException was recorded
     */
    private boolean executeHive(String sql) {
        try (Connection connection = druidDataSource.getConnection();
             Statement statement = connection.createStatement()) {
            statement.execute(sql);
        } catch (SQLException e) {
            recordError("error-rundes-executehivesql:sql =" + sql, 2, e);
            return false;
        }
        return true;
    }

    /**
     * Migrates the generated partition files of one table to every configured
     * target DFS, stopping at the first failure.
     *
     * @return an empty string on success, or the failure message from the
     *         first target that failed
     */
    private String dfsMigrate(DscSscTableDetail dscSscTableDetail, Date date) {
        String logStr = "文件迁移操作:表名称=" + dscSscTableDetail.getFdSchema() + dscSscTableDetail.getFdTableName() +
                "日期=" + DateUtil.formatDate(date);
        log.info(logStr);

        // Resolve the shell script path (index 0; the masked-DB DFS path is currently disabled).
        List<String> pathAry = getMigratePathAry(dscSscTableDetail, date);
        if (null == pathAry || pathAry.size() < 1) {
            return "获取文件迁移相关路径异常!";
        }

        String source = "UAT";//dscSscTableDetail.getFdSource(); temporarily hard-coded instead of reading from the detail

        List<DscSscSource> dscSscSources = getListSources(source);
        String dfsMigStr = "";
        for (DscSscSource dscSscSource : dscSscSources) {
            log.info("脱敏地址=" + JSON.toJSONString(dscSscSource));
            String targetDFSIP = dscSscSource.getDfsFilePath();
            // pathAry.get(0) is the shell script; migrate this table's partition to the target DFS.
            dfsMigStr = DFSFileMigrate.dfsMigrate(pathAry.get(0), targetDFSIP, dscSscTableDetail.getFdSchema(), dscSscTableDetail.getFdTableName(),
                    getPartitionOneValue(dscSscTableDetail.getFdType(), date), dscSscTableDetail.getFdType(), dscSscTableDetail.getFdLoaddt());
            if (StringUtils.isNotEmpty(dfsMigStr)) {
                // Non-empty means this target failed; stop and report it.
                break;
            }
        }
        // Deliberately not using a thread pool yet; revisit with async execution
        // if migration-progress tracking or performance becomes an issue.
        /*FileHiveDFSExecutor.execute(logStr, new Thread() {
            @Override
            public void run() {

            }
        });*/
        return dfsMigStr;
    }

    /**
     * Expands a comma-separated source list into DscSscSource entries, each
     * resolved to its DFS file path via the "operssc_source" parameter group.
     * A null/empty source yields an empty list.
     */
    private List<DscSscSource> getListSources(String source) {
        List<DscSscSource> sources = new ArrayList<>();
        if (StringUtils.isNotEmpty(source)) {
            for (String sourceAdr : source.split(",")) {
                DscSscSource entry = new DscSscSource();
                entry.setSourceAdr(sourceAdr);
                entry.setDfsFilePath(getParamVal(sourceAdr, "operssc_source"));
                sources.add(entry);
            }
        }
        return sources;
    }

    /**
     * Collects the paths needed for file migration. Currently only the shell
     * script path (parameter SHELL_PATH under group DES_PATH); the masked-DB
     * DFS path lookup below is disabled. Returns an empty list when the
     * script path is missing — the caller treats that as a failure.
     */
    private List<String> getMigratePathAry(DscSscTableDetail dscSscTableDetail, Date date) {
        List<String> pathList = new ArrayList<>();
        // Parameter lookup: shell script path.
        String shellPath = getParamVal(SHELL_PATH, DES_PATH);
        if (StringUtils.isEmpty(shellPath)) {
            recordError("shell脚本路径为空！");
            return pathList;
        }
        pathList.add(shellPath);

        // Disabled: the masked-DB DFS source path used to be appended here as index 1.
       /* String desDfs = getParamVal(DES_DFS_PATH, DES_PATH);
        if (StringUtils.isEmpty(desDfs)) {
            recordError("脱敏dfs文件路径为空！");
            return pathList;
        }
        pathList.add(desDfs + getDesTableDFSDatePath(dscSscTableDetail, date));*/
        return pathList;
    }

    /**
     * Builds the DFS directory of one partition inside the masked database:
     * /&lt;maskSchema&gt;.db/&lt;schema&gt;_&lt;table&gt;/&lt;loadCol&gt;=&lt;dateValue&gt;/
     */
    private String getDesTableDFSDatePath(DscSscTableDetail dscSscTableDetail, Date date) {
        StringBuilder path = new StringBuilder(File.separator);
        path.append(DesensHiveCreateTableService.schema).append(".db").append(File.separator);
        path.append(dscSscTableDetail.getFdSchema()).append("_").append(dscSscTableDetail.getFdTableName()).append(File.separator);
        path.append(dscSscTableDetail.getFdLoaddt()).append("=").append(getFileNameByProNum(date));
        path.append(File.separator);
        return path.toString();
    }

    /**
     * Builds the DFS directory of one partition inside the target database:
     * /&lt;schema&gt;.db/&lt;table&gt;/&lt;loadCol&gt;=&lt;dateValue&gt;/
     */
    private String getTargetTableDFSDatePath(DscSscTableDetail dscSscTableDetail, Date date) {
        return File.separator
                + dscSscTableDetail.getFdSchema() + ".db" + File.separator
                + dscSscTableDetail.getFdTableName() + File.separator
                + dscSscTableDetail.getFdLoaddt() + "=" + getFileNameByProNum(date)
                + File.separator;
    }

    /**
     * Formats the partition-directory date value according to the bound
     * pro_num: "1" -&gt; yyyyMMdd, "2"/"3" -&gt; yyyyMM, "4" -&gt; yyyy; any other
     * value (including null) falls back to the full yyyyMMdd.
     *
     * Fix: the original called pro_num.equals(...) and threw an NPE when
     * bindParam had not been invoked; constant-first comparison makes the
     * documented fallback branch actually reachable.
     *
     * NOTE(review): "3" mapping to yyyyMM (same as "2") looks inconsistent
     * with getPartitionOneValue, where type "3" means yearly — confirm this
     * mapping is intended before changing it.
     */
    private String getFileNameByProNum(Date date) {
        String fileName = DateUtil.format(date, DatePattern.PURE_DATE_PATTERN);
        if ("1".equals(pro_num)) {
            return fileName;
        } else if ("2".equals(pro_num) || "3".equals(pro_num)) {
            return fileName.substring(0, 6);
        } else if ("4".equals(pro_num)) {
            return fileName.substring(0, 4);
        }
        return fileName;
    }

    /**
     * Reads one parameter value from the parameter-code service.
     * TODO(ls): add caching for these repeated lookups.
     *
     * @param key       the parameter code
     * @param parterKey the parameter group
     * @return the configured value, or null when the code is not defined
     */
    private String getParamVal(String key, String parterKey) {
        Paramcode paramcode = iParamcodeService.getParamPreByCode(parterKey, key);
        return (null == paramcode) ? null : paramcode.getFdParameterprice();
    }

}
