package com.hexinfo.dmpro.sparing.async;

import cn.hutool.core.date.DateUtil;
import cn.hutool.core.io.file.FileAppender;
import cn.hutool.core.util.StrUtil;
import com.hexinfo.dmpro.common.utils.LogFileUtil;
import com.hexinfo.dmpro.common.utils.ScanCommonConstants;
import com.hexinfo.dmpro.sparing.dto.DistCPDTO;
import com.hexinfo.dmpro.sparing.dto.StateDataDTO;
import com.hexinfo.dmpro.sparing.dto.TableDistCPDTO;
import com.hexinfo.dmpro.sparing.dto.TableSyncConfDTO;
import com.hexinfo.dmpro.sparing.exception.ExecException;
import com.hexinfo.dmpro.sparing.model.HdfsHeatBackup;
import com.hexinfo.dmpro.sparing.model.HotStandbyTable;
import com.hexinfo.dmpro.sparing.model.TimeFieldMatching;
import com.hexinfo.dmpro.sparing.service.*;
import lombok.AllArgsConstructor;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Lazy;
import org.springframework.scheduling.annotation.Async;
import org.springframework.scheduling.annotation.AsyncResult;
import org.springframework.stereotype.Component;

import java.io.File;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.concurrent.Future;

@Component
@Slf4j
@RequiredArgsConstructor(onConstructor_ = {@Lazy, @Autowired})
public class AsyncTableBackup {

    private final LogFileUtil logFileUtil;
    private final TableSyncMethodService tableSyncMethodService;
    private final TimeFieldMatchingService timeFieldMatchingService;
    private final HotStandbyTableService hotStandbyTableService;

    /**
     * grep LS命令shell脚本
     */
    @Value("${hdfs.grepHdfsFiles}")
    private String grepHdfsFiles;
    /**
     * grep LS操作数据存储位置
     */
    @Value("${hdfs.sourceFilePath}")
    private String sourceFilePath;
    /**
     * distcp命令shell脚本
     */
    @Value("${hdfs.heatBackup}")
    private String heatBackup;
    /**
     * 库环境前缀
     */
    @Value("${hdfs.tarPrefix}")
    private String tarPrefix;

    /**
     * 单表热备同步全流程
     */
    @Async("tableBackupThreadPool")
    public Future<Boolean> tableHotStandby(HotStandbyTable hotStandbyTable, TableSyncConfDTO tableSyncConfDTO) {
        try {
            //修改任务状态，获取日志对象
            Map<String, String> map = upTask(hotStandbyTable);
            logFileUtil.appendLog(hotStandbyTable.getHeatLog(),"----------------------时间过滤",false);
            //① 时间过滤（获取表增量数据）
            List<HdfsHeatBackup> hdfsHeatBackups = lsTimeFilter(hotStandbyTable, tableSyncConfDTO);
            upTableState(hotStandbyTable);
            logFileUtil.appendLog(hotStandbyTable.getHeatLog(),"----------------------数据同步",false);
            //② 数据同步（增量数据同步）
            dataSync(hotStandbyTable, hdfsHeatBackups);

            //测试环境还是生产环境
            String dbTbName = hotStandbyTable.getDbTbName();
            if (StrUtil.isNotBlank(tarPrefix)){
                dbTbName = tarPrefix + hotStandbyTable.getDbName()+"."+hotStandbyTable.getTbName();
            }

            logFileUtil.appendLog(hotStandbyTable.getHeatLog(),"----------------------刷目标表",false);
            //③ 刷目标表
            refreshTable(hotStandbyTable,dbTbName);
            logFileUtil.appendLog(hotStandbyTable.getHeatLog(),"----------------------同步删除",false);
            //④ 同步删除（删除时间范围外的日期分区数据、删除源删除的时间范围内的日期分区数据）
            dropDateData(hotStandbyTable,map,dbTbName);
            logFileUtil.appendLog(hotStandbyTable.getHeatLog(),"----------------------数据比对",false);
            //⑤ 数据比对
            tableSyncMethodService.dataCompare(hotStandbyTable,map,dbTbName);
            //修改任务数据
            upTableState(hotStandbyTable);
            return new AsyncResult<>(true);
        }catch (ExecException e){
            upTableStateFail(hotStandbyTable, e.getMessage());
            return new AsyncResult<>(false);
        }catch (Exception e){
            log.error("【异常】单表热备同步全流程");
            log.error(e.getMessage(),e);
            return new AsyncResult<>(false);
        }finally {
            logFileUtil.appendLog(hotStandbyTable.getHeatLog(),"----------------------热备结束----------------------",false);
        }
    }

    /**
     * 修改任务状态，获取日志对象
     */
    public Map<String, String> upTask(HotStandbyTable hotStandbyTable){
        String format = DateUtil.format(DateUtil.date(), "yyyy-MM-dd HH:mm:ss");
        hotStandbyTable.setStartTime(format);
        hotStandbyTable.setTaskState(ScanCommonConstants.ClusterName.JXZ.value);
        hotStandbyTable.setHotStandbyTimeData("【"+ScanCommonConstants.ClusterName.SJGL.value + "】"+ format+"--");
        upTableState(hotStandbyTable);
        FileAppender heatLog = new FileAppender(new File(hotStandbyTable.getLogPath()), 100, true);
        hotStandbyTable.setHeatLog(heatLog);
        //获取表日期分区格式的当前日期、保留N月前日期
        TimeFieldMatching timeFieldMatching = timeFieldMatchingService.queryList(hotStandbyTable.getDatePartition());
        Map<String, String> map = getMonthsAgo(timeFieldMatching.getTimeFieldValue(), hotStandbyTable.getHoldDataMonth(), hotStandbyTable.getTaskDate());
        logFileUtil.appendLog(heatLog,"----------------------热备开始----------------------",true);
        logFileUtil.appendLog(heatLog,"热备同步时间范围："+hotStandbyTable.getSyncRangeStartTime()+" -- "+hotStandbyTable.getSyncRangeEndTime(),true);
        logFileUtil.appendLog(heatLog,"保留策略："+hotStandbyTable.getHoldDataMonth()+"（月）",true);
        logFileUtil.appendLog(heatLog,"N月前日期 -- 当前日期："+map.get("monthsAgo")+" -- "+map.get("monthsNow"),true);
        return map;
    }

    /**
     * 修改数据库状态
     */
    public void upTableStateFail(HotStandbyTable hotStandbyTable,String code){
        if ("10001".equals(code)){
            hotStandbyTable.setTaskState(ScanCommonConstants.ClusterName.GLSB.value);
        }else if ("10002".equals(code)){
            hotStandbyTable.setTaskState(ScanCommonConstants.ClusterName.TBSB.value);
        }else if ("10003".equals(code)){
            hotStandbyTable.setTaskState(ScanCommonConstants.ClusterName.SXSB.value);
        }else if ("10004".equals(code)){
            hotStandbyTable.setTaskState(ScanCommonConstants.ClusterName.SCSB.value);
        }else {
            hotStandbyTable.setTaskState(ScanCommonConstants.ClusterName.BDSB.value);
        }
        upTableState(hotStandbyTable);
    }

    /**
     * 修改数据库状态（加同步锁）
     */
    public synchronized void upTableState(HotStandbyTable hotStandbyTable){
        hotStandbyTableService.updateById(hotStandbyTable);
    }

    /**
     * 时间过滤（获取表增量数据）
     */
    public List<HdfsHeatBackup> lsTimeFilter(HotStandbyTable hotStandbyTable,TableSyncConfDTO tableSyncConfDTO){
        //① 增量数据获取
        Map<String, String> map = hotStandbyTable.gainLsShell(tableSyncConfDTO.getJQHdfsPrefix(), tableSyncConfDTO.getRangeAllDate(), sourceFilePath, grepHdfsFiles);
        int execCode = tableSyncMethodService.execShellCode(map.get("grepLsShell"),hotStandbyTable.getHeatLog());
        //② 时间范围过滤
        List<HdfsHeatBackup> filePath = tableSyncMethodService.gainFileData(tableSyncConfDTO.getRangeStartDate(), tableSyncConfDTO.getRangeEndDate(),
                tableSyncConfDTO.getJQHdfsPrefix(), tableSyncConfDTO.getWGHdfsPrefix(), map.get("filePath"));
        logFileUtil.appendLog(hotStandbyTable.getHeatLog(),"hdfs所有源变更数量："+filePath.size(),true);
        //③ 排除不是这个源的hdfs(无源不需要这一步)
        List<HdfsHeatBackup> hdfsHeatBackups = new ArrayList<>();
        if (hotStandbyTable.getSourceName().equals("无源")){
            logFileUtil.appendLog(hotStandbyTable.getHeatLog(),"源分区值：无源",true);
            hdfsHeatBackups = filePath;
        }else {
            logFileUtil.appendLog(hotStandbyTable.getHeatLog(),"源分区值：" + hotStandbyTable.getSourceValue(),true);
            String[] split = hotStandbyTable.getSourceValue().split(",");
            for (String sourceValue : split) {
                String source = "="+sourceValue;
                for (HdfsHeatBackup hdfsHeatBackup : filePath) {
                    if (hdfsHeatBackup.getSourceAddress().contains(source)){
                        hdfsHeatBackups.add(hdfsHeatBackup);
                    }
                }
            }
        }
        int size = hdfsHeatBackups.size();
        hotStandbyTable.setHdfsNumber(Integer.toString(size));
        upTableTimeData(hotStandbyTable,ScanCommonConstants.ClusterName.SJTB.value);
        logFileUtil.appendLog(hotStandbyTable.getHeatLog(),"hdfs此源变更数量："+size,false);
        return hdfsHeatBackups;
    }

    /**
     * 同步删除（删除时间范围外的日期分区数据、删除源删除的时间范围内的日期分区数据）
     */
    public void dropDateData(HotStandbyTable hotStandbyTable,Map<String, String> map, String dbTbName){
        // ① 删除时间范围外的日期分区数据
        tableSyncMethodService.dropRangeBeyond(hotStandbyTable,map,dbTbName);
        // ② 删除源删除的时间范围内的日期分区数据
        tableSyncMethodService.dropRangeWithin(hotStandbyTable,map,dbTbName);

        upTableTimeData(hotStandbyTable,ScanCommonConstants.ClusterName.SJBD.value);
    }

    /**
     * 刷目标表
     */
    public void refreshTable(HotStandbyTable hotStandbyTable,String dbTbName){
//        //① hive刷新元数据
//        String setSql = "set hive.msck.path.validation=ignore";
//        String sqlMsck = "msck repair table {dbTbName}";
//        String msckSql = sqlMsck.replace("{dbTbName}", dbTbName);
//        tableSyncMethodService.execSqlHive(setSql,msckSql,hotStandbyTable.getHeatLog());
        //① impala刷新元数据
        String alterSql = "alter table {tableName} recover partitions";
        String sqlAlter = alterSql.replace("{tableName}", dbTbName);
        tableSyncMethodService.refreshDataImpala(sqlAlter,hotStandbyTable.getHeatLog());
        //② 刷新impala目标表
        String sql = "invalidate metadata {dbTbName}";
        String refreshSql = sql.replace("{dbTbName}", dbTbName);
        tableSyncMethodService.execSqlImpala(refreshSql,hotStandbyTable.getHeatLog());

        upTableTimeData(hotStandbyTable,ScanCommonConstants.ClusterName.TBSC.value);
    }

    /**
     * 数据同步（增量数据同步）
     */
    public void dataSync(HotStandbyTable hotStandbyTable,List<HdfsHeatBackup> hdfsHeatBackups){
        logFileUtil.appendLog(hotStandbyTable.getHeatLog(),"执行distcp命令：",false);
        for (HdfsHeatBackup hdfsHeatBackup : hdfsHeatBackups) {
            String targetAddress = hdfsHeatBackup.getTargetAddress();
            if (StrUtil.isNotBlank(tarPrefix)){
                targetAddress = targetAddress.replace(hotStandbyTable.getDbName()+".db/", tarPrefix + hotStandbyTable.getDbName()+".db/");
            }
            //完整的distcp的sh脚本命令
            String distcpShell = heatBackup +" "+ hdfsHeatBackup.getSourceAddress() +" "+targetAddress;
            int code = tableSyncMethodService.execDistcpCode(distcpShell, hotStandbyTable.getHeatLog());
        }

        upTableTimeData(hotStandbyTable,ScanCommonConstants.ClusterName.SMBB.value);
    }

    /**
     * 修改数据库状态
     */
    public void upTableTimeData(HotStandbyTable hotStandbyTable,String stage){
        String format = DateUtil.format(DateUtil.date(), "yyyy-MM-dd HH:mm:ss");
        String timeData = hotStandbyTable.getHotStandbyTimeData()+ format + "\n" + "【"+stage + "】"+ format+"--";
        hotStandbyTable.setHotStandbyTimeData(timeData);
    }

    /**
     * 按照日期格式获取N个月前的日期
     */
    private Map<String,String> getMonthsAgo(String timeFieldValue,String holdDataMonth,String taskDate) {
        Date date = DateUtil.parse(taskDate, "yyyyMMdd");
        int monthsNum = Integer.parseInt(holdDataMonth);
        // 计算N个月前的日期
        Date monthsAgo = DateUtil.offsetMonth(date, -monthsNum);
        Map<String,String> map = new HashMap<>();
        if ("yyyyMMdd".equals(timeFieldValue)){
            map.put("monthsAgo",DateUtil.format(monthsAgo, "yyyyMMdd"));
            map.put("monthsNow",DateUtil.format(date, "yyyyMMdd"));
        }else if ("yyyyMM".equals(timeFieldValue)){
            map.put("monthsAgo",DateUtil.format(monthsAgo, "yyyyMM"));
            map.put("monthsNow",DateUtil.format(date, "yyyyMM"));
        }else {
            map.put("monthsAgo",DateUtil.format(monthsAgo, "yyyy"));
            map.put("monthsNow",DateUtil.format(date, "yyyy"));
        }
        return map;
    }
}
