package com.hexinfo.dmpro.sparing.service.impl;

import cn.hutool.core.bean.BeanUtil;
import cn.hutool.core.date.DateTime;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.io.file.FileAppender;
import cn.hutool.core.io.file.FileReader;
import cn.hutool.core.io.file.FileWriter;
import cn.hutool.core.text.StrSpliter;
import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.StrUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.hexinfo.dmpro.common.model.MessageCenter;
import com.hexinfo.dmpro.common.service.MessageCenterService;
import com.hexinfo.dmpro.common.utils.CommonConstants;
import com.hexinfo.dmpro.common.utils.ConnExecuteUtil;
import com.hexinfo.dmpro.common.utils.ScanCommonConstants;
import com.hexinfo.dmpro.component.comparison.entity.TableContrastParamEntity;
import com.hexinfo.dmpro.component.comparison.entity.TableContrastResultEntity;
import com.hexinfo.dmpro.component.comparison.enums.TableContrastEnums;
import com.hexinfo.dmpro.component.comparison.service.ITableContrastService;
import com.hexinfo.dmpro.sparing.model.HdfsHeatBackup;
import com.hexinfo.dmpro.sparing.model.HdfsPathAndSizedto;
import com.hexinfo.dmpro.sparing.service.ScriptExecService;
import com.hexinfo.dmpro.sparing.service.ScriptExecTimeService;
import com.hexinfo.dmpro.sparing.service.ThreadPoolService;
import liquibase.pro.packaged.E;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;

import javax.sql.DataSource;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;
import java.sql.Connection;
import java.sql.Statement;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;

/**
 * 脚本执行
 */
@Service
@Slf4j
public class ScriptExecServiceImpl implements ScriptExecService {

    @Value("${hdfs.refreshTable}")
    private String refreshTable;
    @Value("${heatBackup.logPath}")
    private String logPath;
    @Value("${hdfs.hdfsIpPort}")
    private String hdfsIpPort;
    @Value("${hdfs.getHdfsFiles}")
    private String getHdfsFiles;
    @Value("${hdfs.sourceFilePath}")
    private String sourceFilePath;
    @Value("${hdfs.heatBackup}")
    private String heatBackup;

    @Autowired
    private ITableContrastService iTableContrastService;

    @Autowired
    private MessageCenterService messageCenterService;

    @Autowired
    private ThreadPoolService threadPoolService;

    @Autowired
    private ScriptExecTimeService scriptExecTimeService;

    @Autowired
    @Qualifier("connPoolHiveSource")
    private DataSource connPoolSource;

    @Autowired
    @Qualifier("connPoolHiveTarget")
    private DataSource connPoolTarget;

    //查询状态Map集合
    private Map<String, MessageCenter> messageMap = new ConcurrentHashMap<>();

    private Set<String> tableNameSet = new HashSet<>();

    /**
     * Exposes the live task-status map so schedulers/pollers can inspect
     * the state of running hot-backup and comparison tasks.
     *
     * @return the shared (concurrent) status map, keyed by task flag
     */
    @Override
    public Map<String, MessageCenter> getMessageMap() {
        return messageMap;
    }

    /**
     * Replaces the set of table names tracked by the timeout watchdog
     * ({@code timeoutEnd} loops until every name here has been visited).
     *
     * @param tableNameSet table flags expected to finish
     */
    @Override
    public void setTableNameSet(Set<String> tableNameSet) {
        this.tableNameSet = tableNameSet;
    }

    /**
     * Asynchronously executes the shell-based hot backup (distcp) for one
     * source table: decides full vs. incremental scope via syncRange, filters
     * the HDFS paths, then dispatches to the time-window or by-source path.
     *
     * @param hdfsHeatBackups all HDFS paths belonging to the table
     * @param hiveSql         partition predicate (logged and forwarded)
     * @param hdfsHeatBackup  table-level backup definition (representative entry)
     * @param date            last hot-backup time, bounds the incremental window
     */
    @Override
    public void execSh(List<HdfsHeatBackup> hdfsHeatBackups, String hiveSql,HdfsHeatBackup hdfsHeatBackup,Date date) {

        log.info("----------------"+hdfsHeatBackup.getSourceName()+"表distcp开始-------------------");
        // Buffered appender for this run's log file (buffers 20 lines, flushed in finally).
        // NOTE(review): the path is "<sourceData>/<sourceData>" while strPath below uses
        // "<sourceData>/" + HEATBACKUPPATH + sourceName — confirm the second segment here
        // was not meant to be getSourceName().
        FileAppender heatlog = new FileAppender(new File(path(
                hdfsHeatBackup.getSourceData()+"/"+hdfsHeatBackup.getSourceData())), 20, true);
        heatlog.append(appendLogStr(hdfsHeatBackup.getSourceName()+"表热备开始--hdfs数量："+hdfsHeatBackups.size()));
        try{
            // Per-run log file: {logPath with today's date}/{sourceData}/HEATBACKUPPATH{sourceName}.txt
            String strPath = logPath.replace("{date}", DateUtil.format(DateUtil.date(), "yyyyMMdd"))
                                    +hdfsHeatBackup.getSourceData()+"/"
                                    +ScanCommonConstants.ClusterName.HEATBACKUPPATH.value+hdfsHeatBackup.getSourceName()+".txt";
            String ss = "";
            List<HdfsHeatBackup> hdfsHeatBackups1 = null;
            // syncRange null/0 => full backup ("1"), otherwise incremental ("2").
            if (hdfsHeatBackup.getSyncRange() == null || hdfsHeatBackup.getSyncRange() == 0){
                heatlog.append(appendLogStr("【全量LS开始】"));
                ss = "全量";
                hdfsHeatBackups1 = getTimeHdfsFile(hdfsHeatBackups,date,heatlog,"1");
                heatlog.append(appendLogStr("【全量LS结束】"));
            }else {
                heatlog.append(appendLogStr("【增量LS开始】"));
                ss = "增量";
                hdfsHeatBackups1 = getTimeHdfsFile(hdfsHeatBackups,date,heatlog,"2");
                heatlog.append(appendLogStr("【增量LS结束】"));
            }
            log.info("---------------"+hdfsHeatBackup.getSourceName()+"表的分区条件："+hiveSql);
            // Backup sort: "2" = time-window execution, anything else = by source.
            if (hdfsHeatBackup.getBackupSort().equals("2")){
                ss = "--" + ss + "--时间窗口--";
                scriptExecTimeService.execShTime(hdfsHeatBackups1,hiveSql,hdfsHeatBackup,strPath,ss);
            }else {
                ss = "--" + ss + "--按源--";
                execShSource(hdfsHeatBackups1,hiveSql,hdfsHeatBackup,strPath,ss);
            }
            heatlog.append(appendLogStr(hdfsHeatBackup.getSourceName()+"表热备结束"));
        }catch (Exception e){
            heatlog.append(appendLogStr("----------------【热备类型分类执行异常】："+e.getMessage()));
            log.error("--------按表热备hdfs失败，报错信息："+e.getMessage());
        }finally {
            heatlog.flush();   // push buffered lines out to the log file
        }
    }

    /**
     * "By source" hot backup: copies every HDFS path of the table to the
     * target cluster via the thread pool, refreshes the target table, and
     * marks the message-center entry for this run as finished.
     *
     * @param hdfsHeatBackups     HDFS paths (one per file) to copy
     * @param hiveSql             partition predicate, logged for traceability
     * @param hdfsHeatBackupModel table-level backup definition
     * @param strPath             log file this run appends to
     * @param ss                  human-readable run label (scope + mode)
     * @throws Exception propagated from the copy/refresh shell execution
     */
    public void execShSource(List<HdfsHeatBackup> hdfsHeatBackups, String hiveSql, HdfsHeatBackup hdfsHeatBackupModel,
                             String strPath, String ss) throws Exception {
        appendLog(strPath, hdfsHeatBackupModel.getSourceName() + "表【" + ss + "】distcp开始，HDFS地址个数："
                + hdfsHeatBackups.size() + "，分区条件：" + hiveSql);
        MessageCenter messageCenter = addMessageCenter(ScanCommonConstants.ClusterName.HEATBACKUP.value
                + ss + hdfsHeatBackupModel.getSourceName(), strPath);
        String strType = "执行成功";
        // StringBuilder instead of String += in the loop (the old code copied the
        // whole string on every iteration).
        StringBuilder sizes = new StringBuilder("文件大小排序：").append("\n");
        for (HdfsHeatBackup hdfsHeatBackup : hdfsHeatBackups) {
            sizes.append(hdfsHeatBackup.getFileSize()).append("\n");
            // Step 1: distcp this path to the target cluster.
            threadPoolService.execShellCodes(hdfsHeatBackup, strPath);
        }
        appendLog(strPath, sizes.toString());
        // Step 2: refresh the target table so the copied files become visible.
        // The exit code used to be silently dropped; at least record it.
        int refreshResult = ConnExecuteUtil.execShellCode(refreshTable + " " + hdfsHeatBackupModel.getTargetName());
        log.info("----------------{}刷新表结果：{}", hdfsHeatBackupModel.getSourceName(), refreshResult);
        // Flip the message-center entry to the final status.
        messageCenter.setDataType(strType);
        updataMessageCenter(messageCenter);
        // NOTE(review): the table-comparison kickoff below was deliberately disabled;
        // kept for reference.
//        String contrastPath = logPath.replace("{date}", DateUtil.format(DateUtil.date(), "yyyyMMdd"))
//                +ScanCommonConstants.ClusterName.CONTRASTPATH.value+hdfsHeatBackupModel.getSourceName()+".txt";
//        appendLog(contrastPath,"【表数据比对开始】"+"\n");
//        MessageCenter messageCenter1 = addMessageCenter(ScanCommonConstants.ClusterName.CONTRAST.value
//                + ss + hdfsHeatBackupModel.getSourceName(),contrastPath);
//        messageMap.put(TableContrastEnums.WARM.getValue()+"."+hdfsHeatBackupModel.getSourceName(),messageCenter1);
//        boolean bool = iTableContrastService.createTableContrastTask(createModel(hdfsHeatBackupModel,hiveSql));
    }

    /**
     * Builds the parameter entity for a WARM (hot-backup) source-vs-target
     * table comparison. The same partition predicate is applied to both sides.
     *
     * @param hdfsHeatBackup table-level backup definition holding db/table names
     * @param hiveSql        partition predicate for both source and target
     * @return fully populated comparison parameters
     */
    private TableContrastParamEntity createModel(HdfsHeatBackup hdfsHeatBackup, String hiveSql) throws Exception {
        TableContrastParamEntity param = new TableContrastParamEntity();
        param.setContrastType(TableContrastEnums.WARM.getValue());
        // Source side.
        param.setSourceDbName(hdfsHeatBackup.getSourceDbName());
        param.setSourceTableName(hdfsHeatBackup.getSourceTableName());
        param.setSourceQueryCriteria(hiveSql);
        // Target side mirrors the source predicate.
        param.setTargetDbName(hdfsHeatBackup.getTargetDbName());
        param.setTargetTableName(hdfsHeatBackup.getTargetTableName());
        param.setTargetQueryCriteria(hiveSql);
        return param;
    }

    /**
     * Polls the comparison result for {@code tableFlag}; once the task has left
     * the RUNNING state, writes the outcome to the run's log file and updates
     * the message-center record cached in {@code messageMap}.
     *
     * @param tableFlag comparison task key (also the key into messageMap)
     * @return {@code tableFlag} when the task finished (success or failure);
     *         empty string while still running or on any error
     */
    @Override
    public String getHeatBackupResult(String tableFlag) {
        try {
            TableContrastResultEntity result = iTableContrastService
                    .queryTableContrastResult(tableFlag);
            if (!result.getStatus().equals(TableContrastEnums.RUNNING.getValue())) {
                MessageCenter messageCenter = messageMap.get(tableFlag);
                // ACHIEVE => success, every other terminal state => failure.
                String strType = result.getStatus().equals(TableContrastEnums.ACHIEVE.getValue())
                        ? "执行成功" : "执行失败";
                appendLog(messageCenter.getLogPath(), "比对信息：" + result.getFalseMessage() + "\n");
                messageCenter.setDataType(strType);
                updataMessageCenter(messageCenter);
                appendLog(messageCenter.getLogPath(), "【数据比对结束】状态：" + strType + "\n");
                return tableFlag;
            }
            return "";
        } catch (Exception e) {
            // Was a silent swallow; keep the "" contract but record the cause
            // so failures are diagnosable.
            log.error("查询比对结果失败，tableFlag：" + tableFlag, e);
            return "";
        }
    }

    /**
     * Forcibly ends comparison tasks that timed out: visits every entry of the
     * live status map once, cancels any task still "进行中" (in progress), logs
     * it, and flips its message-center status to failed.
     *
     * NOTE(review): if {@code messageMap} never accumulates an entry for every
     * name in {@code tableNameSet}, the outer loop spins forever — confirm the
     * producers always populate the map.
     */
    @Override
    public void timeoutEnd() {
        int total = tableNameSet.size();
        Set<String> processed = new HashSet<>();
        while (processed.size() != total) {
            // The map changes concurrently; iterate via the (weakly consistent) iterator.
            Iterator<Map.Entry<String, MessageCenter>> iterator = messageMap.entrySet().iterator();
            while (iterator.hasNext()) {
                Map.Entry<String, MessageCenter> entry = iterator.next();
                if (!processed.contains(entry.getKey())) {
                    MessageCenter messageCenter = entry.getValue();
                    if (messageCenter != null && messageCenter.getDataType() != null
                            && messageCenter.getDataType().equals("进行中")) {
                        iTableContrastService.cancelTableContrastTask(entry.getKey());
                        appendLog(messageCenter.getLogPath(), "比对超时强制结束" + "\n");
                        messageCenter.setDataType("执行失败");
                        updataMessageCenter(messageCenter);
                    }
                    processed.add(entry.getKey());
                }
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException e) {
                    // Restore the interrupt flag (was printStackTrace) and stop,
                    // so callers can actually cancel this busy loop.
                    Thread.currentThread().interrupt();
                    log.error("比对任务超时处理被中断", e);
                    return;
                }
            }
        }
        log.info("-----------------------比对任务超时任务全部强制结束完成------------------------");
    }

    /**
     * Creates and persists a new "in progress" message-center record for a
     * hot-backup/comparison run.
     *
     * @param messageSub subject line shown in the message center
     * @param logPath    log file associated with this run
     * @return the saved record, later updated with the final status
     */
    private MessageCenter addMessageCenter(String messageSub, String logPath) {
        MessageCenter record = new MessageCenter();
        record.setMenu("数据热备");
        record.setOperator("admin");
        record.setDataType("进行中");
        record.setMessageSubject(messageSub);
        record.setMessageWeight("1");
        record.setLogPath(logPath);
        messageCenterService.save(record);
        return record;
    }

    /**
     * Persists in-place changes made to an existing message-center record.
     * (Name keeps the historical spelling; in-file callers depend on it.)
     *
     * @param messageCenter record whose status fields were mutated
     */
    private void updataMessageCenter(MessageCenter messageCenter) {
        messageCenterService.updateById(messageCenter);
    }

    /**
     * Appends one timestamped line to the hot-backup log file at {@code strPath}.
     * hutool's FileWriter opens, appends and closes per call.
     *
     * @param strPath absolute path of the log file
     * @param str     message text (timestamp prefix is added here)
     */
    private void appendLog(String strPath, String str) {
        String line = "[" + DateUtil.format(DateUtil.date(), "yyyy-MM-dd HH:mm:ss") + "]" + str + "\n";
        new FileWriter(strPath).append(line);
    }

    /**
     * Appends one timestamped line to the per-name log file under the
     * configured (date-substituted) log directory.
     *
     * @param name base name of the log file (".txt" is appended)
     * @param str  message text (timestamp prefix is added here)
     */
    public void appendLogs(String name, String str) {
        // path(name) resolves "{logPath-with-today}{name}.txt", identical to the inline form.
        new FileWriter(path(name)).append(
                "[" + DateUtil.format(DateUtil.date(), "yyyy-MM-dd HH:mm:ss") + "]" + str + "\n");
    }

    /**
     * Resolves the log-file path for {@code name}: the configured logPath with
     * its {date} placeholder replaced by today's yyyyMMdd, plus "{name}.txt".
     */
    private String path(String name) {
        String datedDir = logPath.replace("{date}", DateUtil.format(DateUtil.date(), "yyyyMMdd"));
        return datedDir + name + ".txt";
    }

    /**
     * Prefixes {@code str} with a "[yyyy-MM-dd HH:mm:ss]" timestamp for log output.
     */
    private String appendLogStr(String str) {
        String stamp = DateUtil.format(DateUtil.date(), "yyyy-MM-dd HH:mm:ss");
        return "[" + stamp + "]" + str;
    }

    /**
     * Filters the candidate HDFS paths of one table down to those whose files
     * were modified inside the backup time window and, for incremental runs,
     * orders them by file size descending so the largest copies start first.
     *
     * @param hdfsHeatBackups candidate HDFS paths for the table (first element is representative)
     * @param date            last hot-backup time, lower bound of the window
     * @param heatlog         buffered run log
     * @param type            "1" = full backup (return input unchanged once the listing is non-empty),
     *                        "2" = incremental (size-filter and sort)
     * @return filtered/sorted list; empty list when nothing matched the window
     */
    public List<HdfsHeatBackup> getTimeHdfsFile(List<HdfsHeatBackup> hdfsHeatBackups, Date date, FileAppender heatlog
            , String type) {
        int size = hdfsHeatBackups.size();
        Date dateD = DateUtil.date();
        // NOTE(review): the old comment claimed "last backup time +3h" but this
        // subtracts 1 hour — confirm the intended offset.
        Date newDate3 = DateUtil.offsetHour(date, -1);
        // ① run the "ls" shell script against the table's HDFS directory and
        //    dump the listing into a local file
        HdfsHeatBackup hdfsHeatBackup = hdfsHeatBackups.get(0);
        String sourceTableName = hdfsHeatBackup.getSourceTableName();
        String tableHdfsPath = (StrSpliter.split(hdfsHeatBackup.getSourceAddress(),sourceTableName,true,true)
                .get(0)+sourceTableName)
                .replace(hdfsIpPort,"");                                   // HDFS path up to the table directory
        String filePath = sourceFilePath+hdfsHeatBackup.getSourceNameType()+".txt";  // local listing file
        String pathOrCommand = getHdfsFiles+" "+tableHdfsPath+" "+filePath;          // shell command
        int code = ConnExecuteUtil.execShellCode(pathOrCommand);
        log.info("{}命令执行结果：{}", pathOrCommand, code);
        heatlog.append(appendLogStr(pathOrCommand + "命令执行结果：" + code));
        // ② parse the listing and keep entries that fall inside the time window
        List<HdfsPathAndSizedto> hdfsPathAndSizedtos = readTimeFile(filePath,tableHdfsPath,newDate3,dateD,heatlog);
        if (ObjectUtil.isEmpty(hdfsPathAndSizedtos)){
            heatlog.append(appendLogStr(hdfsHeatBackup.getSourceName()+"无符合条件的文件、目录"));
            return new ArrayList<HdfsHeatBackup>();
        }
        // Full backup: copy everything, no size filtering or sorting needed.
        if (type.equals("1")){
            return hdfsHeatBackups;
        }
        // Attach the size found in the listing to each matching backup entry.
        hdfsHeatBackups.forEach(entityA -> {
            for (HdfsPathAndSizedto entityB : hdfsPathAndSizedtos) {
                if (entityA.getSourceAddress().equals(entityB.getHdfsPath())) {
                    entityA.setFileSize(entityB.getFileSize());
                    break;
                }
            }
        });
        // BUG FIX: the old filter KEPT entries with a null/0 size — the inverse of
        // its own "exclude empty" comment — and the sort below then NPE'd on the
        // null sizes it had kept. Keep only entries that matched a sized file.
        List<HdfsHeatBackup> filteredEntities = hdfsHeatBackups.stream()
                .filter(entityA -> entityA.getFileSize() != null && entityA.getFileSize() != 0L)
                .collect(Collectors.toList());
        // ③ order by file size, descending.
        List<HdfsHeatBackup> HdfsHeatBackupList = filteredEntities.stream()
                .sorted(Comparator.comparing(HdfsHeatBackup::getFileSize).reversed())
                .collect(Collectors.toList());
        heatlog.append(appendLogStr(hdfsHeatBackup.getSourceName()
                +"--表初始Hdfs文件数量"+size+ "--过滤后Hdfs文件数量："+HdfsHeatBackupList.size()));
        return HdfsHeatBackupList;
    }

    /**
     * Parses an "hdfs dfs -ls"-style listing file and returns the parent paths
     * and sizes of files whose modification timestamp falls inside the time
     * window. Column positions are derived from where {@code pathName} starts
     * in each line (timestamp is the 16 chars before it, size precedes that).
     *
     * @param filePath local file holding the HDFS listing
     * @param pathName HDFS path of the table directory (anchor for column slicing)
     * @param newDate3 intended window start — NOTE(review): currently unused,
     *                 the check uses a hard-coded "2023-09-15" bound instead; confirm.
     * @param dateD    window end (now)
     * @param heatlog  buffered run log
     * @return one dto per matching file; empty list when the listing is empty
     */
    private List<HdfsPathAndSizedto> readTimeFile(String filePath, String pathName, Date newDate3
            , Date dateD, FileAppender heatlog) {
        FileReader fileReader = new FileReader(filePath);
        List<HdfsPathAndSizedto> hdfsPathAndSizedtos = new ArrayList<>();
        List<String> hdfsPaths = fileReader.readLines();
        if (ObjectUtil.isEmpty(hdfsPaths)) {
            heatlog.append(appendLogStr(pathName + "【hdfs路径不存在】"));
            return hdfsPathAndSizedtos;
        }
        for (String hdfsPath : hdfsPaths) {
            // Lines starting with '-' are files ('d' marks directories) — only files count.
            if (!StrUtil.startWith(hdfsPath, "-")) {
                continue;
            }
            int i = hdfsPath.indexOf(pathName);
            String times = "2023-09-15 00:00:00";
            String subTime = hdfsPath.substring(i - 17, i - 1);        // modification timestamp column
            String str = hdfsPath.substring(0, i - 18);
            String strSize = str.substring(str.lastIndexOf(" ") + 1);  // size column (last token before timestamp)
            if (DateUtil.isIn(DateUtil.parse(subTime), dateD, DateUtil.parse(times))) {
                String subPath = hdfsPath.substring(i);
                String fullUrl = hdfsIpPort + subPath;
                // BUG FIX: the dto used to be created ONCE outside the loop, so every
                // list element aliased the same object and all entries ended up holding
                // the LAST file's path/size. Create a fresh dto per matching line.
                HdfsPathAndSizedto dto = new HdfsPathAndSizedto();
                dto.setHdfsPath(fullUrl.substring(0, fullUrl.lastIndexOf("/")));   // keep the parent directory
                dto.setFileSize(Long.parseLong(strSize));
                hdfsPathAndSizedtos.add(dto);
            }
        }
        return hdfsPathAndSizedtos;
    }

}