package com.hexinfo.dmpro.sparing.service.impl;


import cn.hutool.core.date.DateField;
import cn.hutool.core.date.DateTime;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.io.file.FileAppender;
import cn.hutool.core.io.file.FileReader;
import cn.hutool.core.text.StrSpliter;
import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.StrUtil;
import com.hexinfo.dmpro.common.model.MessageCenter;
import com.hexinfo.dmpro.common.service.MessageCenterService;
import com.hexinfo.dmpro.common.utils.ConnExecuteUtil;
import com.hexinfo.dmpro.common.utils.ScanCommonConstants;
import com.hexinfo.dmpro.sparing.dto.HdfsDistcpDTO;
import com.hexinfo.dmpro.sparing.dto.TableDistCPDTO;
import com.hexinfo.dmpro.sparing.dto.TableHandDTO;
import com.hexinfo.dmpro.sparing.dto.TbHdfsDistcpDTO;
import com.hexinfo.dmpro.sparing.model.HdfsHeatBackup;
import com.hexinfo.dmpro.sparing.model.HdfsPathAndSizedto;
import com.hexinfo.dmpro.sparing.service.HandBackupService;
import com.hexinfo.dmpro.sparing.service.HdfsDistCPService;
import lombok.Cleanup;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import javax.sql.DataSource;
import java.io.File;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.*;
import java.util.stream.Collectors;

@Service
@Slf4j
public class HandBackupServiceImpl implements HandBackupService {

    @Value("${hdfs.heatBackup}")
    private String heatBackup;
    @Value("${heatBackup.handBackupPath}")
    private String logPath;          //日志打印路径

    @Value("${hdfs.handBackupFilePath}")
    private String handFilePath;           //LS操作数据存储位置
    @Value("${hdfs.grepHdfsFiles}")
    private String grepHdfsFiles;      //LS命令shell脚本文件存放路径

    @Autowired
    @Qualifier("connPoolHiveTarget")
    private DataSource connPoolHiveTarget;
    @Autowired
    @Qualifier("connPoolImpalaTarget")
    private DataSource connPoolImpalaTarget;

    @Autowired
    private MessageCenterService messageCenterService;

    /**
     * Manual table synchronization entry point: LS-filters the HDFS paths to
     * copy, drops and recreates the target table, runs distcp for each
     * remaining path, then refreshes Hive/Impala metadata. Every step is
     * appended to a per-run log file, and the message-center record identified
     * by {@code tableHandDTO.getId()} is updated with the final state
     * (YC appears to mark abnormal, YWC completed — confirm against
     * ScanCommonConstants).
     *
     * @param tableHandDTO manual-backup request: record id, source/target
     *                     table names, distcp path list and log file name
     */
    @Override
    public void handBackup(TableHandDTO tableHandDTO) {
        String id = tableHandDTO.getId();
        String souDbTbName = tableHandDTO.getSouDbTbName();
        String tarDbTbName = tableHandDTO.getTarDbTbName();
        String logPath = path(tableHandDTO.getFileName());
        FileAppender heatlog = new FileAppender(new File(logPath), 100, true); // per-run log
        logLog(heatlog,souDbTbName+"表--手动同步开始",true);
        try {
            //-------------------------------- guard: nothing to synchronize ---------------------------------
            if (tableHandDTO.getHdfsDistcpDTOS() == null || tableHandDTO.getHdfsDistcpDTOS().isEmpty()){
                logLog(heatlog,"hdfs数量为0，不进行同步，请检查",true);
                messageCenterChange(id,ScanCommonConstants.ClusterName.YC.value,logPath);
                return;
            }

            //------------------------------- ① LS filter (drops paths with no files in the date window) -------------------------
            boolean lsState = lsFilter(tableHandDTO, heatlog);
            if (!lsState){
                messageCenterChange(id,ScanCommonConstants.ClusterName.YWC.value,logPath);
                return;
            }
            logLog(heatlog,"【LS过滤完成】",true);

            //------------------------------- ② drop and recreate the target table -------------------------
            boolean createState = dropCreateTable(souDbTbName, tarDbTbName, heatlog);
            if (!createState){
                messageCenterChange(id,ScanCommonConstants.ClusterName.YC.value,logPath);
                return;
            }
            logLog(heatlog,"【删表建表完成】",true);

            //------------------------------- ③ distcp every remaining HDFS path (source -> target) -------------------------
            logLog(heatlog,souDbTbName+"表同步的hdfs路径数量："+tableHandDTO.getHdfsDistcpDTOS().size(),true);
            for (HdfsDistcpDTO hdfsDistcpDTO : tableHandDTO.getHdfsDistcpDTOS()) {
                String shellTar = heatBackup + " " + hdfsDistcpDTO.getSouToTar();
                int b = ConnExecuteUtil.execShellCode(shellTar);
                if (b == 999){   // 999 = shell execution failure; log and continue with the rest
                    logLog(heatlog,"distcp执行错误shell：" + shellTar,true);   // typo "dsitcp" fixed
                }
            }
            logLog(heatlog,"【数据同步完成】",true);

            //------------------------------- ④ refresh Hive partitions + Impala metadata -------------------------
            Connection connHiveTar = connPoolHiveTarget.getConnection();
            refreshTableHive(connHiveTar, tarDbTbName,heatlog);      // closes connHiveTar
            Connection connImpalaTar = connPoolImpalaTarget.getConnection();
            refreshTableImpala(connImpalaTar, tarDbTbName,heatlog);  // closes connImpalaTar
            logLog(heatlog,"【刷新hive、impala表完成】",true);

            //------------------------------- ⑤ data comparison (currently disabled) -------------------------
//            lsFilterSouService.dataContrast(sourceDataName,heatlog,ruleToDistCPDTO.getWhereScriptDTOS());

            logLog(heatlog,"【手动同步完成】",true);
            messageCenterChange(id,ScanCommonConstants.ClusterName.YWC.value,logPath);
        }catch (Exception e){
            // NOTE(review): this also catches non-connection failures (distcp,
            // refresh, etc.) although the message only mentions connections.
            logLog(heatlog,"获取连接池连接异常："+e,true);
            messageCenterChange(id,ScanCommonConstants.ClusterName.YC.value,logPath);
        }finally {
            logLog(heatlog,"【同步结束】",true);
            logLog(heatlog,"",false);  // flush buffered log lines to disk
        }
    }

    /**
     * Drops and recreates the target table on the target Hive cluster.
     * Borrows a connection from the target Hive pool and delegates the DDL
     * work to {@link #createTable}, which also closes the connection.
     *
     * @param souDbTbName fully qualified source table name
     * @param tarDbTbName fully qualified target table name
     * @param heatlog     appender for the run log
     * @return true when the table was recreated, false on any failure
     */
    private boolean dropCreateTable(String souDbTbName, String tarDbTbName, FileAppender heatlog) {
        try {
            Connection hiveConnection = connPoolHiveTarget.getConnection();
            return createTable(hiveConnection, souDbTbName, tarDbTbName, heatlog);
        } catch (Exception e) {
            logLog(heatlog, "获取连接池连接异常：" + e, true);
            return false;
        }
    }

    /**
     * LS-based pre-filter. Requests with more than one HDFS path are treated
     * as incremental and narrowed via {@link #addLsFilter}; single-path
     * requests are passed through untouched.
     *
     * @return false when the filter removed every path (nothing to back up),
     *         true otherwise
     */
    private boolean lsFilter(TableHandDTO tableHandDTO, FileAppender heatlog) {
        int initialCount = tableHandDTO.getHdfsDistcpDTOS().size();
        if (initialCount <= 1) {
            // single path => full copy, no date-window filtering required
            logLog(heatlog, "hdfs数量初始："+initialCount+"--不进行LS过滤", true);
            return true;
        }
        // incremental run: keep only paths with files inside the date window
        addLsFilter(tableHandDTO, heatlog);
        if (ObjectUtil.isEmpty(tableHandDTO.getHdfsDistcpDTOS())) {
            logLog(heatlog, "无符合文件，不进行热备操作", true);
            return false;
        }
        int remaining = tableHandDTO.getHdfsDistcpDTOS().size();
        logLog(heatlog,"hdfs数量初始："+initialCount+"--过滤后hdfs数量:"+remaining +"】",true);
        return true;
    }

    /**
     * Updates the message-center record for this run with the final state and
     * attaches the path of the log file that was written.
     *
     * @param id      message-center record id
     * @param state   cluster-state constant to store as the record's data type
     * @param logPath path of the per-run log file
     */
    private void messageCenterChange(String id,String state,String logPath) {
        MessageCenter mess = messageCenterService.getById(id);
        if (mess == null) {
            // Record may have been removed concurrently; avoid an NPE that
            // would mask the real outcome of the synchronization.
            log.warn("MessageCenter record not found, id={}", id);
            return;
        }
        mess.setDataType(state);
        mess.setLogPath(logPath);
        messageCenterService.updateById(mess);
    }

    /**
     * Builds the per-run log file path: the configured template with its
     * {@code {date}} placeholder replaced by today's yyyyMMdd, followed by the
     * file name and a {@code .txt} suffix.
     *
     * @param fileName base name of the log file (no extension)
     * @return absolute log file path
     */
    private String path(String fileName) {
        String dateDir = DateUtil.format(DateUtil.date(), "yyyyMMdd");
        return logPath.replace("{date}", dateDir) + fileName + ".txt";
    }

    /**
     * Recreates the target table: drops it if present, then creates it with
     * the same schema as the source ({@code create table ... like ...}).
     * The supplied connection is always closed before returning.
     *
     * <p>NOTE(review): table names are interpolated directly into the DDL.
     * DDL cannot be parameterized via PreparedStatement, so callers must pass
     * only trusted, validated table names.</p>
     *
     * @param connection target Hive connection (closed by this method)
     * @param souName    source table whose schema is copied
     * @param tarName    target table to drop and recreate
     * @param heatlog    appender for the run log
     * @return true when the create statement succeeded, false otherwise
     */
    private boolean createTable(Connection connection, String souName, String tarName, FileAppender heatlog) {
        try{
            @Cleanup
            Statement st = connection.createStatement();
            // Drop phase: a failure here is logged but does not abort — the
            // create below will surface any real problem.
            try{
                String dropSql = "drop table if exists {tarName}";
                String dropSql1 = dropSql.replace("{tarName}", tarName);
                logLog(heatlog,"删除表sql："+dropSql1,true);
                st.execute(dropSql1);
            }catch (Exception e){
                logLog(heatlog,"删除表异常："+e,true);
            }
            // Create phase: copy the source table definition.
            String createSql = "create table {tarName} like {souName}";
            String createSql1 = createSql.replace("{tarName}", tarName)
                    .replace("{souName}", souName);
            logLog(heatlog,"建表sql："+createSql1,true);
            st.execute(createSql1);
            // @Cleanup closes the Statement at end of scope; the original's
            // explicit st.close() was a redundant double close and is removed.
            return true;
        }catch (Exception e){
            logLog(heatlog,"建表异常："+e,true);
            return false;
        }finally {
            try {
                if (connection != null) {
                    connection.close();
                }
            } catch (Exception e) {
                logLog(heatlog,"关闭连接异常："+e,true);
            }
        }
    }

    /**
     * Runs {@code msck repair table} on the given Hive table so newly copied
     * partitions become visible. Failures are logged, never thrown; the
     * connection is closed unconditionally.
     *
     * @param connection Hive connection (closed by this method)
     * @param name       fully qualified table name
     * @param heatlog    appender for the run log
     */
    private void refreshTableHive(Connection connection, String name, FileAppender heatlog) {
        try {
            String msckSql = "msck repair table {tableName}".replace("{tableName}", name);
            @Cleanup
            Statement statement = connection.createStatement();
            statement.execute(msckSql);
            logLog(heatlog, "刷新hive表sql：" + msckSql, true);
        } catch (Exception e) {
            logLog(heatlog, "刷新hive表异常：" + e, true);
        } finally {
            try {
                if (connection != null) {
                    connection.close();
                }
            } catch (Exception e) {
                logLog(heatlog, "关闭hive连接异常：" + e, true);
            }
        }
    }

    /**
     * Runs Impala's {@code refresh} on the given table so metadata reflects
     * the newly copied files. Failures are logged, never thrown; the
     * connection is closed unconditionally.
     *
     * @param connection Impala connection (closed by this method)
     * @param name       fully qualified table name
     * @param heatlog    appender for the run log
     */
    private void refreshTableImpala(Connection connection, String name, FileAppender heatlog) {
        try {
            String refreshSql = "refresh {tableName}".replace("{tableName}", name);
            @Cleanup
            Statement statement = connection.createStatement();
            statement.execute(refreshSql);
            logLog(heatlog, "刷新impala表sql：" + refreshSql, true);
        } catch (Exception e) {
            logLog(heatlog, "刷新impala表异常：" + e, true);
        } finally {
            try {
                if (connection != null) {
                    connection.close();
                }
            } catch (Exception e) {
                logLog(heatlog, "关闭impala连接异常：" + e, true);
            }
        }
    }

    /**
     * Serialized log writer (synchronized so concurrent runs do not interleave
     * their appends). With {@code state == true} the message is appended with
     * a timestamp prefix; with {@code false} buffered content is flushed.
     *
     * @param heatlog appender for the run log
     * @param logStr  message to append (ignored when flushing)
     * @param state   true = append, false = flush
     */
    private synchronized void logLog(FileAppender heatlog, String logStr, boolean state) {
        if (!state) {
            heatlog.flush();   // push buffered lines to disk
            return;
        }
        heatlog.append(logTime(logStr));
    }

    /**
     * Prefixes a log line with the current timestamp in
     * {@code [yyyy-MM-dd HH:mm:ss]} form.
     *
     * @param str raw log message
     * @return timestamped log line
     */
    private String logTime(String str) {
        String stamp = DateUtil.format(DateUtil.date(), "yyyy-MM-dd HH:mm:ss");
        return "[" + stamp + "]" + str;
    }

    /**
     * Incremental LS filter: keeps only the distcp entries whose source HDFS
     * path still contains files modified inside the requested date window.
     * Mutates {@code tableHandDTO} in place (its HdfsDistcpDTOS list is
     * replaced) and also returns it for convenience.
     *
     * @param tableHandDTO request carrying the candidate distcp paths
     * @param heatlog      appender for the run log (passed through)
     * @return the same DTO with its path list narrowed (possibly empty)
     */
    private TableHandDTO addLsFilter(TableHandDTO tableHandDTO, FileAppender heatlog) {
        // URLs (and sizes) of files matching the date window, per the grep script.
        List<HdfsPathAndSizedto> hdfsPathAndSizedtos = getHdfsData(tableHandDTO, heatlog);
        if (ObjectUtil.isEmpty(hdfsPathAndSizedtos)) {
            tableHandDTO.setHdfsDistcpDTOS(new ArrayList<HdfsDistcpDTO>());
            return tableHandDTO;
        }
        // Index the matching paths once so the filter below is O(n + m)
        // instead of the original nested-stream O(n * m).
        Set<String> matchedPaths = hdfsPathAndSizedtos.stream()
                .map(HdfsPathAndSizedto::getHdfsPath)
                .collect(Collectors.toSet());
        List<HdfsDistcpDTO> kept = tableHandDTO.getHdfsDistcpDTOS().stream()
                .filter(dto -> matchedPaths.contains(dto.getSourceAddress()))
                .collect(Collectors.toList());
        tableHandDTO.setHdfsDistcpDTOS(kept);
        return tableHandDTO;
    }

    /**
     * Lists the table's HDFS directory via the configured grep shell script
     * and returns the file URLs (with sizes) whose modification date falls
     * inside the DTO's start/end window.
     *
     * @param tableHandDTO request carrying dates, table name and source paths
     * @param heatlog      appender for the run log
     * @return matching URL/size records, possibly empty
     */
    private List<HdfsPathAndSizedto> getHdfsData(TableHandDTO tableHandDTO, FileAppender heatlog) {
        Date startDate = DateUtil.parse(tableHandDTO.getStartDate());
        Date endDate = DateUtil.parse(tableHandDTO.getEndDate());
        String sourceAddress = tableHandDTO.getHdfsDistcpDTOS().get(0).getSourceAddress();

        // Path up to and including the table name, e.g. hdfs://host:port/...../db/table
        String tableHdfsPath = StrSpliter.split(sourceAddress,
                tableHandDTO.getTbNameSou(), true, true).get(0) +
                tableHandDTO.getTbNameSou();
        int index = getSlashIndex(tableHdfsPath, 3);         // 3rd '/' ends "hdfs://host:port"
        String tablePath = tableHdfsPath.substring(index);   // table directory path
        String ipPort = tableHdfsPath.substring(0,index);    // scheme + host + port prefix

        // Build the grep alternation "yyyy-MM-dd|yyyy-MM-dd|..." covering every
        // day in the window. joining() produces no trailing separator and — unlike
        // the original += / substring(lastIndexOf) — cannot throw on an empty range.
        List<DateTime> dateTimes = DateUtil.rangeToList(startDate, endDate, DateField.DAY_OF_YEAR);
        String datePattern = dateTimes.stream()
                .map(dateTime -> DateUtil.format(dateTime, "yyyy-MM-dd"))
                .collect(Collectors.joining("|"));

        // ① run the ls/grep script, dumping matching lines into a work file
        String filePath = handFilePath + tableHandDTO.getFileName() + ".txt";
        String pathOrCommand = grepHdfsFiles+" "+tablePath+" "+datePattern+" "+filePath;
        int execCode = ConnExecuteUtil.execShellCode(pathOrCommand);
        logLog(heatlog, "LS命令："+pathOrCommand+"---执行结果："+execCode, true);
        // ② parse the work file into URL + size records inside the window
        return readTimeFile(
                filePath,
                tablePath,
                startDate,
                endDate,
                ipPort);
    }

    /**
     * Parses the grep work file and returns deduplicated parent-directory
     * URLs (plus file sizes) for files modified within [startDate, endDate].
     *
     * <p>Each line is assumed to be {@code hdfs dfs -ls} output, e.g.
     * {@code -rw-r--r--   3 user group  12345 2023-09-15 00:00 /path/file}.
     * The fixed offsets below (i-17, i-1, i-18) slice the 16-character
     * "yyyy-MM-dd HH:mm" timestamp and the size column relative to where the
     * table path begins — NOTE(review): this assumes the ls output format is
     * stable; confirm against the grep script's output.</p>
     *
     * @param filePath  work file produced by the grep script
     * @param tablePath table directory path (locates the URL column in a line)
     * @param startDate inclusive window start
     * @param endDate   inclusive window end
     * @param ipPort    scheme/host/port prefix re-attached to each path
     * @return list deduplicated by hdfsPath
     */
    private List<HdfsPathAndSizedto> readTimeFile(String filePath, String tablePath, Date startDate, Date endDate, String ipPort) {
        FileReader fileReader = new FileReader(filePath);
        List<HdfsPathAndSizedto> hdfsPathAndSizedtos = new ArrayList<>();
        List<String> hdfsPaths = fileReader.readLines();
        if (ObjectUtil.isEmpty(hdfsPaths)) {
            return hdfsPathAndSizedtos;
        }
        for (String hdfsPath : hdfsPaths) {
            HdfsPathAndSizedto hdfsPathAndSizedto = new HdfsPathAndSizedto();
            boolean fileType = StrUtil.startWith(hdfsPath, "-");
            // Only plain files (permission string starts with '-'); directories are skipped.
            if (fileType) {
                int i = hdfsPath.indexOf(tablePath);
                String subTime = hdfsPath.substring(i - 17, i - 1);   // "yyyy-MM-dd HH:mm" timestamp column
                String str = hdfsPath.substring(0, i - 18);
                String strSize = str.substring(str.lastIndexOf(" ") + 1, str.length());   // file size column (last token)
//                Date nowdata = DateUtil.parse("2023-09-15 00:00:00");

                if (DateUtil.isIn(DateUtil.parse(subTime), startDate, endDate)) {
                    String subPath = hdfsPath.substring(i, hdfsPath.length());
                    String ss = ipPort + subPath;
                    // Store the file's parent directory, re-prefixed with host:port.
                    hdfsPathAndSizedto.setHdfsPath(ss.substring(0, ss.lastIndexOf("/")));
                    hdfsPathAndSizedto.setFileSize(Long.parseLong(strSize));
                    hdfsPathAndSizedtos.add(hdfsPathAndSizedto);
                }
            }
        }
        // Deduplicate by hdfsPath via a TreeSet keyed on the path, then copy back to a list.
        List<HdfsPathAndSizedto> newList = hdfsPathAndSizedtos.stream().collect(
                Collectors.collectingAndThen(Collectors.toCollection(() -> new TreeSet<HdfsPathAndSizedto>(
                        Comparator.comparing(HdfsPathAndSizedto::getHdfsPath)
                )), ArrayList::new));
        return newList;
    }

    /**
     * Finds the index of the {@code num}-th '/' character in {@code url}.
     *
     * @param url string to scan
     * @param num which slash to locate (1-based)
     * @return index of that slash, or -1 when the string has fewer slashes
     */
    private int getSlashIndex(String url, int num) {
        int seen = 0;
        for (int i = 0; i < url.length(); i++) {
            if (url.charAt(i) != '/') {
                continue;
            }
            seen++;
            if (seen == num) {
                return i;
            }
        }
        return -1;
    }

}
