package com.hexinfo.dmpro.sparing.service.impl;

import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.date.DateField;
import cn.hutool.core.date.DatePattern;
import cn.hutool.core.date.DateTime;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.io.file.FileAppender;
import cn.hutool.core.io.file.FileReader;
import cn.hutool.core.text.StrSpliter;
import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.StrUtil;
import com.hexinfo.dmpro.common.utils.ConnExecuteUtil;
import com.hexinfo.dmpro.sparing.dto.RuleToDistCPDTO;
import com.hexinfo.dmpro.sparing.dto.TableDistCPDTO;
import com.hexinfo.dmpro.sparing.dto.WhereScriptDTO;
import com.hexinfo.dmpro.sparing.model.HdfsHeatBackup;
import com.hexinfo.dmpro.sparing.model.HdfsPathAndSizedto;
import com.hexinfo.dmpro.sparing.service.BackupSourceService;
import com.hexinfo.dmpro.sparing.service.DataFilterService;
import com.hexinfo.dmpro.sparing.util.Logger;
import com.xqfunds.job.core.log.XxlJobLogger;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import java.time.LocalDate;
import java.util.*;
import java.util.stream.Collectors;

@Service
@Slf4j
public class DataFilterServiceImpl implements DataFilterService {

    /** Exit code used internally to signal that the shell invocation itself threw. */
    private static final int SHELL_EXEC_FAILED = 999;

    @Value("${hdfs.hdfsIpPort}")
    private String hdfsIpPort;        // cluster HDFS address: hdfs://ip:port
    @Value("${hdfs.sourceFilePath}")
    private String sourceFilePath;    // directory where `ls` output files are stored
    @Value("${hdfs.getHdfsFiles}")
    private String getHdfsFiles;      // path of the shell script that runs the `ls` command
    @Value("${hdfs.grepHdfsFiles}")
    private String grepHdfsFiles;     // path of the shell script that runs the `ls | grep` command
    @Value("${heatBackup.logPath}")
    private String logPath;           // per-table log path template; contains a "{date}" placeholder

    @Autowired
    private BackupSourceService backupSourceService;

    /**
     * Incremental backup: filter the table's HDFS paths via an `ls` listing.
     * <p>
     * Fetches the (path, size) pairs matching the table's date range, assigns each
     * backup entry's file size by matching on its source address, drops entries
     * with no/zero size, and stores the surviving entries plus the total size back
     * into {@code tableDistCPDTO}.
     *
     * @param tableDistCPDTO per-table DistCP context; mutated and returned
     * @param heatlog        aggregate log appender
     * @param tableLog       per-table log appender
     * @return the same {@code tableDistCPDTO}, updated in place
     */
    @Override
    public TableDistCPDTO addLsFilter(TableDistCPDTO tableDistCPDTO, FileAppender heatlog, FileAppender tableLog) {
        // Work on a copy so the DTO's original list is not aliased while we mutate entries.
        List<HdfsHeatBackup> hdfsHeatBackups = new ArrayList<>(tableDistCPDTO.getHdfsHeatBackups());
        // (path, size) pairs that match the backup rule and date window.
        List<HdfsPathAndSizedto> hdfsPathAndSizedtos = getHdfsData(tableDistCPDTO, heatlog, tableLog);
        if (ObjectUtil.isEmpty(hdfsPathAndSizedtos)) {
            return tableDistCPDTO;
        }
        // Index sizes by HDFS path for O(n + m) matching; keep the FIRST size seen per
        // path, mirroring the original first-match-then-break behaviour.
        Map<String, Long> sizeByPath = new HashMap<>();
        for (HdfsPathAndSizedto pathAndSize : hdfsPathAndSizedtos) {
            sizeByPath.putIfAbsent(pathAndSize.getHdfsPath(), pathAndSize.getFileSize());
        }
        long sizeLong = 0L;     // running total of matched file sizes
        for (HdfsHeatBackup entity : hdfsHeatBackups) {
            Long fileSize = sizeByPath.get(entity.getSourceAddress());
            if (fileSize != null) {
                entity.setFileSize(fileSize);
                sizeLong += fileSize;
            }
        }
        // Drop entries that matched nothing (or matched an empty file).
        List<HdfsHeatBackup> filteredEntities = hdfsHeatBackups.stream()
                .filter(entity -> entity.getFileSize() != null && entity.getFileSize() > 0L)
                .collect(Collectors.toList());
        tableLog.append(logTime("【过滤后hdfs路径】"));
        for (HdfsHeatBackup filteredEntity : filteredEntities) {
            tableLog.append(logTime(filteredEntity.getSourceAddress()));
        }
        tableDistCPDTO.setHdfsFileSize(sizeLong);
        tableDistCPDTO.setHdfsHeatBackups(filteredEntities);
        return tableDistCPDTO;
    }

    /**
     * Full backup: filter the table's HDFS paths via an `ls` listing.
     * <p>
     * Tables with a time column go through the full path; their rule points at the
     * table directory, which itself holds no HDFS files, so only an emptiness check
     * on the listing is meaningful here — no per-entry size matching is done.
     *
     * @param tableDistCPDTO per-table DistCP context; mutated and returned
     * @param heatlog        aggregate log appender
     * @param tableLog       per-table log appender
     * @return the same {@code tableDistCPDTO}
     */
    @Override
    public TableDistCPDTO allLsFilter(TableDistCPDTO tableDistCPDTO, FileAppender heatlog, FileAppender tableLog) {
        List<HdfsPathAndSizedto> hdfsPathAndSizedtos = getHdfsData(tableDistCPDTO, heatlog, tableLog);
        if (ObjectUtil.isEmpty(hdfsPathAndSizedtos)) {
            return tableDistCPDTO;
        }
        tableLog.append(logTime("【过滤后hdfs路径】"));
        for (HdfsHeatBackup filteredEntity : tableDistCPDTO.getHdfsHeatBackups()) {
            tableLog.append(logTime(filteredEntity.getSourceAddress()));
        }
        return tableDistCPDTO;
    }

    /**
     * Runs the grep shell script to date-filter the table's HDFS listing, then parses
     * the resulting file into (path, size) pairs.
     * <p>
     * Shared by both the incremental and the full flow (the two previously duplicated
     * copies of this method have been merged).
     * <p>
     * Side effects: on shell failure the backup task is marked failed; when the parsed
     * result is empty, {@code tableDistCPDTO}'s backup list is cleared.
     *
     * @return matching (path, size) pairs; empty when the shell failed or nothing matched
     */
    private List<HdfsPathAndSizedto> getHdfsData(TableDistCPDTO tableDistCPDTO, FileAppender heatlog, FileAppender tableLog) {
        // Window start: last hot-backup time shifted back 1 hour (overlap guard).
        Date newDate3 = DateUtil.offsetHour(tableDistCPDTO.getDate(), -1);
        // Window end: now + 24 hours.
        Date date = DateUtil.offsetHour(DateUtil.date(), 24);
        tableLog.append(logTime("数据库初始时间：" + tableDistCPDTO.getDate() + "--变更后时间：" + newDate3 + "--结束时间：" + date));
        // One "yyyy-MM-dd" token per day in the window, joined with "|" to form the
        // grep alternation pattern (replaces the previous manual +=/substring loop).
        List<DateTime> dateTimes = DateUtil.rangeToList(newDate3, date, DateField.DAY_OF_YEAR);
        String datePattern = dateTimes.stream()
                .map(dateTime -> DateUtil.format(dateTime, "yyyy-MM-dd"))
                .collect(Collectors.joining("|"));
        // ① The script writes the filtered `ls` output into <sourceFilePath>/<source_db_table>.txt
        String filePath = sourceFilePath + tableDistCPDTO.getName() + ".txt";
        String pathOrCommand = grepHdfsFiles + " " + tableDistCPDTO.getTableHdfsPath() + " " + datePattern + " " + filePath;
        int execCode = execShellCode(pathOrCommand, heatlog, tableLog);
        if (execCode == SHELL_EXEC_FAILED) {
            backupSourceService.updateTaskState(tableDistCPDTO.getSourceDataName(), false);
            return new ArrayList<>();
        }
        // ② Parse the file into HDFS URLs + sizes that fall inside the time window.
        List<HdfsPathAndSizedto> hdfsPathAndSizedtos = readTimeFile(
                filePath,
                tableDistCPDTO.getTableHdfsPath(),
                newDate3,
                date);
        // No hot-backup data was updated during this window.
        if (CollectionUtil.isEmpty(hdfsPathAndSizedtos)) {
            tableDistCPDTO.setHdfsHeatBackups(new ArrayList<>());
        }
        return hdfsPathAndSizedtos;
    }

    /**
     * Parses an `ls -l`-style listing file and returns the HDFS directory paths (and
     * file sizes) of regular files whose modification date falls inside the window.
     * <p>
     * Line layout assumption (hdfs dfs -ls): permissions start with '-' for files, the
     * 16-char "yyyy-MM-dd HH:mm" timestamp sits immediately before the path, and the
     * size token immediately before the timestamp — hence the fixed -17/-18 offsets.
     * NOTE(review): these offsets silently break if the listing format changes; confirm
     * against the actual shell script output.
     *
     * @param filePath  listing file to read
     * @param tablePath HDFS path prefix of the table (used to locate the path column)
     * @param newDate3  window start (last backup time - 1h)
     * @param nowDate   window end
     * @return de-duplicated (by path) list of (directory path, file size) pairs
     */
    private List<HdfsPathAndSizedto> readTimeFile(String filePath, String tablePath, Date newDate3, Date nowDate) {
        FileReader fileReader = new FileReader(filePath);
        List<HdfsPathAndSizedto> hdfsPathAndSizedtos = new ArrayList<>();
        List<String> hdfsPaths = fileReader.readLines();
        if (ObjectUtil.isEmpty(hdfsPaths)) {
            return hdfsPathAndSizedtos;
        }
        for (String hdfsPath : hdfsPaths) {
            // Only regular files: `ls -l` marks them with a leading '-'.
            if (!StrUtil.startWith(hdfsPath, "-")) {
                continue;
            }
            int i = hdfsPath.indexOf(tablePath);
            String subTime = hdfsPath.substring(i - 17, i - 1);   // "yyyy-MM-dd HH:mm" timestamp
            String beforeTime = hdfsPath.substring(0, i - 18);
            String strSize = beforeTime.substring(beforeTime.lastIndexOf(" ") + 1);   // file size token

            if (DateUtil.isIn(DateUtil.parse(subTime), newDate3, nowDate)) {
                String subPath = hdfsPath.substring(i);
                // Keep the containing directory, not the file itself.
                String hdfsFilePath = subPath.substring(0, subPath.lastIndexOf("/"));
                HdfsPathAndSizedto hdfsPathAndSizedto = new HdfsPathAndSizedto();
                hdfsPathAndSizedto.setHdfsPath(hdfsFilePath);
                hdfsPathAndSizedto.setFileSize(Long.parseLong(strSize));
                hdfsPathAndSizedtos.add(hdfsPathAndSizedto);
            }
        }
        // De-duplicate by HDFS path (several files in one directory collapse to one entry).
        return hdfsPathAndSizedtos.stream().collect(
                Collectors.collectingAndThen(Collectors.toCollection(() -> new TreeSet<HdfsPathAndSizedto>(
                        Comparator.comparing(HdfsPathAndSizedto::getHdfsPath)
                )), ArrayList::new));
    }

    /**
     * Builds the per-table DistCP DTO from a rule and the table's backup entries.
     * <p>
     * Assumes {@code hdfsHeatBackups} is non-empty and homogeneous (all entries belong
     * to the same table), so entry 0 is representative.
     *
     * @param ruleToDistCPDTO the backup rule context
     * @param hdfsHeatBackups all backup entries of one table (non-empty)
     * @return a fully populated {@link TableDistCPDTO}
     */
    @Override
    public TableDistCPDTO getTableDistCPDTO(RuleToDistCPDTO ruleToDistCPDTO, List<HdfsHeatBackup> hdfsHeatBackups) {
        HdfsHeatBackup hdfsHeatBackup = hdfsHeatBackups.get(0);
        // WHERE-clause scripts that target this table.
        List<WhereScriptDTO> tableWhere = new ArrayList<>();
        if (ObjectUtil.isNotEmpty(ruleToDistCPDTO.getWhereScriptDTOS()) && ruleToDistCPDTO.getWhereScriptDTOS().size() > 0) {
            tableWhere = ruleToDistCPDTO.getWhereScriptDTOS().stream()
                    .filter(b -> b.getDbTableName().equals(hdfsHeatBackup.getSourceName()))
                    .collect(Collectors.toList());
        }

        // Truncate the source address at the table name to get the table-level HDFS path.
        String tableHdfsPath = StrSpliter.split(hdfsHeatBackup.getSourceAddress(), hdfsHeatBackup.getSourceTableName(), true, true)
                .get(0) + hdfsHeatBackup.getSourceTableName();
        // Per-table log file: <logPath(with date)>/<sourceDataName>/<源_库_表>.txt
        String strPath = logPath.replace("{date}", DateUtil.format(DateUtil.date(), "yyyyMMdd"))
                + ruleToDistCPDTO.getSourceDataName() + "/" + hdfsHeatBackup.getSourceName() + ".txt";
        return new TableDistCPDTO(ruleToDistCPDTO.getSourceDataName(),
                hdfsHeatBackup.getSourceDbName(), hdfsHeatBackup.getSourceTableName(),
                hdfsHeatBackup.getBackupDbName(), hdfsHeatBackup.getBackupTableName(),
                hdfsHeatBackup.getTargetDbName(), hdfsHeatBackup.getTargetTableName(),
                ruleToDistCPDTO.getDate(), tableWhere, hdfsHeatBackup.getPkeyName(),
                hdfsHeatBackup.getSyncRange(), 0L, strPath, tableHdfsPath, hdfsHeatBackups, hdfsHeatBackups.size(), "", ruleToDistCPDTO.getUatSyncLogsTask());
    }

    /**
     * Executes a shell script/command and returns its exit code.
     * <p>
     * NOTE(review): {@code Runtime.exec(String)} tokenises on whitespace, so arguments
     * containing spaces would break; consider {@code ProcessBuilder(List<String>)}.
     *
     * @param pathOrCommand script path or command line to run
     * @return the process exit code, or {@link #SHELL_EXEC_FAILED} (999) when execution threw
     */
    private int execShellCode(String pathOrCommand, FileAppender heatlog, FileAppender tableLog) {
        try {
            Process ps = Runtime.getRuntime().exec(pathOrCommand);
            int exitValue = ps.waitFor();
            // Bug fix: the success marker was previously appended unconditionally,
            // claiming success even for non-zero exit codes.
            tableLog.append(logTime("【LS命令】" + pathOrCommand + "---执行结果：" + exitValue
                    + (exitValue == 0 ? "（成功）" : "（失败）")));
            return exitValue;
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();   // restore the interrupt status
            log.error("执行shell脚本失败命令：{}，执行shell脚本失败报错：{}", pathOrCommand, e);
            logLog(heatlog, tableLog, "【LS命令】" + pathOrCommand + "---执行异常：" + e, "1");
            return SHELL_EXEC_FAILED;
        } catch (Exception e) {
            log.error("执行shell脚本失败命令：{}，执行shell脚本失败报错：{}", pathOrCommand, e);
            logLog(heatlog, tableLog, "【LS命令】" + pathOrCommand + "---执行异常：" + e, "1");
            return SHELL_EXEC_FAILED;
        }
    }

    /**
     * Serialised logging helper (synchronized to avoid concurrent appender access).
     *
     * @param heatlog  aggregate log appender
     * @param tableLog per-table log appender
     * @param logStr   message to append (ignored for flush modes)
     * @param str      mode: "1" append to both, "2" flush aggregate log,
     *                 "3" append to table log, "4" flush table log
     */
    private synchronized void logLog(FileAppender heatlog, FileAppender tableLog, String logStr, String str) {
        switch (str) {
            case "1":
                heatlog.append(logTime(logStr));
                tableLog.append(logTime(logStr));
                break;
            case "2":
                heatlog.flush();     // flush the aggregate log to disk
                break;
            case "3":
                tableLog.append(logTime(logStr));
                break;
            case "4":
                tableLog.flush();    // flush the per-table log to disk
                break;
            default:
                break;               // unknown mode: deliberately ignored
        }
    }

    /**
     * Groups backup entries by their "db.table" name.
     *
     * @param hdfsHeatBackups entries to group
     * @return map keyed by table name, value = all HDFS entries of that table
     */
    @Override
    public Map<String, List<HdfsHeatBackup>> dbTableNameSort(List<HdfsHeatBackup> hdfsHeatBackups) {
        return hdfsHeatBackups.stream().collect(Collectors.groupingBy(HdfsHeatBackup::getSourceName));
    }

    /**
     * Builds the aggregate log file path for a data source.
     *
     * @param sounName data-source name
     * @return <logPath(with today's date)>/<sounName>/<sounName>.txt
     */
    @Override
    public String path(String sounName) {
        return logPath.replace("{date}", DateUtil.format(DateUtil.date(), "yyyyMMdd"))
                + sounName + "/" + sounName + ".txt";
    }

    /**
     * Prefixes a log message with the current "[yyyy-MM-dd HH:mm:ss]" timestamp.
     *
     * @param str raw message
     * @return timestamped message
     */
    private String logTime(String str) {
        return "[" + DateUtil.format(DateUtil.date(), "yyyy-MM-dd HH:mm:ss") + "]" + str;
    }

}
