package com.hexinfo.dmpro.sparing.service.impl;


import cn.hutool.core.date.DateUnit;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.io.file.FileAppender;
import cn.hutool.core.io.file.FileReader;
import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.StrUtil;
import com.hexinfo.dmpro.sparing.async.AsyncTask;
import com.hexinfo.dmpro.sparing.dto.ColdStandbyWeekDTO;
import com.hexinfo.dmpro.sparing.model.HdfsPathAndSizedto;
import com.hexinfo.dmpro.sparing.service.ColdStandbyService;
import com.hexinfo.dmpro.sparing.service.ScanMetadataService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import java.io.File;
import java.util.*;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.stream.Collectors;

@Service
@Slf4j
public class ColdStandbyServiceImpl implements ColdStandbyService {

    /** Sentinel exit code meaning the shell command itself could not be executed. */
    private static final int SHELL_EXEC_FAILED = 999;

    /** Directory where the per-run "ls" listing files (源_库_表.txt) are written. */
    @Value("${hdfs.sourceFilePath}")
    private String sourceFilePath;
    /** Path of the shell script that runs the HDFS ls/grep command. */
    @Value("${hdfs.grepHdfsFiles}")
    private String grepHdfsFiles;
    /** Path of the shell script that performs the distcp copy to the backup cluster. */
    @Value("${hdfs.heatBackup}")
    private String heatBackup;
    /** Not referenced in this class — TODO confirm it is still needed before removing. */
    @Value("${hdfs.tarPrefix}")
    private String tarPrefix;

    /** Injected but not referenced in this class — TODO confirm it is still needed. */
    @Autowired
    private ScanMetadataService scanMetadataService;

    /** Runs shell commands on worker threads and returns their results as futures. */
    @Autowired
    private AsyncTask asyncTask;

    /**
     * Step ① of a backup run: executes the ls shell script over every day in
     * [dateSta, dateEnd] (inclusive), stores the raw listing in a per-run text file, then
     * filters it into the list of HDFS paths/sizes placed on the DTO.
     *
     * @param weekDTO carries the date range and HDFS root path; receives the result list
     * @param coldLog run log; flushed on success, left unflushed when the ls command fails
     * @return the same {@code weekDTO}, with {@code hdfsList} populated on success
     */
    @Override
    public ColdStandbyWeekDTO lsWeek(ColdStandbyWeekDTO weekDTO, FileAppender coldLog) {
        Date staDate = DateUtil.parse(weekDTO.getDateSta());
        Date endDate = DateUtil.parse(weekDTO.getDateEnd());
        long betweenDay = DateUtil.between(staDate, endDate, DateUnit.DAY);
        // Build the grep alternation "yyyy-MM-dd|yyyy-MM-dd|..." covering the range inclusively.
        List<String> days = new ArrayList<>();
        for (int i = 0; i <= betweenDay; i++) {
            days.add(DateUtil.format(DateUtil.offsetDay(staDate, i), "yyyy-MM-dd"));
        }
        String datePattern = String.join("|", days);
        // Destination file for the raw ls output: <sourceFilePath><dateSta>.txt
        String filePath = sourceFilePath + weekDTO.getDateSta() + ".txt";
        String pathOrCommand = grepHdfsFiles + " " + weekDTO.getHdfsPath() + " " + datePattern + " " + filePath;
        log.warn("=============ls信息存储文件路径{}", filePath);
        log.warn("=============ls shell命令{}", pathOrCommand);
        if (execShellCode(pathOrCommand, coldLog) == SHELL_EXEC_FAILED) {
            log.warn("========ls失败");
            return weekDTO;
        }
        // ② keep only the entries whose timestamp falls inside [dateSta, dateEnd]; the upper
        //    bound is shifted by one day so the end date itself is included.
        List<HdfsPathAndSizedto> hdfsPathAndSizedtos = readTimeFile(
                filePath,
                weekDTO.getHdfsPath(),
                staDate,
                DateUtil.offsetDay(endDate, 1));
        weekDTO.setHdfsList(hdfsPathAndSizedtos);
        coldLog.flush();
        return weekDTO;
    }

    /**
     * Runs one full cold-backup cycle: lists changed files via {@link #lsWeek}, groups them
     * by "db.table", submits one asynchronous distcp shell task per file, then blocks until
     * every task completes (3-hour timeout per task). Tables on the comma-separated black
     * list in {@code weekDTO.getList()} never trigger the threshold logging; tables whose
     * name contains "uat_" are skipped entirely.
     *
     * @param weekDTO run configuration (date range, HDFS roots, threshold, black list, log path)
     * @return the same {@code weekDTO}
     */
    @Override
    public ColdStandbyWeekDTO distCPWeek(ColdStandbyWeekDTO weekDTO) {
        Date dateSta = DateUtil.date();
        FileAppender coldLog = new FileAppender(new File(weekDTO.getLogPath()), 50, true);
        lsWeek(weekDTO, coldLog);
        List<HdfsPathAndSizedto> hdfsList = weekDTO.getHdfsList();
        if (ObjectUtil.isNotEmpty(hdfsList)) {
            // Tables listed here are exempt from the threshold algorithm.
            List<String> blackList = Arrays.asList(weekDTO.getList().split(","));
            // Collects the result future of every submitted distcp task.
            List<Future<Map<String, Object>>> futures = new ArrayList<>();
            logLog(coldLog, "【同步总数量】" + hdfsList.size(), "1");
            log.warn("===============同步总数量{}", hdfsList.size());
            coldLog.flush();
            Map<String, List<HdfsPathAndSizedto>> tableNum = groupByTable(hdfsList);
            for (Map.Entry<String, List<HdfsPathAndSizedto>> entry : tableNum.entrySet()) {
                log.warn("{}===============数量为{}", entry.getKey(), entry.getValue().size());
            }
            for (Map.Entry<String, List<HdfsPathAndSizedto>> entry : tableNum.entrySet()) {
                String key = entry.getKey();
                List<HdfsPathAndSizedto> files = entry.getValue();
                if (files.size() >= weekDTO.getCbThreshold() && !blackList.contains(key)) {
                    log.warn("{}===============触发阈值 当前数量为{}", key, files.size());
                    // Log the table-level source/backup paths so the whole-table sync is traceable.
                    String tableHdfs = weekDTO.getWgHdfsPath() + "/" + key.replace(".", "/");
                    String tableLbHdfs = StrUtil.replace(
                            StrUtil.replace(tableHdfs, weekDTO.getWgHdfsPath(), weekDTO.getLbHdfsPath()),
                            "warehouse", "cb_test/warehouse");
                    log.warn("===============DISTCP路径{}", tableHdfs);
                    log.warn("冷备===============DISTCP路径{}", tableLbHdfs);
                    submitDistcpTasks(files, weekDTO, coldLog, futures,
                            "===============DISTCP单个文件路径", "冷备===============DISTCP单个文件路径");
                } else {
                    log.warn("{}===============未触发阈值 当前数量为{}", key, files.size());
                    submitDistcpTasks(files, weekDTO, coldLog, futures,
                            "===============DISTCP路径", "冷备===============DISTCP路径");
                }
            }
            awaitAll(futures);
        }
        Date dateEnd = DateUtil.date();
        logLog(coldLog, "【同步总用时】" + weekDTO.getDateSta() + "【开始时间】" + DateUtil.formatDateTime(dateSta)
                + "【结束时间】" + DateUtil.formatDateTime(dateEnd), "1");
        coldLog.flush();
        return weekDTO;
    }

    /**
     * Groups HDFS paths by their "db.table" name, derived from the segment after the sixth
     * '/' with the ".db/" marker stripped. Entries whose table name contains "uat_" are
     * skipped (UAT tables are never backed up).
     */
    private Map<String, List<HdfsPathAndSizedto>> groupByTable(List<HdfsPathAndSizedto> hdfsList) {
        Map<String, List<HdfsPathAndSizedto>> tableNum = new HashMap<>();
        for (HdfsPathAndSizedto hdfs : hdfsList) {
            int x = getSlashIndex(hdfs.getHdfsPath(), 6);
            String tableName = hdfs.getHdfsPath().substring(x + 1);
            // A second '/' means a partition segment follows the table name — cut it off.
            int c = getSlashIndex(tableName, 2);
            String dbTableName = (c > 0 ? tableName.substring(0, c) : tableName).replaceAll("db/", "");
            if (dbTableName.contains("uat_")) {
                continue;
            }
            tableNum.computeIfAbsent(dbTableName, k -> new ArrayList<>()).add(hdfs);
        }
        return tableNum;
    }

    /**
     * Submits one asynchronous distcp shell task per file and collects the returned futures.
     * The backup-side path is derived by swapping the production root for the backup root
     * and redirecting "warehouse" into "cb_test/warehouse".
     *
     * @param srcLabel log label for the source path line
     * @param dstLabel log label for the backup-side path line
     */
    private void submitDistcpTasks(List<HdfsPathAndSizedto> files, ColdStandbyWeekDTO weekDTO,
                                   FileAppender coldLog, List<Future<Map<String, Object>>> futures,
                                   String srcLabel, String dstLabel) {
        for (HdfsPathAndSizedto hdfs : files) {
            log.warn("{}{}", srcLabel, hdfs.getHdfsPath());
            String lbHdfs = StrUtil.replace(
                    StrUtil.replace(hdfs.getHdfsPath(), weekDTO.getWgHdfsPath(), weekDTO.getLbHdfsPath()),
                    "warehouse", "cb_test/warehouse");
            log.warn("{}{}", dstLabel, lbHdfs);
            // Starts a worker thread per call; the tasks run concurrently.
            futures.add(asyncTask.execShell(hdfs.getHdfsPath() + " " + lbHdfs, heatBackup, coldLog));
        }
    }

    /**
     * Waits for every submitted distcp task. A task still running after 3 hours is
     * cancelled; a failure of one task does not stop the wait for the others.
     */
    private void awaitAll(List<Future<Map<String, Object>>> futures) {
        log.warn("===============DistCP任务总数{}", futures.size());
        for (Future<Map<String, Object>> future : futures) {
            try {
                future.get(3, TimeUnit.HOURS);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // restore the interrupt flag
                log.error("统计线程中断", e);
            } catch (ExecutionException e) {
                log.error("统计线程执行异常", e.getCause()); // log the real cause, not the wrapper
            } catch (TimeoutException e) {
                future.cancel(true);
                log.error("统计线程超时", e);
            } catch (Exception e) {
                // e.g. CancellationException — previously printStackTrace() aborted the loop
                log.error("统计线程异常", e);
            }
        }
    }

    /**
     * Parses the ls script output, keeping entries that are plain files (line starts with
     * "-") whose timestamp lies in [staDate, endDate). Results are de-duplicated by path.
     *
     * Assumes a fixed-width layout "... <size> yyyy-MM-dd HH:mm <path>" where the 16-char
     * timestamp ends one character before the path — TODO confirm against the script output.
     *
     * @param filePath  file holding the raw ls output
     * @param tablePath HDFS path prefix used to locate the path column in each line
     * @param staDate   inclusive lower bound of the file timestamp
     * @param endDate   exclusive upper bound of the file timestamp
     * @return de-duplicated list of parent-directory paths with file sizes; never null
     */
    private List<HdfsPathAndSizedto> readTimeFile(String filePath, String tablePath, Date staDate, Date endDate) {
        List<String> hdfsLines = new FileReader(filePath).readLines();
        List<HdfsPathAndSizedto> result = new ArrayList<>();
        if (ObjectUtil.isEmpty(hdfsLines)) {
            log.warn("============暂无LS命令数据");
            return result;
        }
        for (String line : hdfsLines) {
            log.warn("============LS命令数据{}", line);
            // Only plain files (permission string starts with '-'); directories are skipped.
            if (!StrUtil.startWith(line, "-")) {
                continue;
            }
            int i = line.indexOf(tablePath);
            if (i < 18) {
                // tablePath absent or line too short to carve out the timestamp/size columns;
                // previously this threw StringIndexOutOfBoundsException.
                continue;
            }
            String subTime = line.substring(i - 17, i - 1);       // "yyyy-MM-dd HH:mm"
            String head = line.substring(0, i - 18);
            String strSize = head.substring(head.lastIndexOf(" ") + 1); // file size column
            if (DateUtil.isIn(DateUtil.parse(subTime), staDate, endDate)) {
                // Keep the parent directory (partition path) of the file, not the file itself.
                String subPath = line.substring(i);
                HdfsPathAndSizedto dto = new HdfsPathAndSizedto();
                dto.setHdfsPath(subPath.substring(0, subPath.lastIndexOf("/")));
                dto.setFileSize(Long.parseLong(strSize));
                result.add(dto);
            }
        }
        // De-duplicate by HDFS path.
        return result.stream().collect(Collectors.collectingAndThen(
                Collectors.toCollection(() -> new TreeSet<>(Comparator.comparing(HdfsPathAndSizedto::getHdfsPath))),
                ArrayList::new));
    }

    /**
     * Executes a shell command/script and blocks until it exits.
     *
     * @param pathOrCommand script path plus whitespace-separated arguments
     * @param coldLog       run log receiving one timestamped line per invocation
     * @return the process exit code, or {@value #SHELL_EXEC_FAILED} when execution failed
     */
    private int execShellCode(String pathOrCommand, FileAppender coldLog) {
        try {
            Process ps = Runtime.getRuntime().exec(pathOrCommand);
            int exitValue = ps.waitFor();
            // Label by the real exit code (0 = success); previously always said （成功）.
            coldLog.append(logTime("【LS命令】" + pathOrCommand + "---执行结果：" + exitValue
                    + (exitValue == 0 ? "（成功）" : "（失败）")));
            return exitValue;
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore the interrupt flag
            log.error("执行shell脚本失败命令：{}，执行shell脚本失败报错：{}", pathOrCommand, e);
            coldLog.append(logTime("【LS命令】" + pathOrCommand + "---执行异常（失败）"));
            return SHELL_EXEC_FAILED;
        } catch (Exception e) {
            log.error("执行shell脚本失败命令：{}，执行shell脚本失败报错：{}", pathOrCommand, e);
            // Previously mislabeled the failure path as （成功）.
            coldLog.append(logTime("【LS命令】" + pathOrCommand + "---执行异常（失败）"));
            return SHELL_EXEC_FAILED;
        }
    }

    /**
     * Prefixes a log line with the current timestamp, e.g. "[2024-01-01 12:00:00]msg".
     */
    private String logTime(String str) {
        return "[" + DateUtil.format(DateUtil.date(), "yyyy-MM-dd HH:mm:ss") + "]" + str;
    }

    /**
     * Serialized (synchronized) access to the run log so concurrent shell tasks do not
     * interleave their writes.
     *
     * @param tableLog target log appender
     * @param logStr   message to write (used by codes "1" and "3")
     * @param str      operation code: "1"/"3" append a timestamped line, "4" flushes the
     *                 appender, "2" is a deliberate no-op kept for caller compatibility
     */
    private synchronized void logLog(FileAppender tableLog, String logStr, String str) {
        switch (str) {
            case "1":
            case "3":
                tableLog.append(logTime(logStr));
                break;
            case "4":
                tableLog.flush(); // push buffered table-log lines to disk
                break;
            default:
                // "2" (and any unknown code) intentionally writes nothing
                break;
        }
    }

    /**
     * Returns the index of the {@code num}-th '/' in {@code url}, or -1 when the string
     * contains fewer than {@code num} slashes.
     *
     * @param url string to scan
     * @param num 1-based ordinal of the slash to locate
     * @return index of the num-th slash, or -1 if absent
     */
    public int getSlashIndex(String url, int num) {
        int slashCount = 0;
        for (int i = 0; i < url.length(); i++) {
            if (url.charAt(i) == '/' && ++slashCount == num) {
                return i;
            }
        }
        return -1;
    }
}
