package com.hexinfo.dmpro.sparing.service.impl;

import cn.hutool.core.date.DateField;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.util.StrUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.hexinfo.dmpro.common.model.MessageCenter;
import com.hexinfo.dmpro.common.service.MessageCenterService;
import com.hexinfo.dmpro.common.utils.ConnExecuteUtil;
import com.hexinfo.dmpro.common.utils.ScanCommonConstants;
import com.hexinfo.dmpro.component.comparison.enums.TableContrastEnums;
import com.hexinfo.dmpro.sparing.model.HdfsHeatBackup;
import com.hexinfo.dmpro.sparing.service.HdfsHeatBackupService;
import com.hexinfo.dmpro.sparing.service.ScriptExecService;
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;

import java.util.*;
import java.util.stream.Collectors;

/**
 * HDFS hot-backup (热备) script-execution service.
 *
 * <p>Groups incoming HDFS backup entries by source table (库.表), marks the
 * triggering message-center record as finished, and dispatches one backup
 * script execution per table via {@link ScriptExecService}.
 */
@Service
@Slf4j
@AllArgsConstructor
public class HdfsHeatBackupServiceImpl implements HdfsHeatBackupService {

    private final ScriptExecService scriptExecService;

    private final MessageCenterService messageCenterService;

    /**
     * Runs the hot-backup flow: validates the input, groups entries by table,
     * updates the message-center record, then executes one backup script per
     * table group.
     *
     * @param hdfsHeatBackups HDFS path entries to back up; may be null or empty
     * @param id              message-center record id to mark as completed
     * @return "热备结束" on normal completion, empty string when there is no data
     */
    @Override
    public String heatBackup(List<HdfsHeatBackup> hdfsHeatBackups, String id) {
        // BUGFIX: the null/empty guard must run BEFORE size()/get(0). The
        // original called hdfsHeatBackups.size() and .get(0) first, so a null
        // list threw NPE and an empty list threw IndexOutOfBoundsException
        // before the guard could ever fire.
        if (hdfsHeatBackups == null || hdfsHeatBackups.isEmpty()) {
            log.info("-------------------热备数据为空-----------------");
            // No source element exists, so the per-source log key (sourceData)
            // cannot be built here; the original crashed before reaching its
            // appendLogs call in this branch, so no caller depends on it.
            return "";
        }
        int size = hdfsHeatBackups.size();
        // sourceData of the first entry doubles as the execution-log key below.
        String str = hdfsHeatBackups.get(0).getSourceData();
        log.info("----------------------------热备数据开始！！！！--------------------");
        Map<String, List<HdfsHeatBackup>> entrys = dbTableNameSort(hdfsHeatBackups);
        scriptExecService.appendLogs(str+"/"+str,"热备表数量："+entrys.size()+"，hdfs文件数量："+size);
        Date date = messageCenterService.maxCreateTime(ScanCommonConstants.ClusterName.YSJHQ.value
                +str);
        // Mark the triggering message-center record as completed (已完成).
        MessageCenter mess = new MessageCenter();
        mess.setId(id);
        mess.setDataType(ScanCommonConstants.ClusterName.YWC.value);
        messageCenterService.updateById(mess);
        // Dispatch one script execution per 库.表 group; the first entry of each
        // group carries the sync-range / partition-key settings for the table.
        for (Map.Entry<String, List<HdfsHeatBackup>> entry : entrys.entrySet()) {
            HdfsHeatBackup hdfsHeatBackup = entry.getValue().get(0);
            String hiveSql = getPkeyName(hdfsHeatBackup.getSyncRange(), hdfsHeatBackup.getPkeyName());
            scriptExecService.execSh(entry.getValue(), hiveSql, hdfsHeatBackup, date);
        }
        scriptExecService.appendLogs(str+"/"+str,"全部热备结束");
        log.info("--------------------全部热备结束------------------------");
        return "热备结束";
    }

    /**
     * Force-terminates comparison tasks that exceeded their timeout;
     * delegates to {@link ScriptExecService#timeoutEnd()}.
     */
    @Override
    public void timeoutEnd() {
        scriptExecService.timeoutEnd();
    }

    /**
     * Groups the backup entries into sub-lists keyed by source name (库.表).
     *
     * @param hdfsHeatBackups entries to group; must not be null
     * @return map from source name to the entries belonging to that table
     */
    public Map<String, List<HdfsHeatBackup>> dbTableNameSort(List<HdfsHeatBackup> hdfsHeatBackups) {
        return hdfsHeatBackups.stream().collect(Collectors.groupingBy(HdfsHeatBackup::getSourceName));
    }

    /**
     * Builds the Hive WHERE clause restricting a date partition column to the
     * last {@code syncRange} days; full sync (全量) yields no clause.
     *
     * @param syncRange look-back window in days; {@code null} or non-positive
     *                  means full sync (no restriction)
     * @param pkeyName  partition column name; month/year-suffixed names select
     *                  a coarser date pattern, anything else is day-granular
     * @return the WHERE clause, or an empty string when no restriction applies
     */
    public String getPkeyName(Integer syncRange, String pkeyName) {
        // BUGFIX: the original fell through with an empty date pattern when
        // syncRange <= 0 (producing a degenerate "between '' and ''" clause)
        // and NPE'd in pkeyName.equals(...) when pkeyName was null. Treat both
        // cases as full sync, consistent with the syncRange == null branch.
        if (syncRange == null || syncRange <= 0 || StrUtil.isBlank(pkeyName)) {
            return "";
        }
        // Pick the date pattern matching the partition column's granularity.
        String timeFormat;
        switch (pkeyName) {
            case "sk_month":
            case "bk_month":
            case "effective_month":
                timeFormat = "yyyyMM";
                break;
            case "sk_year":
                timeFormat = "yyyy";
                break;
            default:
                timeFormat = "yyyyMMdd";
        }
        String nowTime = DateUtil.format(DateUtil.date(), timeFormat);
        // Lower bound: syncRange days before now, rendered in the same pattern.
        String yearTime = DateUtil.format(
                DateUtil.offset(DateUtil.date(), DateField.DAY_OF_YEAR, -syncRange), timeFormat);
        return "where {pkeyName} between '{yearTime}' and '{nowTime}'"
                .replace("{pkeyName}", pkeyName)
                .replace("{nowTime}", nowTime)
                .replace("{yearTime}", yearTime);
    }

}
