package com.hexinfo.dmpro.sparing.service.impl;

import cn.hutool.core.date.DateUtil;
import cn.hutool.core.io.file.FileAppender;
import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.StrUtil;
import cn.hutool.http.HttpUtil;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.hexinfo.dmpro.common.model.ClusterSparingConf;
import com.hexinfo.dmpro.common.model.MessageCenter;
import com.hexinfo.dmpro.common.service.ClusterSparingConfService;
import com.hexinfo.dmpro.common.service.MessageCenterService;
import com.hexinfo.dmpro.common.utils.ScanCommonConstants;
import com.hexinfo.dmpro.sparing.service.*;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import java.io.File;
import java.util.Date;

/**
 * Resolves the active NameNode of the source ("金桥"), target ("万国") and UAT
 * clusters over HTTP, then keeps the HDFS URL prefix stored in the
 * TD_CLUSTER_SPARING_CONF table in sync with whichever node is currently
 * active. Every step is appended to a timestamped log file; lookup failures,
 * missing configuration and applied changes are additionally reported to the
 * message center.
 */
@Service
@Slf4j
public class AvailabilityPathServiceImpl implements AvailabilityPathService {

    // Source ("金桥") cluster NameNode endpoint settings.
    @Value("${nameNode.source.nameNodeUrl}")
    private String sourceUrl;
    @Value("${nameNode.source.nameNodeIp}")
    private String sourceIp;
    @Value("${nameNode.source.nameNodePort}")
    private String sourcePort;

    // Target ("万国") cluster NameNode endpoint settings.
    @Value("${nameNode.target.nameNodeUrl}")
    private String targetUrl;
    @Value("${nameNode.target.nameNodeIp}")
    private String targetIp;
    @Value("${nameNode.target.nameNodePort}")
    private String targetPort;

    // UAT cluster NameNode endpoint settings.
    @Value("${nameNode.uat.nameNodeUrl}")
    private String uatUrl;
    @Value("${nameNode.uat.nameNodeIp}")
    private String uatIp;
    @Value("${nameNode.uat.nameNodePort}")
    private String uatPort;

    // HDFS file URL template; "{namenode}" is replaced with the active node's IP.
    @Value("${nameNode.hdfsUrl}")
    private String hdfsUrl;
    // Log directory template; "{date}" is replaced with the current yyyyMMdd date.
    @Value("${nameNode.logPath}")
    private String logPath;

    @Autowired
    private ScanMetadataService scanMetadataService;
    @Autowired
    private ClusterSparingConfService clusterSparingConfService;
    @Autowired
    private MessageCenterService messageCenterService;

    /**
     * Looks up the active NameNode of all three clusters and, when every
     * lookup succeeds, synchronizes the HDFS URL prefixes in the database.
     *
     * @return a human-readable result message, or "" when any cluster has no
     *         active NameNode or the cluster configuration is missing
     */
    @Override
    public String getSouAndTar() {
        // The log file is named after the current timestamp.
        String logFilePath = path(DateUtil.format(DateUtil.date(), "yyyyMMddHHmmss"));
        FileAppender haLog = new FileAppender(new File(logFilePath), 100, true);
        try {
            haLog.append(logTime("【高可用获取namenode活跃节点开始】"));
            String souStatus = getNameNodeStatus(sourceUrl, sourceIp, sourcePort, haLog);
            String tarStatus = getNameNodeStatus(targetUrl, targetIp, targetPort, haLog);
            String uatStatus = getNameNodeStatus(uatUrl, uatIp, uatPort, haLog);
            if (StrUtil.isBlank(souStatus) || StrUtil.isBlank(tarStatus) || StrUtil.isBlank(uatStatus)) {
                if (StrUtil.isBlank(souStatus)) {
                    log.error("--------------金桥集群没有活跃nameNode的节点");
                    haLog.append(logTime(sourceIp + "--金桥集群没有活跃nameNode的节点"));
                }
                if (StrUtil.isBlank(tarStatus)) {
                    log.error("--------------万国集群没有活跃nameNode的节点");
                    haLog.append(logTime(targetIp + "--万国集群没有活跃nameNode的节点"));
                }
                if (StrUtil.isBlank(uatStatus)) {
                    log.error("--------------UAT集群没有活跃nameNode的节点");
                    haLog.append(logTime(uatIp + "--UAT集群没有活跃nameNode的节点"));
                }
                // Report the lookup failure to the message center.
                messageCenterService.save(getMessageCenter(ScanCommonConstants.ClusterName.YC.value, logFilePath));
                return "";
            }
            // Build each cluster's HDFS file URL from its active node.
            String souHdfsUrl = hdfsUrl.replace("{namenode}", souStatus);
            String tarHdfsUrl = hdfsUrl.replace("{namenode}", tarStatus);
            String uatHdfsUrl = hdfsUrl.replace("{namenode}", uatStatus);
            return updateDatabase(souHdfsUrl, tarHdfsUrl, uatHdfsUrl, haLog, logFilePath);
        } finally {
            // Always write the closing marker and flush, whichever branch was taken.
            haLog.append(logTime("【高可用获取namenode活跃节点结束】"));
            haLog.flush();
        }
    }

    /**
     * Synchronizes the HDFS URL prefix of the three clusters stored in the
     * TD_CLUSTER_SPARING_CONF table with the freshly resolved values.
     * (Renamed from the original typo "updateDataDase".)
     *
     * @return a result message, or "" when any cluster configuration is missing
     */
    private String updateDatabase(String souHdfsUrl, String tarHdfsUrl, String uatHdfsUrl,
                                  FileAppender haLog, String logFilePath) {
        // Load the stored configuration row of each cluster.
        ClusterSparingConf souCluster =
                scanMetadataService.wrapperClusterSparingConf(Wrappers.lambdaQuery(), "金桥");
        ClusterSparingConf tarCluster =
                scanMetadataService.wrapperClusterSparingConf(Wrappers.lambdaQuery(), "万国");
        ClusterSparingConf uatCluster =
                scanMetadataService.wrapperClusterSparingConf(Wrappers.lambdaQuery(), "UAT");
        if (ObjectUtil.isEmpty(souCluster) || ObjectUtil.isEmpty(tarCluster) || ObjectUtil.isEmpty(uatCluster)) {
            reportMissingCluster(souCluster, "金桥", haLog);
            reportMissingCluster(tarCluster, "万国", haLog);
            reportMissingCluster(uatCluster, "UAT", haLog);
            // Report the configuration problem to the message center.
            messageCenterService.save(getMessageCenter(ScanCommonConstants.ClusterName.YC.value, logFilePath));
            return "";
        }
        // Non-short-circuit |= so every cluster is checked and updated.
        boolean changed = false;
        changed |= syncHdfsPrefix(souCluster, souHdfsUrl, "金桥", haLog);
        changed |= syncHdfsPrefix(tarCluster, tarHdfsUrl, "万国", haLog);
        changed |= syncHdfsPrefix(uatCluster, uatHdfsUrl, "UAT", haLog);
        if (changed) {
            // Announce the URL change through the message center.
            messageCenterService.save(getMessageCenter(ScanCommonConstants.ClusterName.YWC.value, logFilePath));
            return "Hdfs文件的url已经修改";
        }
        haLog.append(logTime("【三个集群活跃节点与数据库中节点一致，无需修改】"));
        return "Hdfs文件的url无需修改";
    }

    /**
     * Logs a missing-configuration error for the given cluster when its row
     * was not found in TD_CLUSTER_SPARING_CONF; no-op otherwise.
     *
     * @param cluster the loaded row, possibly empty
     * @param label   cluster display name used in the message ("金桥"/"万国"/"UAT")
     */
    private void reportMissingCluster(ClusterSparingConf cluster, String label, FileAppender haLog) {
        if (ObjectUtil.isEmpty(cluster)) {
            String msg = "异常(" + label + "集群信息)数据库表 TD_CLUSTER_SPARING_CONF 中未配置" + label + "集群信息";
            log.error(msg);
            haLog.append(logTime(msg));
        }
    }

    /**
     * Updates the cluster's stored HDFS URL prefix when it differs from the
     * freshly resolved one.
     *
     * @return true when the row was updated, false when already in sync
     */
    private boolean syncHdfsPrefix(ClusterSparingConf cluster, String newUrl, String label, FileAppender haLog) {
        // StrUtil.equals is null-safe; the stored prefix may be unset.
        if (StrUtil.equals(cluster.getHdfsPrefixPath(), newUrl)) {
            return false;
        }
        cluster.setHdfsPrefixPath(newUrl);
        clusterSparingConfService.updateById(cluster);
        haLog.append(logTime("【(" + label + "集群信息)数据库表 TD_CLUSTER_SPARING_CONF hdfs的文件url已经修改为：" + newUrl + "】"));
        return true;
    }

    /**
     * Builds a message-center entry pointing at this run's log file.
     *
     * @param state       message data-type constant for the event kind
     * @param logFilePath path of the log file produced by this run
     */
    private MessageCenter getMessageCenter(String state, String logFilePath) {
        MessageCenter messageCenter = new MessageCenter();
        messageCenter.setMenu(ScanCommonConstants.ClusterName.GKYJDBD.value);
        messageCenter.setOperator("admin");
        messageCenter.setDataType(state);
        messageCenter.setMessageSubject(ScanCommonConstants.ClusterName.NNJDBD.value);
        messageCenter.setMessageWeight(ScanCommonConstants.ClusterName.ONE.value);
        messageCenter.setLogPath(logFilePath);
        return messageCenter;
    }

    /**
     * Probes each candidate NameNode IP (comma-separated list) over HTTP and
     * returns the first one whose status response contains "active".
     *
     * @param nameNodeUrl  URL template with "{namenode}" and "{namenodePort}" placeholders
     * @param nameNodeIp   comma-separated candidate IPs
     * @param nameNodePort NameNode HTTP port
     * @return the active node's IP, or "" when no candidate is active
     */
    private String getNameNodeStatus(String nameNodeUrl, String nameNodeIp, String nameNodePort, FileAppender haLog) {
        for (String ip : nameNodeIp.split(",")) {
            try {
                String url = nameNodeUrl.replace("{namenode}", ip).replace("{namenodePort}", nameNodePort);
                // Send the GET request and inspect the status response.
                String result = HttpUtil.get(url);
                log.info("获取nameNode活跃节点的url请求：{}", url);
                haLog.append(logTime("获取nameNode活跃节点的url请求：" + url));
                // Null-safe guard: the HTTP response body may be null or empty.
                if (StrUtil.isNotBlank(result) && result.contains("active")) {
                    log.info("{}--nameNode活跃节点：{}", nameNodeIp, ip);
                    haLog.append(logTime(nameNodeIp + "--nameNode活跃节点：" + ip));
                    return ip;
                }
            } catch (Exception e) {
                // Keep probing the remaining candidates; log with the full stack trace.
                log.error("获取nameNode活跃节点的url请求异常：", e);
                haLog.append(logTime("获取nameNode活跃节点的url请求异常：" + e));
            }
        }
        return "";
    }

    /**
     * Builds the full path of this run's log file.
     *
     * @param sounName file name stem (a "yyyyMMddHHmmss" timestamp)
     */
    public String path(String sounName) {
        return logPath.replace("{date}", DateUtil.format(DateUtil.date(), "yyyyMMdd"))
                + sounName + ".txt";
    }

    /**
     * Prefixes the given text with the current "yyyy-MM-dd HH:mm:ss" timestamp.
     *
     * @param str log line text
     */
    private String logTime(String str) {
        return "[" + DateUtil.format(DateUtil.date(), "yyyy-MM-dd HH:mm:ss") + "]" + str;
    }

}
