package com.hub.realtime.common.utils.hadoop;

import com.hub.realtime.common.core.domain.model.ClusterInfo;
import com.hub.realtime.common.utils.RequireUtil;
import com.hub.realtime.common.utils.StringUtils;
import com.streamxhub.streamx.common.conf.ConfigConst;
import com.streamxhub.streamx.common.util.*;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.service.Service;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.conf.HAUtil;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.util.RMHAUtils;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import scala.Tuple2;
import scala.collection.Iterator;

import javax.security.auth.DestroyFailedException;
import javax.security.auth.kerberos.KerberosTicket;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Method;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.security.PrivilegedAction;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;

import static com.hub.realtime.common.constant.Constants.HADOOP_USER_NAME;

/**
 * Utility class for working with multiple Hadoop clusters at once.
 *
 * <p>All caches below are keyed by the cluster's Hadoop conf directory path
 * ({@code ClusterInfo#getHadoopConfDir()}), so each cluster gets its own
 * Configuration, UGI, FileSystem, YarnClient and ResourceManager web URL.
 */
@Slf4j
public class HadoopUtil {

    // Per-cluster caches, keyed by the cluster's Hadoop conf directory.
    private static final ConcurrentHashMap<String, FileSystem> reusableHdfs = new ConcurrentHashMap<>();
    private static final ConcurrentHashMap<String, UserGroupInformation> ugi = new ConcurrentHashMap<>();
    private static final ConcurrentHashMap<String, YarnClient> reusableYarnClient = new ConcurrentHashMap<>();
    private static final ConcurrentHashMap<String, Configuration> reusableConf = new ConcurrentHashMap<>();
    private static final ConcurrentHashMap<String, String> rmHttpURL = new ConcurrentHashMap<>();
    private static final ConcurrentHashMap<String, Configuration> configurationCache = new ConcurrentHashMap<>();


    /**
     * Evict every cached entry (Configuration, UGI, FileSystem, YarnClient,
     * RM web URL) for the given cluster.
     *
     * @param clusterInfo cluster whose cached entries should be removed
     */
    public static void clearClusterInfo(ClusterInfo clusterInfo) {
        try {
            String confDir = clusterInfo.getHadoopConfDir();
            reusableHdfs.remove(confDir);
            ugi.remove(confDir);
            reusableYarnClient.remove(confDir);
            reusableConf.remove(confDir);
            rmHttpURL.remove(confDir);
            configurationCache.remove(confDir);
        } catch (Exception e) {
            // Best-effort eviction: never propagate, but do not swallow silently either.
            log.warn("clearClusterInfo failed", e);
        }
    }

    /**
     * Load the kerberos settings for the cluster from its configured yaml file
     * (e.g. xxxx/xxxx/kerberos.yml).
     *
     * @param clusterInfo cluster whose kerberos conf file should be read
     * @return the parsed key/value pairs, or {@code null} when no kerberos conf
     *         path is configured or it is not a regular file (callers null-check)
     */
    public static Map<String, String> kerberosConf(ClusterInfo clusterInfo) {
        Map<String, String> resMap = null;
        if (StringUtils.isNotEmpty(clusterInfo.getKerberosConfDir())) {
            File file = new File(clusterInfo.getKerberosConfDir());
            if (file.exists() && file.isFile()) {
                resMap = new HashMap<>();
                // Copy the scala immutable map into a plain java map for the callers.
                scala.collection.immutable.Map<String, String> yamlConf = PropertiesUtils.fromYamlFile(file.getAbsolutePath());
                Iterator<Tuple2<String, String>> iterator = yamlConf.iterator();
                while (iterator.hasNext()) {
                    Tuple2<String, String> next = iterator.next();
                    resMap.put(next._1, next._2);
                }
            }
        }
        return resMap;
    }

    /**
     * Get (or lazily build and cache) the Hadoop {@link Configuration} for the
     * cluster. On first construction this also performs the kerberos login (or
     * creates a remote user) and stores the resulting UGI for the cluster.
     *
     * @param clusterInfo target cluster
     * @return the cached Configuration for the cluster's conf dir
     */
    public static Configuration hadoopConf(ClusterInfo clusterInfo) {
        String configDir = clusterInfo.getHadoopConfDir();
        String kerberosDir = clusterInfo.getKerberosConfDir();
        // NOTE(review): check-then-put is not atomic; concurrent first calls may
        // build the configuration twice. Benign for a cache, kept as-is.
        if (reusableConf.get(configDir) == null) {
            Configuration configuration = initHadoopConf(clusterInfo);
            reusableConf.put(configDir, configuration);
            if (StringUtils.isNotEmpty(kerberosDir)) {
                // Parse the kerberos yaml once instead of re-reading it per lookup.
                Map<String, String> krbConf = kerberosConf(clusterInfo);
                if (krbConf != null) {
                    String enableString = krbConf.getOrDefault(ConfigConst.KEY_SECURITY_KERBEROS_ENABLE(), "false");
                    try {
                        if (Boolean.parseBoolean(enableString)) {
                            ugi.put(configDir, kerberosLogin(clusterInfo, configuration));
                            reLoginKerberos();
                        } else {
                            ugi.put(configDir, UserGroupInformation.createRemoteUser(hadoopUserName()));
                        }
                    } catch (Exception ex) {
                        // Keep previous behaviour: a failed login still leaves the
                        // plain configuration cached; log with the full stack trace.
                        log.error("kerberos setup failed", ex);
                    }
                }
            }
        }
        return reusableConf.get(configDir);
    }

    /**
     * Build a {@link Configuration} from the cluster's conf dir and apply the
     * defaults this service relies on.
     */
    private static Configuration initHadoopConf(ClusterInfo clusterInfo) {
        String configDir = clusterInfo.getHadoopConfDir();
        Configuration conf = getConfigurationFromHadoopConfDir(clusterInfo);
        // Also expose the conf dir's resources on the classpath.
        ClassLoaderUtils.loadResource(configDir);

        if (StringUtils.isBlank(conf.get("hadoop.tmp.dir"))) {
            conf.set("hadoop.tmp.dir", "/tmp");
        }
        if (StringUtils.isBlank(conf.get("hbase.fs.tmp.dir"))) {
            conf.set("hbase.fs.tmp.dir", "/tmp");
        }
        conf.set("yarn.timeline-service.enabled", "false");
        conf.set("fs.hdfs.impl", DistributedFileSystem.class.getName());
        conf.set("fs.file.impl", LocalFileSystem.class.getName());
        // Disable the FS cache so each cluster gets its own FileSystem instance.
        conf.set("fs.hdfs.impl.disable.cache", "true");
        return conf;
    }

    /**
     * Perform a kerberos keytab login with the principal/keytab from the
     * cluster's kerberos conf and return the resulting UGI.
     *
     * @throws IllegalArgumentException when principal/keytab are missing or the login fails
     */
    private static UserGroupInformation kerberosLogin(ClusterInfo clusterInfo, Configuration configuration) {
        log.info("kerberos login starting....");
        // Read the kerberos yaml once; the old code re-parsed it for every key.
        Map<String, String> krbConf = kerberosConf(clusterInfo);
        String principal = krbConf.getOrDefault(ConfigConst.KEY_SECURITY_KERBEROS_PRINCIPAL(), "").trim();
        String keytab = krbConf.getOrDefault(ConfigConst.KEY_SECURITY_KERBEROS_KEYTAB(), "").trim();
        RequireUtil.require(StringUtils.isNotEmpty(principal)
                && StringUtils.isNotEmpty(keytab), ConfigConst.KEY_SECURITY_KERBEROS_PRINCIPAL().concat(" 和 ")
                .concat(ConfigConst.KEY_SECURITY_KERBEROS_KEYTAB())
                .concat("不能为空"));
        // Prefer the dedicated krb5-conf key, falling back to java.security.krb5.conf.
        String krb5 = krbConf.getOrDefault(
                ConfigConst.KEY_SECURITY_KERBEROS_KRB5_CONF(),
                krbConf.getOrDefault(ConfigConst.KEY_JAVA_SECURITY_KRB5_CONF(), "")
        ).trim();
        if (StringUtils.isNotEmpty(krb5)) {
            System.setProperty("java.security.krb5.conf", krb5);
            System.setProperty("java.security.krb5.conf.path", krb5);
        }
        // NOTE(review): kerberos debug output is unconditionally enabled here —
        // consider making it configurable.
        System.setProperty("sun.security.spnego.debug", "true");
        System.setProperty("sun.security.krb5.debug", "true");
        configuration.set(ConfigConst.KEY_HADOOP_SECURITY_AUTHENTICATION(), ConfigConst.KEY_KERBEROS());
        try {
            UserGroupInformation.setConfiguration(configuration);
            // Renamed from "ugi" so it no longer shadows the static cache field.
            UserGroupInformation loginUser = UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, keytab);
            log.info("kerberos authentication successful");
            return loginUser;
        } catch (Exception ex) {
            log.error("kerberos login failed ".concat(ExceptionUtils.stringifyException(ex)));
            throw new IllegalArgumentException(ex);
        }
    }

    /**
     * Compute the kerberos relogin interval as 90% of the current TGT lifetime,
     * or 0 when the TGT (or a usable start/end time) is unavailable.
     */
    private static Long getRefreshTime() {
        try {
            UserGroupInformation user = UserGroupInformation.getLoginUser();
            // UserGroupInformation does not expose the TGT publicly; reflect on getTGT().
            Method method = UserGroupInformation.class.getDeclaredMethod("getTGT");
            method.setAccessible(true);
            KerberosTicket tgt = (KerberosTicket) method.invoke(user);
            // Guard start/end as well: KerberosTicket times can be null for a
            // destroyed/invalid ticket, which used to NPE here.
            if (tgt == null || tgt.getStartTime() == null || tgt.getEndTime() == null) {
                return 0L;
            }
            long start = tgt.getStartTime().getTime();
            long end = tgt.getEndTime().getTime();
            return (long) ((end - start) * 0.90f);
        } catch (Exception ex) {
            log.error("获取hadoop刷新时间出错：" + ex.getMessage());
            return 0L;
        }
    }

    /**
     * Schedule a periodic TGT check/relogin from the keytab. Skipped entirely
     * when no valid refresh interval can be computed.
     */
    private static void reLoginKerberos() {
        long refreshTime = getRefreshTime();
        if (refreshTime <= 0L) {
            // Timer.schedule throws IllegalArgumentException for a non-positive
            // period, and without a TGT lifetime there is nothing to schedule.
            log.warn("skip kerberos relogin timer, invalid refresh time: {}", refreshTime);
            return;
        }
        // Daemon timer so the relogin thread never blocks JVM shutdown.
        Timer timer = new Timer("kerberos-relogin", true);
        timer.schedule(new TimerTask() {
            @Override
            public void run() {
                try {
                    UserGroupInformation.getLoginUser().checkTGTAndReloginFromKeytab();
                    log.info("Check Kerberos Tgt And reLogin From Keytab Finish:refresh time: ".concat(DateUtils.fullFormat()));
                } catch (IOException e) {
                    log.error("kerberos relogin failed", e);
                }
            }
        }, refreshTime, refreshTime);
    }


    /**
     * Load core-site/hdfs-site/yarn-site/mapred-site from the cluster's conf
     * dir into a {@link Configuration}, cached per conf dir.
     */
    private static Configuration getConfigurationFromHadoopConfDir(ClusterInfo clusterInfo) {
        String configDir = clusterInfo.getHadoopConfDir();
        // computeIfAbsent also fixes the old contains() call: on a
        // ConcurrentHashMap contains() tests VALUES, not keys, so the cache
        // never hit and the configuration was rebuilt on every call.
        return configurationCache.computeIfAbsent(configDir, dir -> {
            FileUtils.exists(dir);
            File hadoopConfDir = new File(dir);
            List<String> confName = Arrays.asList("core-site.xml", "hdfs-site.xml", "yarn-site.xml", "mapred-site.xml");
            Configuration conf = new Configuration();
            Arrays.stream(Objects.requireNonNull(hadoopConfDir.listFiles()))
                    .filter(x -> x.isFile() && confName.contains(x.getName()))
                    .forEach(f -> conf.addResource(new Path(f.getAbsolutePath())));
            return conf;
        });
    }


    /**
     * Destroy the given kerberos TGT (if any) and close + evict the cluster's
     * cached YarnClient and FileSystem. Failures are wrapped in
     * {@link IllegalArgumentException}.
     *
     * <p>NOTE(review): currently unreferenced within this class; kept for callers
     * elsewhere / future use.
     */
    private static void closeHadoop(KerberosTicket tgt, ClusterInfo clusterInfo) {
        String confDir = clusterInfo.getHadoopConfDir();
        if (tgt != null && !tgt.isDestroyed()) {
            try {
                tgt.destroy();
            } catch (DestroyFailedException e) {
                log.error("关闭hadoop出错：" + e.getMessage(), e);
                throw new IllegalArgumentException(e);
            }
        }
        YarnClient client = reusableYarnClient.get(confDir);
        if (client != null) {
            try {
                client.close();
            } catch (IOException e) {
                log.error("关闭hadoop出错：" + e.getMessage(), e);
                throw new IllegalArgumentException(e);
            }
            reusableYarnClient.remove(confDir);
        }
        FileSystem fs = reusableHdfs.get(confDir);
        if (fs != null) {
            try {
                fs.close();
            } catch (IOException e) {
                log.error("关闭hadoop出错：" + e.getMessage(), e);
                throw new IllegalArgumentException(e);
            }
            reusableHdfs.remove(confDir);
        }
    }


    /**
     * The hadoop user to act as, read from the HADOOP_USER_NAME system
     * property and defaulting to "hdfs".
     */
    public static String hadoopUserName() {
        return SystemPropertyUtils.get(HADOOP_USER_NAME, "hdfs");
    }

    /**
     * Get (or lazily create and cache) the HDFS {@link FileSystem} for the
     * cluster, running the creation under the cluster's UGI when one exists.
     *
     * @param clusterInfo target cluster
     * @return the cached FileSystem for the cluster's conf dir
     */
    public static FileSystem hdfs(ClusterInfo clusterInfo) {
        String confDir = clusterInfo.getHadoopConfDir();
        Configuration configuration = hadoopConf(clusterInfo);
        if (reusableHdfs.get(confDir) == null) {
            FileSystem dfs;
            // Fix: look up THIS cluster's UGI. The old check tested the whole
            // map (ugi != null && !ugi.isEmpty()), so a UGI cached for another
            // cluster caused ugi.get(confDir).doAs(...) to NPE here.
            UserGroupInformation user = ugi.get(confDir);
            if (user != null) {
                dfs = user.doAs((PrivilegedAction<FileSystem>) () -> {
                    try {
                        return FileSystem.get(configuration);
                    } catch (IOException e) {
                        // Keep the original message but preserve the cause.
                        throw new IllegalArgumentException("访问HDFS文件出错：" + e.getMessage(), e);
                    }
                });
            } else {
                try {
                    dfs = FileSystem.get(configuration);
                } catch (IOException e) {
                    throw new IllegalArgumentException("访问HDFS文件出错：" + e.getMessage(), e);
                }
            }
            reusableHdfs.put(confDir, dfs);
        }
        return reusableHdfs.get(confDir);
    }

    /**
     * Get (or lazily create, init and start) the cached {@link YarnClient} for
     * the cluster; a stopped client is replaced with a fresh one.
     */
    public static YarnClient yarnClient(ClusterInfo clusterInfo) {
        String confDir = clusterInfo.getHadoopConfDir();
        YarnClient yarnClient = reusableYarnClient.get(confDir);
        if (yarnClient == null || !yarnClient.isInState(Service.STATE.STARTED)) {
            yarnClient = YarnClient.createYarnClient();
            yarnClient.init(new YarnConfiguration(hadoopConf(clusterInfo)));
            yarnClient.start();
            reusableYarnClient.put(confDir, yarnClient);
        }
        return reusableYarnClient.get(confDir);
    }

    /**
     * Resolve the active ResourceManager web application URL for the cluster,
     * handling both single-RM and HA deployments. The result is cached per
     * conf dir.
     *
     * @param clusterInfo target cluster
     * @param getLatest   when true, ignore the cached URL and re-resolve
     * @return the RM webapp URL including protocol, host and port
     */
    public static String getRMWebAppURL(ClusterInfo clusterInfo, boolean getLatest) {
        String confDir = clusterInfo.getHadoopConfDir();
        String rmUrl = rmHttpURL.get(confDir);
        if (StringUtils.isEmpty(rmUrl) || getLatest) {
            synchronized (HadoopUtil.class) {
                // Fix: re-read under the lock. The old code re-tested a stale
                // local variable, which defeated the double-checked locking.
                rmUrl = rmHttpURL.get(confDir);
                if (StringUtils.isEmpty(rmUrl) || getLatest) {
                    Configuration conf = hadoopConf(clusterInfo);
                    boolean useHttps = YarnConfiguration.useHttps(conf);
                    String addressPrefix;
                    String defaultPort;
                    String protocol;
                    if (useHttps) {
                        addressPrefix = YarnConfiguration.RM_WEBAPP_HTTPS_ADDRESS;
                        defaultPort = "8090";
                        protocol = "https://";
                    } else {
                        addressPrefix = YarnConfiguration.RM_WEBAPP_ADDRESS;
                        defaultPort = "8088";
                        protocol = "http://";
                    }
                    String name;
                    if (!HAUtil.isHAEnabled(conf)) {
                        name = addressPrefix;
                    } else {
                        YarnConfiguration yarnConf = new YarnConfiguration(conf);
                        String activeRMId;
                        Optional<String> rmhaId = Optional.ofNullable(RMHAUtils.findActiveRMHAId(yarnConf));
                        if (rmhaId.isPresent()) {
                            log.info("findActiveRMHAId successful");
                            activeRMId = rmhaId.get();
                        } else {
                            // String.valueOf guards against an unset yarn.acl.enable:
                            // conf.get may return null and concat(null) would NPE.
                            log.warn("findActiveRMHAId is null,config yarn.acl.enable:"
                                    .concat(String.valueOf(yarnConf.get("yarn.acl.enable")))
                                    .concat(",now http try it."));
                            // Fall back to probing each RM's web endpoint over HTTP.
                            Map<String, String> idUrlMap = new HashMap<>();
                            Collection<String> rmhaIds = HAUtil.getRMHAIds(conf);
                            rmhaIds.forEach(id -> {
                                String s = conf.get(HAUtil.addSuffix(addressPrefix, id));
                                String address;
                                if (StringUtils.isNotEmpty(s)) {
                                    address = s;
                                } else {
                                    String hostName = conf.get(HAUtil.addSuffix("yarn.resourcemanager.hostname", id));
                                    address = hostName.concat(":".concat(defaultPort));
                                }
                                idUrlMap.put(protocol.concat(address), id);
                            });
                            String rmId = "";
                            int rpcTimeoutForChecks = yarnConf.getInt(CommonConfigurationKeys.HA_FC_CLI_CHECK_TIMEOUT_KEY,
                                    CommonConfigurationKeys.HA_FC_CLI_CHECK_TIMEOUT_DEFAULT);
                            for (Map.Entry<String, String> idu : idUrlMap.entrySet()) {
                                String activeUrl = httpTestYarnRMUrl(idu.getKey(), rpcTimeoutForChecks);
                                if (StringUtils.isNotEmpty(activeUrl)) {
                                    // Fix: use the entry we just probed successfully;
                                    // the probe's returned host string need not equal
                                    // the map key, so idUrlMap.get(activeUrl) could miss.
                                    rmId = idu.getValue();
                                    break;
                                }
                            }
                            activeRMId = rmId;
                        }
                        RequireUtil.require(StringUtils.isNotEmpty(activeRMId), "HadoopUtil.getRMWebAppURL: can not found yarn active node");
                        log.info("current activeRMHAId: ".concat(activeRMId));
                        String appActiveRMKey = HAUtil.addSuffix(addressPrefix, activeRMId);
                        String hostnameActiveRMKey = HAUtil.addSuffix(YarnConfiguration.RM_HOSTNAME, activeRMId);
                        // Prefer the hostname key only when the webapp address key is absent.
                        if (HAUtil.getConfValueForRMInstance(appActiveRMKey, yarnConf) == null
                                && null != HAUtil.getConfValueForRMInstance(hostnameActiveRMKey, yarnConf)) {
                            name = hostnameActiveRMKey;
                        } else {
                            name = appActiveRMKey;
                        }
                        log.info("Find rm web address by : ".concat(name));
                    }
                    InetSocketAddress inetSocketAddress = conf.getSocketAddr(name, "0.0.0.0:".concat(defaultPort), Integer.parseInt(defaultPort));
                    InetSocketAddress address = NetUtils.getConnectAddress(inetSocketAddress);
                    StringBuilder buffer = new StringBuilder(protocol);
                    InetAddress resolved = address.getAddress();
                    // A wildcard/loopback bind address is useless to remote callers;
                    // substitute the local canonical host name in that case.
                    if (resolved != null && !resolved.isAnyLocalAddress() && !resolved.isLoopbackAddress()) {
                        buffer.append(address.getHostName());
                    } else {
                        try {
                            buffer.append(InetAddress.getLocalHost().getCanonicalHostName());
                        } catch (Exception ex) {
                            buffer.append(address.getHostName());
                        }
                    }

                    String url = buffer
                            .append(":")
                            .append(address.getPort())
                            .toString();
                    rmHttpURL.put(confDir, url);
                    log.info("yarn resourceManager webapp url:".concat(url));
                }
            }
        }
        return rmHttpURL.get(confDir);
    }

    /**
     * Probe a candidate RM web URL.
     *
     * @return the responding target host string, or "" on any failure
     */
    private static String httpTestYarnRMUrl(String url, int timeout) {
        HttpClientContext context = HttpClientContext.create();
        HttpGet httpGet = new HttpGet(url);
        RequestConfig requestConfig = RequestConfig
                .custom()
                .setSocketTimeout(timeout)
                .setConnectTimeout(timeout)
                .build();
        httpGet.setConfig(requestConfig);
        // try-with-resources: the previous version leaked the CloseableHttpClient.
        try (CloseableHttpClient httpClient = HttpClients.createDefault()) {
            httpClient.execute(httpGet, context);
            return context.getTargetHost().toString();
        } catch (Exception ex) {
            return "";
        }
    }


    /**
     * Parse a yarn application id string (e.g. "application_1234567890123_0001")
     * into an {@link ApplicationId}.
     *
     * @param appId application id string, must be non-empty and underscore-separated
     * @return the parsed ApplicationId
     */
    public static ApplicationId toApplicationId(String appId) {
        // Fixed message typo ("muse" -> "must").
        RequireUtil.require(StringUtils.isNotEmpty(appId), "HadoopUtil.toApplicationId: applicationId must not be null");
        String[] timestampAndId = appId.split("_");
        // Validate the shape before indexing so malformed ids fail with a clear
        // message instead of ArrayIndexOutOfBoundsException.
        RequireUtil.require(timestampAndId.length >= 3, "HadoopUtil.toApplicationId: invalid applicationId: " + appId);
        return ApplicationId.newInstance(Long.parseLong(timestampAndId[1]), Integer.parseInt(timestampAndId[timestampAndId.length - 1]));
    }

    /**
     * Fetch the tracking URL of a yarn application via the cluster's YarnClient.
     *
     * @throws IllegalArgumentException wrapping any failure from the yarn client
     */
    public static String getYarnAppTrackingUrl(ApplicationId applicationId, ClusterInfo clusterInfo) {
        try {
            return yarnClient(clusterInfo).getApplicationReport(applicationId).getTrackingUrl();
        } catch (Exception e) {
            // Pass the throwable to the logger: e.getMessage() can be null
            // (concat(null) used to NPE here) and the stack trace is preserved.
            log.error("获取YarnAppTrackingUrl失败：", e);
            throw new IllegalArgumentException(e);
        }
    }

    /**
     * Download a jar from HDFS into a fresh local temp directory.
     *
     * @param clusterInfo cluster to read from
     * @param jarOnHdfs   HDFS path of the jar
     * @return absolute local path of the downloaded jar
     */
    @SneakyThrows
    public String downloadJar(ClusterInfo clusterInfo, String jarOnHdfs) {
        File tempDir = FileUtils.createTempDir();
        // Reuse the cached, kerberos-aware FileSystem instead of creating a new,
        // never-closed instance (fs.hdfs.impl.disable.cache=true) on every call.
        FileSystem fs = hdfs(clusterInfo);
        Path sourcePath = fs.makeQualified(new Path(jarOnHdfs));
        if (!fs.exists(sourcePath)) {
            // Fixed missing space: the message used to read "...jardoesn't exist."
            throw new IOException("jar file: ".concat(jarOnHdfs).concat(" doesn't exist."));
        }
        Path destPath = new Path(tempDir.getAbsolutePath() + "/" + sourcePath.getName());
        fs.copyToLocalFile(sourcePath, destPath);
        return new File(destPath.toString()).getAbsolutePath();
    }

}
