package com.inspur.cloud.service.dataspace;

import com.alibaba.fastjson.JSONObject;
import com.inspur.cloud.dao.ambari.ComponentDesiredStateDao;
import com.inspur.cloud.entity.ambari.*;
import com.inspur.cloud.service.ambari.*;
import com.inspur.cloud.util.TConstants;
import org.apache.hadoop.yarn.api.records.timelineservice.ClusterEntity;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@Component
public class ClusterConfigUtil {

    @Autowired
    private ClusterConfigService clusterConfigService;
    @Autowired
    private ComponentStateService componentStateService;
    @Autowired
    private ComponentDesiredStateService componentDesiredStateService;
    @Autowired
    private HostsService hostsService;
    @Autowired
    private ClustersService clustersService;

    private static final Logger logger = LoggerFactory.getLogger(ClusterConfigUtil.class);

    /**
     * Builds the YARN client configuration: the ResourceManager web address from
     * {@code yarn-site} plus the raw capacity-scheduler config blob under the
     * {@code "properties"} key.
     *
     * @return map of YARN config entries (never null)
     */
    public Map getYarnConfig() {
        Map<String, Object> retMap = new HashMap<>(
                getConfigValue("yarn-site", "yarn.resourcemanager.webapp.address"));
        // NOTE(review): the capacity-scheduler blob is passed through verbatim as a string;
        // selected flag 1 presumably marks the active config version — confirm with DAO.
        ClusterConfigEntity capacityScheduler =
                clusterConfigService.findByTypeNameAndSelected("capacity-scheduler", 1);
        retMap.put("properties", capacityScheduler.getConfigData());
        return retMap;
    }

    /**
     * Builds the HDFS client configuration. When {@code dfs.nameservices} is set
     * (HA cluster) the per-namenode RPC addresses and the failover proxy provider
     * are resolved as well; Kerberos-related entries come from {@code hadoop-env}
     * and {@code core-site}.
     *
     * @return map of HDFS config entries (never null)
     */
    public Map getHdfsConfig() {
        Map<String, Object> retMap = new HashMap<>();
        Map<String, String> hdfsSite = getConfigValue("hdfs-site", "dfs.nameservices");
        retMap.putAll(hdfsSite);
        String nameServices = hdfsSite.get("dfs.nameservices");
        if (nameServices != null && !nameServices.isEmpty()) {
            String namenodesKey = "dfs.ha.namenodes." + nameServices;
            Map<String, String> haSite = getConfigValue("hdfs-site", namenodesKey);
            retMap.putAll(haSite);
            String namenodeList = haSite.get(namenodesKey);
            // Fix: the original dereferenced this value unchecked and threw an NPE
            // whenever the HA namenode list was missing from the selected config.
            if (namenodeList != null) {
                List<String> rpcKeys = new ArrayList<>();
                for (String namenode : namenodeList.split(",")) {
                    rpcKeys.add("dfs.namenode.rpc-address." + nameServices + "." + namenode);
                }
                retMap.putAll(getConfigValue("hdfs-site", rpcKeys.toArray(new String[0])));
            }
            retMap.put("dfs.client.failover.proxy.provider." + nameServices,
                    "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
        }
        retMap.putAll(getConfigValue("hadoop-env",
                "hdfs_user_keytab", "hdfs_principal_name", "hdfs_user"));
        retMap.putAll(getConfigValue("core-site",
                "hadoop.security.authentication", "hadoop.security.authorization", "fs.defaultFS"));
        retMap.put("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        return retMap;
    }

    /**
     * Builds the HBase client configuration from {@code hbase-env},
     * {@code hbase-site} and {@code core-site}.
     *
     * @return map of HBase config entries (never null)
     */
    public Map getHbaseConfig() {
        Map<String, Object> retMap = new HashMap<>(
                getConfigValue("hbase-env", "hbase_principal_name", "hbase_user_keytab"));
        retMap.putAll(getConfigValue("hbase-site",
                "hbase.superuser",
                "hbase.security.authorization",
                "hbase.zookeeper.property.clientPort",
                "hbase.master.kerberos.principal",
                "hbase.regionserver.kerberos.principal",
                "zookeeper.znode.parent",
                "hbase.security.authentication",
                "hbase.zookeeper.quorum",
                // exposes the maximum region size (added per earlier change note)
                "hbase.hregion.max.filesize"));
        retMap.putAll(getConfigValue("core-site", "hadoop.security.authentication"));
        return retMap;
    }

    /**
     * Builds the Hive client configuration from {@code core-site} and
     * {@code hive-site}, and resolves the HiveServer2 host name under the
     * {@code "hive.server"} key.
     *
     * @return map of Hive config entries (never null)
     */
    public Map getHiveConfig() {
        Map<String, Object> retMap = new HashMap<>(getConfigValue("core-site",
                "hadoop.security.authentication", "hadoop.security.authorization"));
        retMap.putAll(getConfigValue("hive-site",
                "hive.server2.thrift.port",
                "hive.server2.authentication.kerberos.keytab",
                "hive.metastore.warehouse.dir",
                "hive.metastore.warehouse.external.dir"));
        // NOTE(review): findMaster is assumed to return the host running HIVE_SERVER;
        // the original NPEs here if no such component exists — confirm upstream guarantee.
        HostsEntity hostsEntity = componentDesiredStateService.findMaster("HIVE_SERVER");
        retMap.put("hive.server", hostsEntity.getHostName());
        return retMap;
    }

    /**
     * Builds the Kafka client configuration from {@code kafka-broker} and adds
     * the list of broker host names under {@link TConstants#KAFKA_BROKER_LIST}.
     *
     * @return map of Kafka config entries (never null)
     */
    public Map getKafkaConfig() {
        List<String> brokerHosts = new ArrayList<>();
        for (ComponentStateEntity entity : componentStateService.findByComponentName("KAFKA_BROKER")) {
            brokerHosts.add(hostsService.findByhostId(entity.getHostId()).getHostName());
        }
        Map<String, Object> retMap = new HashMap<>(getConfigValue("kafka-broker",
                "zookeeper.connect",
                "zookeeper.connection.timeout.ms",
                "zookeeper.session.timeout.ms",
                "security.inter.broker.protocol",
                "listeners",
                "port"));
        retMap.put(TConstants.KAFKA_BROKER_LIST, brokerHosts);
        return retMap;
    }

    /**
     * Builds the Ranger admin configuration: the HTTP port from
     * {@code ranger-admin-site} plus the single RANGER_ADMIN host name under
     * {@code "host_name"}.
     *
     * @return map of Ranger config entries (never null)
     */
    public Map getRangerConfig() {
        // There is exactly one host running RANGER_ADMIN (per deployment assumption).
        List<ComponentStateEntity> list = componentStateService.findByComponentName("RANGER_ADMIN");
        String hostName = hostsService.findByhostId(list.get(0).getHostId()).getHostName();
        Map<String, Object> retMap =
                new HashMap<>(getConfigValue("ranger-admin-site", "ranger.service.http.port"));
        retMap.put("host_name", hostName);
        return retMap;
    }

    /**
     * Lists the host names of all hosts registered in the cluster.
     *
     * @return host names, in DAO iteration order (never null)
     */
    public List<String> getHostsConfig() {
        List<String> hostnames = new ArrayList<>();
        for (HostsEntity e : hostsService.findAll()) {
            hostnames.add(e.getHostName());
        }
        return hostnames;
    }

    /**
     * Reads the Kerberos environment settings (admin server, KDC hosts, realm)
     * from {@code kerberos-env}.
     *
     * @return map of Kerberos config entries (never null)
     */
    public Map getKerberosConfig() {
        return getConfigValue("kerberos-env", "admin_server_host", "kdc_hosts", "realm");
    }

    /**
     * Resolves the host name of the cluster's master (DATASPACE_SERVER) node.
     *
     * @return the master host name
     */
    public String getAmbariServerHost() {
        ComponentDesiredStateEntity componentDesiredStateEntity =
                componentDesiredStateService.findByComponentName("DATASPACE_SERVER");
        return hostsService.findByhostId(componentDesiredStateEntity.getHostId()).getHostName();
    }

    /**
     * Returns the cluster's security type: {@code "kerberos"} when Kerberos is
     * enabled, {@code "NONE"} otherwise (taken from the first cluster record).
     *
     * @return the security type string
     */
    public String getSecurityType() {
        List<ClustersEntity> list = clustersService.findAll();
        return list.get(0).getSecurityType();
    }

    /**
     * Tells whether the cluster is kerberized.
     *
     * @return true when the security type is "kerberos" (case-insensitive)
     */
    public boolean checkKerberized() {
        // Constant-first comparison is also null-safe, unlike the original
        // getSecurityType().equalsIgnoreCase(...), which NPE'd on a null type.
        return "kerberos".equalsIgnoreCase(getSecurityType());
    }

    /**
     * Reads the Ranger user-sync password from the selected {@code ranger-env}
     * config blob.
     *
     * @return the password, or null when the key is absent
     */
    public String getRangerSyncPassword() {
        ClusterConfigEntity clusterConfigEntity =
                clusterConfigService.findByTypeNameAndSelected("ranger-env", 1);
        JSONObject configdata = JSONObject.parseObject(clusterConfigEntity.getConfigData());
        logger.info("ranger-env {}", configdata);
        return configdata.getString("rangerusersync_user_password");
    }

    /**
     * Extracts the requested properties from the currently selected config blob
     * of the given type. Missing properties are silently omitted from the result.
     *
     * @param typeName   config type (e.g. "hdfs-site")
     * @param properties property keys to extract
     * @return map of found key/value pairs (never null)
     */
    private Map<String, String> getConfigValue(String typeName, String... properties) {
        Map<String, String> retMap = new HashMap<>();
        // NOTE(review): NPEs (as the original did) when no selected config of this
        // type exists — presumably guaranteed by cluster provisioning; confirm.
        ClusterConfigEntity clusterConfigEntity =
                clusterConfigService.findByTypeNameAndSelected(typeName, 1);
        JSONObject configdata = JSONObject.parseObject(clusterConfigEntity.getConfigData());
        for (String p : properties) {
            String conf = configdata.getString(p);
            if (conf != null) {
                retMap.put(p, conf);
            }
        }
        return retMap;
    }
}
