package com.inspur.cloud.service.dataspace;

import com.inspur.cloud.configuration.AmbariConfig;
import com.inspur.cloud.configuration.ApiConfig;
import com.inspur.cloud.exception.HiveException;
import com.inspur.cloud.util.TConstants;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.sql.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.*;

@Service
public class HiveService {
    // SLF4J logger for this service.
    private static final Logger log = LoggerFactory.getLogger(HiveService.class);
    @Autowired
    private ApiConfig apiConfig;
    @Autowired
    private KerberosService kerberosService;
    @Autowired
    AmbariConfig ambariConfig;
    @Autowired
    private ClusterConfigUtil clusterConfigUtil;

    /**
     * Obtains a JDBC connection to HiveServer2 for the given database.
     * <p>
     * When the cluster reports {@code hadoop.security.authentication=kerberos}, a
     * keytab login is performed first (classpath keytab in dev mode, the configured
     * dataspace keytab on the server). The actual connect runs on a single-use
     * worker thread and is abandoned after 10 seconds.
     *
     * @param database Hive database name appended to the JDBC URL (assumed trusted)
     * @return an open {@link Connection}; the caller is responsible for closing it
     * @throws HiveException if the Kerberos login or the connect fails or times out
     */
    public Connection getConnection(String database) {
        // Hadoop configuration holder, only populated for the Kerberos login.
        Configuration conf = new Configuration();
        try {
            Map map = clusterConfigUtil.getHiveConfig();
            String url = "jdbc:hive2://" + map.get("hive.server") + ":"
                    + map.get("hive.server2.thrift.port") + "/" + database;
            log.info("hive2 base url:[{}]", url);
            String hadoopSecurity = (String) map.get("hadoop.security.authentication");
            final String finalUrl;
            final String principal;
            final String keytabPath;
            if ("kerberos".equals(hadoopSecurity)) {
                // Principal used for the keytab login.
                String principalName = ambariConfig.getDataspacePrincipal();
                // Keytab file path and krb5.conf path depend on the runtime environment.
                String keypath;
                String krbConf;
                if (apiConfig.isDev()) {
                    // Local debugging: both files come from the classpath.
                    // substring(5) strips the "file:" URL scheme prefix to get a plain path.
                    keypath = Thread.currentThread().getContextClassLoader()
                            .getResource("tempfiles/dataspace.keytab").toString().substring(5);
                    krbConf = Thread.currentThread().getContextClassLoader()
                            .getResource("tempfiles/krb5.conf").toString().substring(5);
                } else {
                    // On the server: administrator-provisioned paths.
                    keypath = ambariConfig.getDataspaceKeytab();
                    krbConf = TConstants.KRB5CONF_PATH;
                }
                log.info("keytab路径是:" + keypath);
                conf.set("hadoop.security.authentication", map.get("hadoop.security.authentication").toString());
                conf.set("hadoop.security.authorization", map.get("hadoop.security.authorization").toString());
                conf.set("hive.server2.authentication.kerberos.principal", principalName);
                conf.set("hive.metastore.kerberos.principal", principalName);
                log.info("krbConf目录:" + krbConf);
                Class.forName("org.apache.hive.jdbc.HiveDriver");
                // Re-point the JVM at the correct krb5.conf before logging in.
                System.clearProperty("java.security.krb5.conf");
                System.setProperty("java.security.krb5.conf", krbConf);
                String realm = clusterConfigUtil.getKerberosConfig().get("realm").toString();
                url = url + ";principal=hive/" + map.get("hive.server") + "@" + realm;
                log.info("hive2 url:[{}]", url);
                finalUrl = url;
                principal = principalName;
                keytabPath = keypath;
            } else {
                finalUrl = url;
                principal = "";
                keytabPath = "";
            }
            ExecutorService exec = Executors.newFixedThreadPool(1);
            try {
                Callable<Connection> call = () -> {
                    if ("kerberos".equals(hadoopSecurity)) {
                        UserGroupInformation.setConfiguration(conf);
                        UserGroupInformation.loginUserFromKeytab(principal, keytabPath);
                    }
                    DriverManager.setLoginTimeout(15);
                    // Fixed: the original log call passed finalUrl with no {} placeholder.
                    log.info("I am in Hive future, url=[{}]", finalUrl);
                    return DriverManager.getConnection(finalUrl, "dataspace", ambariConfig.getAmbariServerUser());
                };
                Future<Connection> future = exec.submit(call);
                Connection connection = future.get(10000, TimeUnit.MILLISECONDS);
                log.info("成功获取hive的连接[{}]", connection);
                return connection;
            } catch (TimeoutException e) {
                log.info("create hive connection timeout", e);
                throw new HiveException("create hive connection timeout" + e.getMessage());
            } catch (Exception e) {
                log.error("create hive connection exception", e);
                throw new HiveException("create hive connection fail" + e.getMessage());
            } finally {
                // Bug fix: the pool was never shut down, leaking one live thread per call.
                exec.shutdownNow();
            }
        } catch (HiveException e) {
            // Bug fix: don't re-wrap our own exception and lose its specific message.
            throw e;
        } catch (Exception e) {
            // Bug fix: pass the throwable as the trailing SLF4J argument, not via {},
            // so the stack trace is actually logged.
            log.error("get Hive conn big exception", e);
            throw new HiveException("get Hive connection big exception");
        }
    }

    /**
     * Creates the given database if it does not already exist.
     *
     * @param database database name (interpolated into the DDL; assumed trusted)
     * @throws HiveException if the statement fails
     */
    public void createDatabase(String database) {
        String sql = "create database if not exists " + database;
        log.info("创建数据库，sql语句:[{}]", sql);
        // try-with-resources replaces the old finally block, which could NPE on
        // statement.close() when createStatement() itself failed.
        try (Connection connection = getConnection("default");
             Statement statement = connection.createStatement()) {
            statement.setQueryTimeout(8);
            statement.execute(sql);
            log.info("创建数据库[{}]成功", database);
        } catch (Exception e) {
            log.error("创建数据库[{}]出错", database, e);
            throw new HiveException("创建数据库异常" + e.getMessage());
        }
    }

    /**
     * Lists all Hive databases.
     *
     * @return database names in the order returned by {@code show databases}
     * @throws HiveException if the query fails
     */
    public List<String> getDatabases() {
        String sql = "show databases";
        List<String> list = new ArrayList<>();
        // Connection, Statement and ResultSet are all closed automatically; the old
        // version leaked the ResultSet and could NPE in its finally block.
        try (Connection connection = getConnection("default");
             Statement statement = connection.createStatement();
             ResultSet rs = statement.executeQuery(sql)) {
            while (rs.next()) {
                list.add(rs.getString(1));
            }
            log.info("成功查询到数据库[{}]", list);
        } catch (SQLException e) {
            log.error("查询数据库出错", e);
            throw new HiveException("查询数据库异常" + e.getMessage());
        }
        return list;
    }

    /**
     * Lists all tables in the given database.
     *
     * @param database database name (interpolated into {@code use}; assumed trusted)
     * @return table names in the order returned by {@code show tables}
     * @throws HiveException if the query fails
     */
    public List<String> getTables(String database) {
        String sql1 = "use " + database;
        String sql2 = "show tables";
        List<String> list = new ArrayList<>();
        try (Connection connection = getConnection(database);
             Statement statement = connection.createStatement()) {
            statement.execute(sql1);
            try (ResultSet rs = statement.executeQuery(sql2)) {
                while (rs.next()) {
                    list.add(rs.getString(1));
                }
            }
            log.info("成功查询到数据库[{}]下的表有[{}]", database, list);
        } catch (SQLException e) {
            log.error("查询数据库[{}]下的表出错", database, e);
            throw new HiveException("查询表异常" + e.getMessage());
        }
        return list;
    }

    /**
     * Describes a table's columns: count, names and data types.
     *
     * @param database  database name (assumed trusted)
     * @param tableName table name (assumed trusted)
     * @return map with keys {@code colNum} (total desc rows, including any section
     *         header rows), {@code colNames} and {@code types}
     * @throws HiveException if the query fails
     */
    public Map<String, Object> getColInfo(String database, String tableName) {
        String sql1 = "use " + database;
        String sql2 = "desc " + tableName;
        Map<String, Object> map = new HashMap<>();
        List<String> colNames = new ArrayList<>();
        List<String> types = new ArrayList<>();
        int rowCount = 0;
        try (Connection connection = getConnection(database);
             Statement statement = connection.createStatement()) {
            statement.execute(sql1);
            try (ResultSet rs = statement.executeQuery(sql2)) {
                while (rs.next()) {
                    String colName = rs.getString(1);
                    log.debug("{}\t{}", colName, rs.getString(2));
                    // Skip empty rows and "#"-prefixed section headers emitted by desc
                    // (e.g. partition information) — they are not real columns.
                    if (StringUtils.isNotEmpty(colName) && !colName.contains("#")) {
                        colNames.add(colName);
                        types.add(rs.getString(2));
                    }
                    // Counts every desc row, matching the original behavior.
                    rowCount++;
                }
            }
            map.put("colNum", rowCount);
            map.put("colNames", colNames);
            map.put("types", types);
        } catch (SQLException e) {
            // Fixed: the original log call omitted the exception argument.
            log.error("查询表[{}]的列信息失败", tableName, e);
            throw new HiveException("查询表的列信息异常" + e.getMessage());
        }
        return map;
    }

    /**
     * Force-drops a database, cascading to any tables it contains.
     *
     * @param database database name (assumed trusted)
     * @throws HiveException if the drop fails
     */
    public void dropDatabaseForce(String database) {
        String sql = "drop database " + database + " cascade";
        try (Connection connection = getConnection("default");
             Statement statement = connection.createStatement()) {
            statement.execute(sql);
            log.info("强制删除数据库[{}]成功", database);
        } catch (SQLException e) {
            log.error("强制删除数据库[{}]出错", database, e);
            throw new HiveException("强制删库异常" + e.getMessage());
        }
    }

    /**
     * Drops a table from the given database, truncating it first.
     *
     * @param database  database name (assumed trusted)
     * @param tableName table name (assumed trusted)
     * @throws HiveException if any statement fails
     */
    public void dropTable(String database, String tableName) {
        String sql1 = "use " + database;
        String sql2 = "truncate table " + tableName;
        String sql3 = "drop table if exists " + tableName;
        try (Connection connection = getConnection(database);
             Statement statement = connection.createStatement()) {
            statement.execute(sql1);
            // NOTE(review): truncating before dropping mirrors the original behavior,
            // but truncate fails (and aborts the drop) when the table does not exist,
            // despite the "if exists" on the drop — confirm this ordering is intended.
            statement.execute(sql2);
            statement.execute(sql3);
            log.info("删除数据库[{}]中的表[{}]成功", database, tableName);
        } catch (SQLException e) {
            log.error("删除表[{}]失败", tableName, e);
            throw new HiveException("删除表异常" + e.getMessage());
        }
    }
}
