package com.kingsoft.dc.khaos.module.spark.util;

import com.alibaba.druid.pool.DruidDataSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.sql.DataSource;
import java.io.Serializable;
import java.sql.*;
import java.util.Properties;

/**
 * Druid-backed connection-pool utility for Hive JDBC access.
 * Created by gs on 2020-03-30.
 */
public class HiveDataSourceUtil implements Serializable {

    private static final Logger log = LoggerFactory.getLogger(HiveDataSourceUtil.class);

    // Shared, lazily-initialized Druid pool. Initialized by whichever
    // getHiveDataSource overload is called first; isInited() guards re-init.
    private static DruidDataSource hiveDataSource = new DruidDataSource();

    // Last connection handed out by getHiveConn(). Kept public static for
    // existing callers; NOTE(review): a single shared static Connection is
    // not thread-safe — concurrent callers will clobber each other.
    public static Connection conn = null;

    /**
     * Initializes (once) and returns the shared Hive connection pool using
     * built-in defaults.
     *
     * <p>No external configuration is loaded here, so the JDBC url/user/password
     * remain unset and {@code init()} will fail with a logged SQLException.
     * Prefer {@link #getHiveDataSource(Properties)} and supply the settings.
     *
     * @return the shared (possibly uninitialized) pool
     */
    public static DruidDataSource getHiveDataSource() {
        if (hiveDataSource.isInited()) {
            return hiveDataSource;
        }

        try {
            // Intentionally empty: expected keys are hive_jdbc_url,
            // hive_jdbc_username, hive_jdbc_password, hive_initialSize,
            // hive_minIdle, hive_maxActive, hive_maxWait.
            Properties dsProp = new Properties();

            // Basic properties: url, user, password.
            hiveDataSource.setUrl(dsProp.getProperty("hive_jdbc_url"));
            hiveDataSource.setUsername(dsProp.getProperty("hive_jdbc_username", ""));
            hiveDataSource.setPassword(dsProp.getProperty("hive_jdbc_password"));

            // Pool sizing. Defaults guard against NumberFormatException when a
            // key is absent — the original parsed null and always crashed here,
            // and NumberFormatException escaped the catch (SQLException) below.
            hiveDataSource.setInitialSize(Integer.parseInt(dsProp.getProperty("hive_initialSize", "20")));
            hiveDataSource.setMinIdle(Integer.parseInt(dsProp.getProperty("hive_minIdle", "20")));
            hiveDataSource.setMaxActive(Integer.parseInt(dsProp.getProperty("hive_maxActive", "500")));

            // Max time (ms) to wait when borrowing a connection from the pool.
            hiveDataSource.setMaxWait(Integer.parseInt(dsProp.getProperty("hive_maxWait", "60000")));

            applyPoolTuning(hiveDataSource);

            hiveDataSource.init();
        } catch (SQLException e) {
            log.error("Failed to initialize Hive DruidDataSource", e);
            closeHiveDataSource();
        }
        return hiveDataSource;
    }

    /**
     * Initializes (once) and returns the shared Hive connection pool from
     * caller-supplied properties.
     *
     * <p>Expected keys: {@code hive.driver}, {@code hive.jdbc.url},
     * {@code hive.jdbc.username}, {@code hive.jdbc.password},
     * {@code hive.initialSize}, {@code hive.minIdle}, {@code hive.maxActive},
     * {@code hive.maxWait}. Missing sizing keys fall back to sane defaults.
     *
     * @param dsProp pool configuration properties
     * @return the shared (possibly uninitialized) pool
     */
    public static DruidDataSource getHiveDataSource(Properties dsProp) {
        if (hiveDataSource.isInited()) {
            return hiveDataSource;
        }

        try {
            // Basic properties: driver, url, user, password.
            hiveDataSource.setDriverClassName(dsProp.getProperty("hive.driver"));
            hiveDataSource.setUrl(dsProp.getProperty("hive.jdbc.url"));
            hiveDataSource.setUsername(dsProp.getProperty("hive.jdbc.username"));
            hiveDataSource.setPassword(dsProp.getProperty("hive.jdbc.password"));

            // Pool sizing; defaults prevent NumberFormatException on missing keys.
            hiveDataSource.setInitialSize(Integer.parseInt(dsProp.getProperty("hive.initialSize", "20")));
            hiveDataSource.setMinIdle(Integer.parseInt(dsProp.getProperty("hive.minIdle", "20")));
            hiveDataSource.setMaxActive(Integer.parseInt(dsProp.getProperty("hive.maxActive", "500")));
            // Max time (ms) to wait when borrowing a connection from the pool.
            hiveDataSource.setMaxWait(Integer.parseInt(dsProp.getProperty("hive.maxWait", "60000")));

            applyPoolTuning(hiveDataSource);

            hiveDataSource.init();
        } catch (SQLException e) {
            log.error("Failed to initialize Hive DruidDataSource from properties", e);
            closeHiveDataSource();
        }
        return hiveDataSource;
    }

    /**
     * Applies the eviction and PSCache tuning shared by both factory overloads.
     *
     * @param ds the pool to configure (not yet initialized)
     */
    private static void applyPoolTuning(DruidDataSource ds) {
        // Interval (ms) between idle-connection eviction runs.
        ds.setTimeBetweenEvictionRunsMillis(60000);
        // Minimum time (ms) a connection must sit idle before it may be evicted.
        ds.setMinEvictableIdleTimeMillis(300000);
        // Idle validation disabled: no validation query is configured for Hive.
        ds.setTestWhileIdle(false);
        // Enable PSCache and cap cached prepared statements per connection.
        ds.setPoolPreparedStatements(true);
        ds.setMaxPoolPreparedStatementPerConnectionSize(20);
    }

    /**
     * Closes the shared Hive connection pool.
     */
    public static void closeHiveDataSource() {
        if (hiveDataSource != null) {
            hiveDataSource.close();
        }
    }

    /**
     * Borrows a connection from the (lazily initialized) Hive pool.
     *
     * @return the borrowed connection, or the previous value of {@link #conn}
     *         (possibly {@code null}) if borrowing failed
     */
    public static Connection getHiveConn() {
        try {
            hiveDataSource = getHiveDataSource();
            conn = hiveDataSource.getConnection();
        } catch (SQLException e) {
            // Keep the stack trace; string-concatenating the exception loses it.
            log.error("Failed to obtain Hive connection", e);
        }
        return conn;
    }

    /**
     * Closes the connection most recently obtained via {@link #getHiveConn()}.
     */
    public static void closeConn() {
        try {
            if (conn != null) {
                conn.close();
            }
        } catch (SQLException e) {
            log.error("Failed to close Hive connection", e);
        }
    }

    /**
     * Manual smoke test: connects to a Hive server and runs a statement.
     * Requires a reachable HiveServer2 instance; not a unit test.
     */
    public static void main(String[] args) throws Exception {
        System.setProperty("HADOOP_USER_NAME", "hive");
        Properties dsProp = new Properties();
        dsProp.put("hive.jdbc.url", "jdbc:hive2://10.69.67.36:10000");
        dsProp.put("hive.initialSize", "20");
        dsProp.put("hive.minIdle", "20");
        dsProp.put("hive.maxActive", "500");
        dsProp.put("hive.maxWait", "60000");

        try {
            DataSource ds = HiveDataSourceUtil.getHiveDataSource(dsProp);
            String sql = "insert into table test select * from default.test";
            // try-with-resources: the original leaked Connection, Statement,
            // and ResultSet on every run.
            try (Connection connection = ds.getConnection();
                 Statement statm = connection.createStatement();
                 ResultSet rs = statm.executeQuery(sql)) {
                while (rs.next()) {
                    System.out.println(rs.getString(1));
                }
            }
        } catch (SQLFeatureNotSupportedException e) {
            System.out.println("hao.....");
        }
    }
}
