package com.seaboxdata.dao;

import com.seaboxdata.entity.HiveDataInfo;
import com.seaboxdata.entity.TableConfig;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.time.DateUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.Date;

/**
 * @author
 * @create 2020-12-29 16:49
 **/
@Component
@Slf4j
public class HiveDataDao {
    private static String driverName = "org.apache.hive.jdbc.HiveDriver";

    @Value("${hive.url}")
    private String url;

    @Value("${hive.keytab.path}")
    private String keyTabPath;

    @Value("${hive.principal}")
    private String principal;

    @Value("${krb5.path}")
    private String krb5Path;

    @Value("${hive.database}")
    private String database;

    private Connection connection;

    public Connection getConnection() throws Exception {
        if (connection == null) {
            if (System.getProperty("os.name").toLowerCase().startsWith("win")) {
                this.keyTabPath = "F:/dfjx/keytab/hive.service.keytab";
                this.krb5Path = "F:/dfjx/keytab/krb5.conf";
            }
            System.setProperty("java.security.krb5.conf", krb5Path);
            System.setProperty("sun.security.krb5.debug", "false");
            Configuration conf = new Configuration();
            conf.set("hadoop.security.authentication", "Kerberos");
            UserGroupInformation.setConfiguration(conf);
            conf.set("keytab.file", keyTabPath);
            conf.set("kerberos.principal", principal);
            UserGroupInformation.loginUserFromKeytab(principal, keyTabPath);
            this.connection = DriverManager.getConnection(url);
        }
        return connection;
    }

    public HiveDataInfo getDataInfo(TableConfig config) {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
        Date today = new Date();
        Date dataToday = DateUtils.addDays(today, -1);
        Date beforeDay = DateUtils.addDays(dataToday, -1);
        String dataTodayStr = sdf.format(dataToday);
        String beforeDayStr = sdf.format(beforeDay);
        boolean isExistTable = true;
        HiveDataInfo hiveDataInfo = new HiveDataInfo();
        Connection connection = null;
        try {
            connection = getConnection();
            Statement statement = connection.createStatement();
            try {
                statement.executeQuery("desc " + database + "." + config.getTableName());
            } catch (Exception e) {
                hiveDataInfo.setDataSize(0);
                hiveDataInfo.setDataSizeIncrement(0);
                hiveDataInfo.setNote("database not have this table,tableName:" + database+"."+config.getTableName()+"\n"+e.getMessage());
                isExistTable = false;
                log.error(e.getMessage());
                e.printStackTrace();
            }
            if (isExistTable) {
                //如果是全量,获取最新分区数据作为总量
                if ("N".equals(config.getIsIncrement())) {
                    //总量处理逻辑
                    int totalCount = 0;
                    ResultSet increaseSet = statement.executeQuery("select count(0) count from " + database + "." + config.getTableName() + " where data_dt_iso = '" + dataTodayStr + "'");
                    while (increaseSet.next()) {
                        totalCount = increaseSet.getInt("count");
                        hiveDataInfo.setDataSize(totalCount);
                    }

                    //获取当日增量,当前分区总数减去昨天分区总数量
                    ResultSet yesterdaySet = statement.executeQuery("select count(0) count from " + database + "." + config.getTableName() + " where data_dt_iso = '" + beforeDayStr + "'");
                    while (yesterdaySet.next()) {
                        hiveDataInfo.setDataSizeIncrement(totalCount - increaseSet.getInt("count"));
                    }

                    //最新的时间为最新的分区
                } else if ("Y".equals(config.getIsIncrement())) {
                    //总数据量就是总数据量
                    ResultSet totalCountSet = statement.executeQuery("select count(0) as totalCount from " + database + "." + config.getTableName());
                    while (totalCountSet.next()) {
                        hiveDataInfo.setDataSize(totalCountSet.getInt("totalCount"));
                    }

                    //增量为当前今日分区数量
                    ResultSet increaseSet = statement.executeQuery("select count(0) count from " + database + "." + config.getTableName() + " where data_dt_iso = '" + dataTodayStr + "'");
                    while (increaseSet.next()) {
                        hiveDataInfo.setDataSizeIncrement(increaseSet.getInt("count"));
                    }

                    //最新时间
                } else {
                    hiveDataInfo.setDataSize(0);
                    hiveDataInfo.setDataSizeIncrement(0);
                    hiveDataInfo.setNote("isIncresement must is Y or N,not allowed null");
                }
            }

        } catch (Exception e) {
            log.error("get hiveDataInfo failed,tableName:" + config.getTableName());
            e.printStackTrace();
        }
        return hiveDataInfo;
    }
}
