package com.wyt.spark.hive;

import org.apache.commons.lang3.StringUtils;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.StructField;

import java.sql.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * @ClassName HiveMetaUtils
 * @Description: Utilities for reading Hive table metadata (column schema, storage
 *               location/format, and the full output of {@code DESC FORMATTED}).
 * @Author wangyongtao
 * @Date 2022/12/9 19:25
 * @Version 1.0
 **/
public class HiveMetaUtils {

    /**
     * Prints the column list, storage location and input format of the default demo table.
     * <p>
     * Kept for backward compatibility; delegates to the parameterized overload.
     *
     * @param sparkSession active Spark session with Hive support enabled
     */
    public static void getLocationAndFormat(SparkSession sparkSession) {
        getLocationAndFormat(sparkSession, "gdm.gdm_ggycode_kolap_test_fact_de");
    }

    /**
     * Prints the column list of {@code tableName} as "(c1,c2,...)", then scans the
     * output of {@code DESC FORMATTED} for the table's Location and InputFormat rows,
     * printing the location and a short tag (orc/text/parquet/sequence) for the format.
     *
     * @param sparkSession active Spark session with Hive support enabled
     * @param tableName    fully qualified table name, e.g. "db.table"
     */
    public static void getLocationAndFormat(SparkSession sparkSession, String tableName) {
        StructField[] fields = sparkSession.table(tableName).schema().fields();

        // Build "(col1,col2,...)" — StringBuilder is enough here (no shared mutation).
        StringBuilder sb = new StringBuilder("(");
        for (int i = 0; i < fields.length; i++) {
            if (i > 0) {
                sb.append(",");
            }
            sb.append(fields[i].name());
        }
        sb.append(")");
        System.out.println(sb.toString());

        String location = null;
        List<Row> dataset = sparkSession.sql("desc formatted " + tableName).collectAsList();
        for (Row row : dataset) {
            if ("Location".equals(row.get(0))) {
                location = String.valueOf(row.get(1));
                System.out.println(location);
            }
            if ("InputFormat".equals(row.get(0)) && StringUtils.isNotBlank(row.getString(1))) {
                // Read the format class name once instead of re-fetching per check.
                String inputFormat = row.getString(1);
                if (inputFormat.contains("orc")) {
                    System.out.println("orc");
                }
                if (inputFormat.contains("text")) {
                    System.out.println("text");
                }
                if (inputFormat.contains("parquet")) {
                    System.out.println("parquet");
                }
                if (inputFormat.contains("sequence")) {
                    System.out.println("sequence");
                }
            }
            System.out.println(row.get(0) + " : " + row.get(1));
        }
    }


    /**
     * Parses the rows of a Hive {@code DESC FORMATTED <table>} result set into a map of
     * metadata sections.
     * <p>
     * The result map contains (keys are fixed):
     * <ul>
     *   <li>"columns" — List&lt;Map&lt;String,String&gt;&gt;, one map per column row</li>
     *   <li>"detailTableInfo", "tableParams", "storageInfo", "storageDescParams" —
     *       flat key/value maps from the corresponding sections</li>
     *   <li>"constraints" — Map of constraint kind ("notnull"/"default") to its entries</li>
     *   <li>"partitions" — List&lt;Map&lt;String,String&gt;&gt;, one map per partition column</li>
     * </ul>
     * NOTE(review): the parser advances the cursor itself (including extra
     * {@code resultSet.next()} calls inside the constraint branches), so the exact
     * row layout of Hive's DESC FORMATTED output is assumed — verify against the
     * Hive version in use. The caller owns (and must close) the ResultSet.
     *
     * @param resultSet cursor positioned before the first row of DESC FORMATTED output
     * @return map of section name to parsed section content (see above)
     * @throws Exception propagated from JDBC access
     */
    public static Map<String, Object> getTableInfo(ResultSet resultSet) throws Exception {
        Map<String, Object> result = new HashMap<>();

        // Collections for the metadata of each DESC FORMATTED section.
        Map<String, String> detailTableInfo = new HashMap<>();
        Map<String, String> tableParams = new HashMap<>();
        Map<String, String> storageInfo = new HashMap<>();
        Map<String, String> storageDescParams = new HashMap<>();
        Map<String, Map<String, String>> constraints = new HashMap<>();
        List<Map<String, String>> columns = new ArrayList<>();
        List<Map<String, String>> partitions = new ArrayList<>();

        // Maps a section header line (e.g. "# Storage Information") to an internal module tag.
        Map<String, String> moduleMap = getDescTableModule();

        // Walk the result set once, switching parse mode whenever a section header appears.
        String infoModule = "";
        while (resultSet.next()) {

            String title = resultSet.getString(1).trim();

            // Skip blank separator rows and the "# Constraints" umbrella header.
            if (("".equals(title) && resultSet.getString(2) == null) || "# Constraints".equals(title)) continue;

            if (moduleMap.containsKey(title)) {
                // "# Partition Information" repeats the "# col_name" header; stay in
                // partition mode instead of switching back to column mode.
                if ("partition_info".equals(infoModule) && "col_name".equals(moduleMap.get(title))) continue;
                ;
                infoModule = moduleMap.get(title);
                continue;
            }

            String key = null;
            String value = null;
            switch (infoModule) {
                case "col_name":
                    // One map per column row, keyed by the result-set column labels.
                    Map<String, String> map = new HashMap<>();
                    int colNum = resultSet.getMetaData().getColumnCount();
                    for (int col = 0; col < colNum; col++) {
                        String columnName = resultSet.getMetaData().getColumnName(col + 1);
                        String columnValue = resultSet.getString(columnName);
                        map.put(columnName, columnValue);
                    }
                    columns.add(map);
                    break;

                case "table_info":
                    // Key/value in columns 1 and 2; keys end with ':' which is stripped.
                    key = resultSet.getString(1).trim().replace(":", "");
                    value = resultSet.getString(2).trim();
                    detailTableInfo.put(key, value);
                    break;

                case "table_param":
                    // Table parameters are shifted one column right (col 1 is blank).
                    key = resultSet.getString(2).trim().replace(":", "");
                    value = resultSet.getString(3).trim();
                    tableParams.put(key, value);
                    break;

                case "storage_info":
                    key = resultSet.getString(1).trim().replace(":", "");
                    value = resultSet.getString(2).trim();
                    storageInfo.put(key, value);
                    break;

                case "storage_desc":
                    // Same right-shifted layout as table parameters.
                    key = resultSet.getString(2).trim().replace(":", "");
                    value = resultSet.getString(3).trim();
                    storageDescParams.put(key, value);
                    break;

                case "not_null_constraint":
                    // Layout (per constraint): optional "Table:" row, then
                    // "Constraint Name: <name>" row, then "Column Name: <col>" row.
                    // The cursor is advanced manually to consume the extra rows.
                    Map<String, String> notNullMap = constraints.getOrDefault("notnull", new HashMap<>());
                    if ("Table:".equals(title.trim())) resultSet.next();

                    String notNullConstraintName = resultSet.getString(2).trim();
                    resultSet.next();

                    key = resultSet.getString(2).trim();
                    // column name -> constraint name
                    notNullMap.put(key, notNullConstraintName);

                    constraints.put("notnull", notNullMap);
                    break;

                case "default_constraint":
                    // Same multi-row layout as not-null constraints, plus a
                    // "Column Name:<col>" / "Default Value:<val>" pair on the last row.
                    Map<String, String> defaultMap = constraints.getOrDefault("default", new HashMap<>());
                    if ("Table:".equals(title.trim())) resultSet.next();

                    String defaultConstraintName = resultSet.getString(2).trim();
                    resultSet.next();

                    key = resultSet.getString(1).trim().split(":")[1];
                    value = resultSet.getString(2).trim();
                    // Default value text follows the first ':' in the cell.
                    int valueIndex = value.indexOf(":");
                    value = value.substring(valueIndex + 1);
                    defaultMap.put(key, value);

                    // Record the constraint's name alongside the default value.
                    defaultMap.put(key + "_constraintName", defaultConstraintName);

                    constraints.put("default", defaultMap);
                    break;

                case "partition_info":
                    // Same shape as "col_name": one map per partition-column row.
                    Map<String, String> partitionMap = new HashMap<>();
                    int partitionColNum = resultSet.getMetaData().getColumnCount();
                    for (int col = 0; col < partitionColNum; col++) {
                        String columnName = resultSet.getMetaData().getColumnName(col + 1);
                        String columnValue = resultSet.getString(columnName);
                        partitionMap.put(columnName, columnValue);
                    }
                    partitions.add(partitionMap);
                    break;

                default:
                    System.out.print("unknown module,please update method to support it : " + infoModule);
            }

        }

        result.put("columns", columns);
        result.put("detailTableInfo", detailTableInfo);
        result.put("tableParams", tableParams);
        result.put("storageInfo", storageInfo);
        result.put("storageDescParams", storageDescParams);
        result.put("constraints", constraints);
        result.put("partitions", partitions);

        return result;
    }

    /**
     * Builds the lookup from DESC FORMATTED section-header lines to the internal
     * module tags used by {@code getTableInfo}'s parsing switch.
     *
     * @return map of header text to module tag
     */
    private static Map<String, String> getDescTableModule() {
        // Header text as emitted by Hive on the left, parser mode tag on the right.
        String[][] headerToModule = {
                {"# col_name", "col_name"},
                {"# Detailed Table Information", "table_info"},
                {"Table Parameters:", "table_param"},
                {"# Storage Information", "storage_info"},
                {"Storage Desc Params:", "storage_desc"},
                {"# Not Null Constraints", "not_null_constraint"},
                {"# Default Constraints", "default_constraint"},
                {"# Partition Information", "partition_info"},
        };

        Map<String, String> descTableModule = new HashMap<>();
        for (String[] pair : headerToModule) {
            descTableModule.put(pair[0], pair[1]);
        }
        return descTableModule;
    }


    /**
     * Ad-hoc entry point: connects to a HiveServer2 instance, runs
     * {@code desc formatted test_table}, parses it with {@link #getTableInfo(ResultSet)}
     * and prints the number of parsed sections.
     * <p>
     * Connection details are hard-coded for local testing.
     */
    public static void main(String[] args) {
        String driverName = "org.apache.hive.jdbc.HiveDriver";
        String url = "jdbc:hive2://192.168.71.135:10000/test_db";
        String sql = "desc formatted test_table";

        try {
            Class.forName(driverName);

            // try-with-resources closes ResultSet, PreparedStatement and Connection
            // even if parsing fails (the original leaked all three).
            try (Connection conn = DriverManager.getConnection(url);
                 PreparedStatement ps = conn.prepareStatement(sql);
                 ResultSet resultSet = ps.executeQuery()) {

                Map<String, Object> result = getTableInfo(resultSet);

                System.out.println(result.size());
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

}