package com.cbi.clickhouse.component;

import cn.hutool.core.io.FileUtil;
import cn.hutool.core.text.CharSequenceUtil;
import cn.hutool.db.Db;
import cn.hutool.db.DbUtil;
import cn.hutool.db.Entity;
import cn.hutool.db.ds.simple.SimpleDataSource;
import com.alibaba.druid.pool.DruidDataSource;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Accessors;
import lombok.extern.slf4j.Slf4j;

import javax.sql.DataSource;
import java.io.File;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;

/**
 * @author Ivan.Qu reviewcode@163.com
 * @date 9:54 上午 2022/3/17
 */
@Slf4j
public class StatisticHiveTable {
    private final static String USER_NAME = "hdfs";
    private final static String PASSWORD = "hdfs";
    private final static String URL = "jdbc:hive2://172.16.251.14:10000";
    private final static String COUNT_FIELD = "_c0";
    private final static String DEFAULT_CHARSET = "utf-8";
    private final static String SPLIT_SYMBOL = ".";
    private final static String DRIVER_NAME = "org.apache.hive.jdbc.HiveDriver";
    private final static String NEED_COUNT_FILE_PATH = "/Users/reviewcode/cbi/txt/hive需要统计的库表";
    private final static String FILE_PATH_NAME = "/Users/reviewcode/cbi/诉讼数据条数统计.txt";
    private final static int CORE_POOL_SIZE = 10;
    private final static int MAXIMUM_POOL_SIZE = 15;
    private final static int KEEP_ALIVE_TIME = 3;
    private static final AtomicLong COUNT = new AtomicLong();
    private static List<String> NEED_SCHEMA_TABLE = new ArrayList<>();
    /**
     * 1000张表的统计,最大任务也就1000个,不会内存溢出.
     * 任务被拒绝后,该任务会被重新放到任务列表中
     */
    @SuppressWarnings("all")
    private static final ThreadPoolExecutor THREAD_POOL_EXECUTOR = new ThreadPoolExecutor(
            CORE_POOL_SIZE,
            MAXIMUM_POOL_SIZE,
            KEEP_ALIVE_TIME,
            TimeUnit.SECONDS,
            new LinkedBlockingQueue<>(),
            Executors.defaultThreadFactory(),
            new ThreadPoolExecutor.AbortPolicy()
    );


    /**
     * 在txt中获取需要统计的table
     */
    private static void loadNeedCountTableName() {
        List<String> needCountTableNameList = FileUtil.readLines(new File(NEED_COUNT_FILE_PATH), DEFAULT_CHARSET);
        NEED_SCHEMA_TABLE = needCountTableNameList
                .stream()
                .filter(CharSequenceUtil::isNotEmpty)
                .filter(v -> v.contains(SPLIT_SYMBOL))
                .map(SchemaAndTable::new)
                .map(SchemaAndTable::toString)
                .collect(Collectors.toList());
    }

    /**
     * 修改<code>matchSchemaAndTables()</code>方法的诉讼类型,即可产生相应的数据
     *
     * @param args args
     */
    public static void main(String[] args) {
        loadNeedCountTableName();
        List<SchemaAndTable> schemaAndTableNames = matchSchemaAndTables(getSchemaAndTableNames());
        Db useDb = DbUtil.use(getDruidDataSource());
        List<CountItems> countResults = execSql(useDb, schemaAndTableNames);

        THREAD_POOL_EXECUTOR.shutdown();
        log.info("异步任务执行完毕,size:{}", countResults.size());
        List<String> countItemList = countResults
                .stream()
                .sorted(Comparator.comparingLong(CountItems::getSeq))
                .map(CountItems::toString)
                .collect(Collectors.toList());

        FileUtil.del(new File(FILE_PATH_NAME));
        FileUtil.writeLines(countItemList, new File(FILE_PATH_NAME), DEFAULT_CHARSET);
    }


    /**
     * 匹配自己需要表库
     *
     * @param schemaAndTableList 全部的库表
     * @return 自己需要的库表
     */
    private static List<SchemaAndTable> matchSchemaAndTables(List<SchemaAndTable> schemaAndTableList) {
        List<SchemaAndTable> tableSchemaList = new ArrayList<>();
        for (SchemaAndTable schemaAndTable : schemaAndTableList) {

            if (NEED_SCHEMA_TABLE.contains(schemaAndTable.toString())) {
                tableSchemaList.add(schemaAndTable);
            }
        }
        return tableSchemaList;
    }


    /**
     * 异步执行sql
     *
     * @param execute        dbUtils
     * @param schemaAndTable 库表信息
     * @return 异步执行任务
     */
    private static CompletableFuture<CountItems> execSql(Db execute, SchemaAndTable schemaAndTable) {
        return CompletableFuture.supplyAsync(() -> {
            String schemaName = schemaAndTable.getSchemaName();
            String tableName = schemaAndTable.getTableName();
            String schemaAndTableName = String.format("%s.%s", schemaName, tableName);
            String sql = String.format("select count(1) from %s", schemaAndTableName);
            try {
                Entity entity = execute.queryOne(sql);
                Long num = entity.getLong(COUNT_FIELD);
                CountItems countItems = new CountItems(null, schemaAndTableName, num, COUNT.incrementAndGet());
                log.info("{}", countItems);
                return countItems;
            } catch (SQLException e) {
                log.error("{}\n:{}", sql, e.getMessage());
                return null;
            }
        }, THREAD_POOL_EXECUTOR);
    }

    /**
     * 执行sql
     * 执行失败,会一直添加任务重试
     *
     * @param execute            数据库
     * @param schemaAndTableList 所有的库表
     */
    private static List<CountItems> execSql(Db execute, List<SchemaAndTable> schemaAndTableList) {
        List<CompletableFuture<CountItems>> allTaskList = schemaAndTableList
                .stream()
                .map(v -> execSql(execute, v))
                .collect(Collectors.toList());

        return allTaskList
                .stream()
                .map(CompletableFuture::join)
                .filter(Objects::nonNull)
                .collect(Collectors.toList());
    }

    /**
     * 获取所有的表名,库名
     * <code>metaData.getTables</code>可以优化,只或许自己需要的表库,速度更快
     *
     * @return 库名\表名
     */
    private static List<SchemaAndTable> getSchemaAndTableNames() {
        List<SchemaAndTable> schemaAndTableList = new ArrayList<>();

        try (Connection connection = getConnection()) {
            DatabaseMetaData metaData = connection.getMetaData();
            ResultSet tablesResultSet = metaData.getTables(null, null, null, new String[]{"TABLE"});
            while (tablesResultSet.next()) {
                String tableName = tablesResultSet.getString("TABLE_NAME");
                String schemaName = tablesResultSet.getString("TABLE_SCHEM");
                SchemaAndTable schemaAndTable = new SchemaAndTable(schemaName, tableName);
                schemaAndTableList.add(schemaAndTable);
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
        log.info("{} schema tables读取完毕", schemaAndTableList.size());
        return schemaAndTableList;
    }


    /**
     * 获取数据库连接
     * <em>使用完后需关闭</em>
     *
     * @return 数据库链接
     * @throws SQLException 链接异常
     */
    private static Connection getConnection() throws SQLException {
        DataSource ds = new SimpleDataSource(URL, USER_NAME, PASSWORD, DRIVER_NAME);
        return ds.getConnection();
    }

    @Data
    @Accessors(chain = true)
    @AllArgsConstructor
    static class SchemaAndTable {
        /**
         * 库名
         */
        private String schemaName;
        /**
         * 表名
         */
        private String tableName;

        SchemaAndTable(String schemaAndTableName) {
            String[] schemaTableSplit = schemaAndTableName.split("\\.");
            this.schemaName = schemaTableSplit[0].trim();
            this.tableName = schemaTableSplit[1].trim();
        }

        @Override
        public String toString() {
            return String.format("%s.%s", schemaName, tableName);
        }
    }

    /**
     * 每个条统计结果
     */
    @Data
    @Accessors(chain = true)
    @AllArgsConstructor
    @NoArgsConstructor
    static class CountItems {
        /**
         * 格式化后的schemaName和tableName
         * db_df_lawsuits_v1_0.t_lawsuits_rolerelations->db_df_lawsuits_v.t_lawsuits_rolerelations
         */
        private String formatSchemaAndTableName;
        /**
         * 库名和表名
         */
        private String schemaAndTableName;
        /**
         * 结果
         */
        private Long num;
        /**
         * 序号
         */
        private Long seq;

        @Override
        public String toString() {
            return String.format("%s. %s=%s", seq, schemaAndTableName, num);
        }
    }

    /**
     * druid连接池配置
     *
     * @return 数据库连接
     */
    private static DruidDataSource getDruidDataSource() {
        DruidDataSource ds = new DruidDataSource();
        ds.setUrl(URL);
        ds.setUsername(USER_NAME);
        ds.setPassword(PASSWORD);
        ds.setDriverClassName(DRIVER_NAME);
        return ds;
    }
}
