package com.yifeng.repo.micro.service.server.dao;

import com.alibaba.druid.DbType;
import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.util.JdbcUtils;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.MapperFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.databind.json.JsonMapper;
import com.gomcarter.frameworks.base.common.CollectionUtils;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.tuple.Pair;
import org.slf4j.helpers.MessageFormatter;

import javax.sql.DataSource;
import java.lang.reflect.Field;
import java.sql.*;
import java.util.*;
import java.util.function.Consumer;
import java.util.stream.Collectors;

/**
 * JDBC SQL execution utility built on Druid data sources: streaming reads,
 * simple CRUD helpers, and batched insert/upsert for MySQL and Oracle.
 * @author maoyun
 */
@Slf4j
public class DruidJdbcExecutor {
    /**
     * Number of rows pulled per cursor round-trip (and per consumer batch)
     * when streaming a result set. Default 1000.
     */
    private static final int DEFAULT_FETCH_SIZE = 1000;
    /**
     * Number of rows per JDBC batch for bulk insert/update operations.
     */
    private static final int DEFAULT_INSERT_SIZE = 200;

    // Shared Jackson mapper used to convert row maps into entity objects:
    // lenient on unknown properties, omits nulls when serializing, tolerates
    // empty beans, and sorts properties alphabetically for stable output.
    private static final ObjectMapper MAPPER = JsonMapper.builder()
            .disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES)
            .serializationInclusion(JsonInclude.Include.NON_NULL)
            .disable(SerializationFeature.FAIL_ON_EMPTY_BEANS)
            .enable(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY)
            .build();


    /**
     * Streams the full result set of {@code sql} with the default batch size,
     * handing each batch to {@code consumer} as raw row maps.
     *
     * @param dataSource data source to query
     * @param sql        select statement to execute
     * @param consumer   callback invoked once per batch of rows
     */
    public static void fetchAllData(DruidDataSource dataSource, String sql, Consumer<List<Map>> consumer) {
        fetchAllData(dataSource, sql, Map.class, consumer);
    }

    /**
     * Streams the full result set of {@code sql} with the default batch size,
     * converting every row into {@code clazz} before the batch reaches
     * {@code consumer}.
     *
     * @param dataSource data source to query
     * @param sql        select statement to execute
     * @param clazz      entity class each row is deserialized into
     * @param consumer   callback invoked once per batch of entities
     * @param <T>        entity type
     */
    public static <T> void fetchAllData(DruidDataSource dataSource, String sql, Class<T> clazz, Consumer<List<T>> consumer) {
        fetchAllData(dataSource, sql, DEFAULT_FETCH_SIZE, clazz, consumer);
    }

    /**
     * Streams the full result set of {@code sql} as raw row maps, resolving
     * the data source from the given connection config first.
     */
    public static void fetchAllDataByConfig(DruidDataSourceManager.Config config, String sql, Consumer<List<Map>> consumer) {
        DruidDataSource dataSource = DruidDataSourceManager.getDataSource(config);
        fetchAllData(dataSource, sql, DEFAULT_FETCH_SIZE, Map.class, consumer);
    }

    /**
     * Streams the full result set of {@code sql} as {@code clazz} entities,
     * resolving the data source from the given connection config first.
     */
    public static <T> void fetchAllDataByConfig(DruidDataSourceManager.Config config, String sql, Class<T> clazz, Consumer<List<T>> consumer) {
        DruidDataSource dataSource = DruidDataSourceManager.getDataSource(config);
        fetchAllData(dataSource, sql, DEFAULT_FETCH_SIZE, clazz, consumer);
    }

    /**
     * Streams the full result set of {@code sql} with a forward-only,
     * read-only cursor, invoking {@code consumer} once per batch of
     * {@code fetchSize} rows.
     *
     * <p>NOTE(review): with MySQL Connector/J, {@code setFetchSize} only
     * streams when the JDBC URL enables {@code useCursorFetch=true};
     * otherwise the driver buffers the whole result set — confirm the pool's
     * connection URL.
     *
     * <p>Exceptions are logged and swallowed (best-effort contract kept from
     * the original implementation); batches already delivered to the consumer
     * are not undone. The log now carries the full stack trace instead of
     * only {@code e.getMessage()}.
     *
     * @param dataSource data source to query
     * @param sql        select statement to execute
     * @param fetchSize  rows fetched per round-trip and per callback batch
     * @param clazz      entity class per row; {@code Map.class} skips conversion
     * @param consumer   callback invoked once per batch
     * @param <T>        entity type
     */
    public static <T> void fetchAllData(DruidDataSource dataSource, String sql, int fetchSize, Class<T> clazz, Consumer<List<T>> consumer) {
        Connection connection = null;
        PreparedStatement ps = null;
        ResultSet resultSet = null;
        try {
            // 1. Open a forward-only, read-only cursor with the requested fetch size.
            connection = dataSource.getConnection();
            ps = connection.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
            ps.setFetchSize(fetchSize);
            // Null-safe dialect check (the original could NPE on a null dbType).
            if (DbType.mysql.name().equals(dataSource.getDbType())) {
                ps.setFetchDirection(ResultSet.FETCH_FORWARD);
            }
            resultSet = ps.executeQuery();

            // 2. Materialize each row as an ordered column-label -> value map.
            ResultSetMetaData rsMeta = resultSet.getMetaData();
            int columnCount = rsMeta.getColumnCount();
            List<Map<String, Object>> rows = new ArrayList<>(fetchSize);
            while (resultSet.next()) {
                Map<String, Object> row = new LinkedHashMap<>();
                for (int i = 1; i <= columnCount; i++) {
                    row.put(rsMeta.getColumnLabel(i), resultSet.getObject(i));
                }
                rows.add(row);
                // 3. Hand over a full batch and reuse the buffer.
                if (rows.size() >= fetchSize) {
                    deliverBatch(rows, clazz, consumer);
                    rows.clear();
                }
            }
            // Flush the trailing partial batch, if any.
            if (CollectionUtils.isNotEmpty(rows)) {
                deliverBatch(rows, clazz, consumer);
            }
        } catch (Exception e) {
            log.error("db:{} sql:{} fetchSize:{} error", dataSource.getDbType(), sql, fetchSize, e);
        } finally {
            JdbcUtils.close(resultSet);
            JdbcUtils.close(ps);
            JdbcUtils.close(connection);
        }
    }

    /**
     * Passes one batch to the consumer, converting rows to {@code clazz}
     * unless the caller asked for raw {@code Map} rows.
     */
    @SuppressWarnings("unchecked")
    private static <T> void deliverBatch(List<Map<String, Object>> rows, Class<T> clazz, Consumer<List<T>> consumer) {
        if (clazz == Map.class) {
            consumer.accept((List<T>) (List<?>) rows);
        } else {
            consumer.accept(rows.stream().map(row -> MAPPER.convertValue(row, clazz)).collect(Collectors.toList()));
        }
    }


    /**
     * Runs a parameterized query and returns the raw result rows.
     *
     * @throws RuntimeException wrapping any {@link SQLException}
     */
    public static List<Map<String, Object>> select(DataSource ds, String sql, Object... params) {
        final List<Map<String, Object>> rows;
        try {
            rows = JdbcUtils.executeQuery(ds, sql, params);
        } catch (SQLException e) {
            log.error("select error:{}", e.getMessage(), e);
            throw new RuntimeException(e);
        }
        return rows;
    }

    /**
     * Runs a query expected to match at most one row and converts it to {@code clazz}.
     *
     * @return the single converted row, or {@code null} when nothing matched
     * @throws RuntimeException when the query matches two or more rows
     */
    public static <T> T selectOne(DataSource ds, String sql, Class<T> clazz, Object... params) {
        List<Map<String, Object>> rows = select(ds, sql, params);
        switch (rows.size()) {
            case 0:
                return null;
            case 1:
                return MAPPER.convertValue(rows.get(0), clazz);
            default:
                throw new RuntimeException("selectOne return 2 or more row: " + Arrays.toString(params));
        }
    }

    /**
     * Runs a parameterized query and converts every row into {@code clazz}.
     *
     * @return converted rows, or an empty list when nothing matched
     */
    public static <T> List<T> select(DataSource ds, String sql, Class<T> clazz, Object... params) {
        List<Map<String, Object>> rows = select(ds, sql, params);
        if (rows.isEmpty()) {
            return Collections.emptyList();
        }
        List<T> entities = new ArrayList<>(rows.size());
        for (Map<String, Object> row : rows) {
            entities.add(MAPPER.convertValue(row, clazz));
        }
        return entities;
    }

    /**
     * Runs an aggregate query that must yield exactly one row with exactly
     * one column and returns that value as an int.
     *
     * @throws RuntimeException when the result is not a single 1-column row
     */
    public static int count(DataSource ds, String sql, Object... params) {
        List<Map<String, Object>> rows = select(ds, sql, params);
        if (rows.size() != 1) {
            throw new RuntimeException("count query rows is not 1, but " + rows.size());
        }
        Map<String, Object> row = rows.get(0);
        if (row.size() != 1) {
            throw new RuntimeException("count query columns is not 1, but " + row.size());
        }
        Object value = row.values().iterator().next();
        return Integer.parseInt(value.toString());
    }

    /**
     * Inserts one row built from the given column -> value map.
     *
     * @return always 1, because druid's {@code insertToTable} returns void
     * @throws RuntimeException wrapping any {@link SQLException}
     */
    public static int insert(DataSource ds, String table, Map<String, Object> data) {
        try {
            JdbcUtils.insertToTable(ds, table, data);
        } catch (SQLException e) {
            log.error("insert error:{}", e.getMessage(), e);
            throw new RuntimeException(e);
        }
        // temp value due to druid JdbcUtils does not return value
        return 1;
    }

    /**
     * Executes an update/insert/delete statement with bind parameters.
     *
     * @return number of affected rows
     * @throws RuntimeException wrapping any {@link SQLException}
     */
    public static int update(DataSource ds, String sql, Object... params) {
        final int affected;
        try {
            affected = JdbcUtils.executeUpdate(ds, sql, params);
        } catch (SQLException e) {
            log.error("execute sql error:{}", e.getMessage(), e);
            throw new RuntimeException(e);
        }
        return affected;
    }

    /**
     * Executes a delete statement after a safety check: the statement must
     * contain the DELETE, FROM and WHERE keywords, guarding against an
     * accidental unbounded {@code DELETE FROM table}.
     *
     * <p>Fix: tokenization now splits on any whitespace run. The original
     * split on a single space only, so statements containing tabs, newlines
     * or double spaces were wrongly rejected as unsafe.
     *
     * @return number of affected rows
     * @throws RuntimeException when the statement fails the safety check
     */
    public static int delete(DataSource ds, String sql, Object... params) {
        List<String> words = Arrays.asList(sql.trim().toUpperCase().split("\\s+"));
        if (!words.containsAll(Arrays.asList("DELETE", "FROM", "WHERE"))) {
            throw new RuntimeException("sql is not safe: " + sql);
        }
        return update(ds, sql, params);
    }

    /**
     * Batch INSERT in chunks of {@link DruidJdbcExecutor#DEFAULT_INSERT_SIZE}
     * rows. The JDBC URL should set {@code rewriteBatchedStatements=true} for
     * real batching. Works against MySQL and Oracle; Oracle drivers report
     * -2 per batched statement, so the returned total can be negative there.
     *
     * @param ds      data source
     * @param table   table name
     * @param columns column names, in value order
     * @param rowList rows to insert
     * @return total affected rows (negative with Oracle drivers)
     */
    public static int insertList(DruidDataSource ds, String table, List<String> columns, List<List<Object>> rowList) {
        return doCommit(ds, JdbcUtils.makeInsertToTableSql(table, columns), columns, rowList);
    }

    /**
     * Plain {@code INSERT INTO ... VALUES ...} batch commit; callers must
     * guarantee unique-key data does not collide.
     *
     * @deprecated duplicate functionality — call
     * {@link DruidJdbcExecutor#insertList(DruidDataSource, String, List, List)} directly.
     */
    @Deprecated
    public static int insertIntoValuesList(DruidDataSource ds, String table, List<String> columns, List<List<Object>> rowList) {
        return insertList(ds, table, columns, rowList);
    }

    /**
     * Upserts rows in batches of {@link DruidJdbcExecutor#DEFAULT_INSERT_SIZE}:
     * rows matching an existing key are updated, others are inserted. The JDBC
     * URL should set {@code rewriteBatchedStatements=true} for real batching.
     *
     * <p>MySQL uses {@code INSERT ... ON DUPLICATE KEY UPDATE}; any other
     * dialect goes through the Oracle {@code MERGE INTO} path, where the
     * first column is the match key unless {@code pkParam} names one or more
     * key columns explicitly.
     *
     * @param ds      data source
     * @param table   table name
     * @param columns column names, in value order
     * @param rowList rows to insert or update
     * @param pkParam optional explicit unique-key column names
     * @return affected rows for MySQL; Oracle drivers report -2 per statement,
     *         so the total can be negative there
     */
    public static int insertOrUpdateByPk(DruidDataSource ds, String table, List<String> columns, List<List<Object>> rowList, String ...pkParam) {
        if (DbType.mysql.name().equals(ds.getDbType())) {
            String sql = makeInsertOrUpdate(JdbcUtils.makeInsertToTableSql(table, columns), columns);
            return doCommit(ds, sql, columns, rowList);
        }
        return doOracleBatchInsert(ds, table, columns, rowList, pkParam);
    }

    /** Builds the Oracle MERGE upsert statement and commits the rows in batches. */
    private static int doOracleBatchInsert(DataSource ds, String tableName, List<String> columns, List<List<Object>> rowList, String ...pkParam) {
        String mergeSql = getOracleInsertOrUpdateSql(tableName, columns, pkParam);
        return doCommit(ds, mergeSql, columns, rowList, pkParam);
    }

    /**
     * Builds an Oracle {@code MERGE INTO ... USING DUAL} upsert statement.
     *
     * <p>By default the first column is the match key; passing {@code pkParam}
     * selects the key column(s) explicitly. Key columns are excluded from the
     * UPDATE SET list because columns referenced in the ON clause cannot be
     * updated (ORA-38104). The whole statement is upper-cased.
     *
     * @param table   table name
     * @param columns every column of the row, in bind order (assumed non-empty)
     * @param pkParam optional explicit unique-key column names
     * @return the MERGE statement with {@code ?} placeholders
     */
    public static String getOracleInsertOrUpdateSql(String table, List<String> columns, String ...pkParam){
        String pkName;
        String update;
        if (pkParam != null && pkParam.length > 0) {
            List<String> pkParamList = Arrays.asList(pkParam);
            pkName = pkParamList.stream().collect(Collectors.joining("\"=? and \"", "\"", "\"=?"));
            // ORA-38104: columns referenced in the ON clause cannot be updated.
            update = columns.stream().filter(column -> !pkParamList.contains(column)).collect(Collectors.joining("\"=?, \"", "\"", "\"=?"));
        } else {
            pkName = "\"" + columns.get(0) + "\" =?";
            update = columns.stream().skip(1).collect(Collectors.joining("\"=?, \"", "\"", "\"=?"));
        }
        String insertNames = columns.stream().collect(Collectors.joining("\", \"", "\"", "\""));
        String placeholder = String.join(", ", Collections.nCopies(columns.size(), "?"));
        String merged = "MERGE INTO \"" + table + "\" USING DUAL ON (" + pkName + ")"
                + " WHEN MATCHED THEN UPDATE SET " + update
                + " WHEN NOT MATCHED THEN INSERT (" + insertNames + ") VALUES (" + placeholder + ")";
        return merged.toUpperCase();
    }

    /**
     * Slf4j-style {@code {}} substitution; returns the template untouched
     * when no arguments are supplied.
     */
    private static String format(String msg, Object... args) {
        return (args == null || args.length == 0)
                ? msg
                : MessageFormatter.arrayFormat(msg, args).getMessage();
    }

    /**
     * Returns {@code times} JDBC placeholders joined by commas, e.g. "?, ?, ?".
     * Assumes {@code times >= 1} (call sites pass {@code columns.size()}).
     */
    private static String repeatPlaceholder(int times) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < times; i++) {
            if (i > 0) {
                sb.append(", ");
            }
            sb.append('?');
        }
        return sb.toString();
    }

    /**
     * Appends MySQL's {@code on duplicate key update} clause, setting every
     * column to its inserted value via {@code values(col)}.
     * Assumes {@code columns} is non-empty.
     */
    private static String makeInsertOrUpdate(String insertSql, List<String> columns) {
        String updates = columns.stream()
                .map(column -> column + "=values(" + column + ")")
                .collect(Collectors.joining(","));
        return insertSql + " on duplicate key update " + updates;
    }

    /**
     * Executes {@code sql} once per parameter row, in batches, inside one
     * transaction.
     *
     * @return a single-element array holding the summed affected-row count
     *         (NOTE(review): not per-statement counts, despite the array type)
     */
    public static int[] updateList(DataSource ds, String sql, List<List<Object>> paramsList) {
        return new int[]{doCommit(ds, sql, null, paramsList)};
    }

    /**
     * Lists the column names of {@code tableName}, in dictionary order.
     *
     * <p>Fix: the table name is now bound as a JDBC parameter only for the
     * Oracle query, which actually contains a {@code ?} placeholder. The
     * original also passed it as a bind parameter to the parameter-less
     * MySQL {@code show columns} statement, which drivers reject
     * ("parameter index out of range").
     *
     * @param ds        data source to query
     * @param tableName table to describe
     * @param dbType    dialect selector (Oracle vs MySQL-style)
     * @return column names of the table
     * @throws SQLException on query failure
     */
    public static List<String> getColumns(DataSource ds, String tableName, DruidDataSourceManager.DbType dbType) throws SQLException {
        if (dbType == DruidDataSourceManager.DbType.ORACLE) {
            List<Map<String, Object>> result = JdbcUtils.executeQuery(ds,
                    "SELECT column_name FROM all_tab_cols WHERE table_name = ? order by column_id", tableName);
            return result.stream().map(m -> m.get("COLUMN_NAME").toString()).collect(Collectors.toList());
        }
        // NOTE(review): SHOW COLUMNS does not accept bind parameters, so the
        // table name is concatenated — callers must not pass untrusted input.
        List<Map<String, Object>> result = JdbcUtils.executeQuery(ds, "show columns from " + tableName);
        return result.stream().map(e -> e.get("Field").toString()).collect(Collectors.toList());
    }

    /**
     * Batch-inserts entity beans, deriving the column list from the first
     * entity's declared field names and the values via reflection.
     *
     * @return total affected rows, or 0 when the list is empty
     */
    public static int insertList(DruidDataSource dataSource, String table, List<Object> entityList){
        if (CollectionUtils.isEmpty(entityList)){
            return 0;
        }
        return insertList(dataSource, table, getFieldNames(entityList.get(0)), fillFieldValues(entityList));
    }

    /**
     * Reflectively reads every declared field of each entity into a row of
     * values, in declaration order (must stay aligned with
     * {@link #getFieldNames(Object)}). The synthetic outer-class reference of
     * non-static inner classes ("this$0") is skipped.
     *
     * <p>Fix: each field is now read once instead of twice.
     *
     * <p>NOTE(review): a null field value is replaced by the literal string
     * "null", which ends up as the text 'null' in the database rather than
     * SQL NULL — kept as-is for compatibility, but confirm this is intended.
     *
     * @throws RuntimeException wrapping {@link IllegalAccessException}
     */
    private static List<List<Object>> fillFieldValues(List<Object> entityList) {
        List<List<Object>> rowList = Lists.newArrayList();
        for (Object entity : entityList) {
            List<Object> row = Lists.newArrayList();
            for (Field field : entity.getClass().getDeclaredFields()) {
                if ("this$0".equals(field.getName())) {
                    continue;
                }
                field.setAccessible(true);
                try {
                    Object value = field.get(entity);
                    row.add(value == null ? "null" : value);
                } catch (IllegalAccessException e) {
                    throw new RuntimeException(e);
                }
            }
            rowList.add(row);
        }
        return rowList;
    }

    /**
     * Declared field names of the entity's class, in declaration order,
     * excluding the synthetic outer-class reference "this$0".
     */
    private static List<String> getFieldNames(Object o) {
        return Arrays.stream(o.getClass().getDeclaredFields())
                .map(Field::getName)
                .filter(name -> !"this$0".equals(name))
                .collect(Collectors.toList());
    }

    /**
     * Runs a batched statement inside a single manually-committed transaction.
     *
     * <p>Fixes: the transaction is now rolled back on failure (previously the
     * connection was closed with uncommitted work pending), and autocommit is
     * restored in a finally block so the pooled connection is always handed
     * back in its default state even when commit fails.
     *
     * @return total affected rows (can be negative with Oracle drivers,
     *         which report -2 per batched statement)
     * @throws RuntimeException wrapping any {@link SQLException}
     */
    private static int doCommit(DataSource ds, String sql, List<String> columns, List<List<Object>> rowList, String ...pkParam) {
        Connection conn = null;
        try {
            conn = ds.getConnection();
            conn.setAutoCommit(false);
            int counts = doSave(conn, sql, columns, rowList, pkParam);
            conn.commit();
            return counts;
        } catch (SQLException e) {
            rollbackQuietly(conn);
            log.error("commit sql error:{}", e.getMessage(), e);
            throw new RuntimeException(e);
        } finally {
            restoreAutoCommitQuietly(conn);
            JdbcUtils.close(conn);
        }
    }

    /** Best-effort rollback; failures are logged, not rethrown. */
    private static void rollbackQuietly(Connection conn) {
        if (conn != null) {
            try {
                conn.rollback();
            } catch (SQLException e) {
                log.warn("rollback error:{}", e.getMessage(), e);
            }
        }
    }

    /** Best-effort autocommit restore before the connection returns to the pool. */
    private static void restoreAutoCommitQuietly(Connection conn) {
        if (conn != null) {
            try {
                conn.setAutoCommit(true);
            } catch (SQLException e) {
                log.warn("restore autocommit error:{}", e.getMessage(), e);
            }
        }
    }

    /**
     * Binds every row, adds it to a JDBC batch, and flushes every
     * {@link #DEFAULT_INSERT_SIZE} rows.
     *
     * <p>Fix: flushing is now driven by a pending-row counter. The original
     * flushed when {@code i % 200 == 0} after adding row {@code i}, so the
     * first batch actually carried 201 rows; batches are now exactly the
     * documented size, and the final flush is skipped when nothing is pending.
     *
     * @return summed affected-row counts across all executed batches
     * @throws SQLException on any bind or execution failure (cause preserved)
     */
    private static int doSave(Connection conn, String sql, List<String> columns, List<List<Object>> rowList, String ...pkParam) throws SQLException {
        log.info("SQL:{}", sql);
        // "MERGE INTO" marks the Oracle upsert statement, whose placeholders
        // are bound in a different order (pk, update-set, insert).
        boolean isOracleBatchInsert = sql.contains("MERGE INTO");
        PreparedStatement stmt = null;
        int count = 0;
        try {
            stmt = conn.prepareStatement(sql);
            int pending = 0;
            for (List<Object> params : rowList) {
                setParameters(isOracleBatchInsert, stmt, columns, params, pkParam);
                stmt.addBatch();
                if (++pending >= DEFAULT_INSERT_SIZE) {
                    count += total(stmt.executeBatch());
                    stmt.clearBatch();
                    pending = 0;
                }
            }
            if (pending > 0) {
                count += total(stmt.executeBatch());
            }
            return count;
        } catch (Exception e) {
            log.error("batch execute sql error:{}", e.getMessage(), e);
            throw new SQLException(e.getMessage(), e);
        } finally {
            JdbcUtils.close(stmt);
        }
    }

    /**
     * Binds one row of parameters onto the statement.
     *
     * <p>For plain statements the values are bound positionally. For the
     * Oracle MERGE statement the row is bound in three passes, matching the
     * placeholder order produced by
     * {@link #getOracleInsertOrUpdateSql(String, List, String...)}:
     * ON-clause key(s), then UPDATE SET values (key columns skipped), then
     * the full INSERT value list.
     *
     * <p>Fixes: error messages no longer NPE when the offending value is null
     * (the original called {@code param.getClass()} unconditionally), and
     * explicit key columns are now bound in {@code pkParam} order — the same
     * order the ON clause lists its placeholders — instead of column order.
     */
    private static void setParameters(boolean isOracleBatchSave, PreparedStatement stmt, List<String> columns, List<Object> params, String ...pkParam) throws SQLException {
        if (!isOracleBatchSave){
            for (int i = 0, size = params.size(); i < size; ++i) {
                Object param = params.get(i);
                try {
                    stmt.setObject(i + 1, param);
                } catch (SQLException ex) {
                    // The driver's message lacks context; include index, value and type.
                    throw new SQLException("第" + (i+1) + "个字段存储的数值：" + param + " 出错 值类型：" + typeName(param), ex);
                }
            }
        } else {
            int index = 1;
            List<Pair<String, Integer>> pkParamIndexs = Lists.newArrayList();
            // Pass 1: bind the ON-clause key placeholder(s).
            if (pkParam == null || pkParam.length == 0){
                // Default key is the first column. NOTE: index is deliberately not
                // advanced here — with no pkParam, pkParamIndexs stays empty, so the
                // UPDATE SET pass below does not skip column 0: its first iteration
                // rebinds position 1 with the same value and then advances, which
                // makes all subsequent offsets line up with the placeholders.
                stmt.setObject(index, params.get(0));
            } else {
                // Resolve each explicit key name to its value index, in pkParam
                // order so bindings match the ON clause placeholder order.
                for (String pk : pkParam) {
                    int columnIndex = columns.indexOf(pk);
                    if (columnIndex >= 0) {
                        pkParamIndexs.add(Pair.of(pk, columnIndex));
                    }
                }
                for (Pair<String, Integer> pkParamPair : pkParamIndexs) {
                    stmt.setObject(index, params.get(pkParamPair.getRight()));
                    index++;
                }
            }
            List<Integer> updateColumnIndexs = pkParamIndexs.stream().map(Pair::getRight).collect(Collectors.toList());
            // Pass 2: bind WHEN MATCHED THEN UPDATE SET values (key columns excluded).
            for (int i = 0; i < params.size(); i++){
                Object param = params.get(i);
                if (updateColumnIndexs.contains(i)){
                    continue;
                }
                try {
                    stmt.setObject(index, param);
                    index++;
                } catch (SQLException ex) {
                    throw new SQLException("WHEN MATCHED THEN UPDATE SET 第" + (i+1) + "个字段存储的数值：" + param + " 出错 值类型：" + typeName(param), ex);
                }
            }
            // Pass 3: bind WHEN NOT MATCHED THEN INSERT values (every column, in order).
            for (int i = 0; i < params.size(); i++){
                Object param = params.get(i);
                try {
                    stmt.setObject(index + i, param);
                } catch (SQLException ex) {
                    throw new SQLException("WHEN NOT MATCHED THEN INSERT 第" + (i+1) + "个字段存储的数值：" + param + " 出错 值类型：" + typeName(param), ex);
                }
            }
        }
    }

    /** Null-safe canonical type name for diagnostics. */
    private static String typeName(Object value) {
        return value == null ? "null" : value.getClass().getCanonicalName();
    }

    /**
     * Sums a JDBC batch-execution result array into one affected-row total.
     */
    private static int total(int[] counts) {
        return Arrays.stream(counts).sum();
    }
}
