package io.kubesphere.devops.core;

import io.kubesphere.devops.common.ExecutorRule;
import io.kubesphere.devops.common.Rule;
import io.kubesphere.devops.common.context.MergingContext;
import io.kubesphere.devops.common.context.Phase1Context;
import io.kubesphere.devops.core.struct.ColumnMeta;
import io.kubesphere.devops.core.struct.TableMeta;
import io.kubesphere.devops.core.struct.TableMetaCacheFactory;
import io.kubesphere.devops.model.UserMergingRule;
import io.seata.common.thread.NamedThreadFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.cache.support.NullValue;

import javax.sql.DataSource;
import java.math.BigDecimal;
import java.sql.*;
import java.sql.Date;
import java.util.*;
import java.util.concurrent.*;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * <p> UpdateWithShardingRule — handles UPDATE in a sharded setup by rewriting it
 * as an INSERT into the target shard plus a DELETE from the source shard.
 *
 * @author gaochuan
 * @version 0.1.0
 * @date 2021/11/18 11:12
 * @since 2021/11/18
 */
@Slf4j
@Rule
public class UpdateWithShardingRule implements ExecutorRule {
    private final DataSource dynamicRoutingDataSource;
    private final DatasourceConfigMap datasourceConfigMap;

    public UpdateWithShardingRule(DataSource dynamicRoutingDataSource, DatasourceConfigMap datasourceConfigMap) {
        this.dynamicRoutingDataSource = dynamicRoutingDataSource;
        this.datasourceConfigMap = datasourceConfigMap;
    }

    public void process(String sourceIdentificationId, String targetIdentificationId, UserMergingRule rule) throws SQLException {
        int sourceIdDatasourceIndex = PartitionZoneLocator.calculate(StringRemoveBackquote.removeBackquote(sourceIdentificationId));
        int targetIdDatasourceIndex = PartitionZoneLocator.calculate(StringRemoveBackquote.removeBackquote(targetIdentificationId));

        log.info("计算账户所在分片,被合并账户所在分库: {},目标账户所在分库 : {}", sourceIdDatasourceIndex, targetIdDatasourceIndex);
        //假设可以对应sourceIdentificationId index 分库为ZONE2,表所在schema,数据源key = zone2+schema
        DynamicDataSourceContextHolder.routePartition(datasourceConfigMap.getPartitionZone(sourceIdDatasourceIndex), rule);
        //计算被合并数据数量,按需启动执行分片 select count(*) from account where id = sourceIdDatasourceIndex;if count >1000 开启分片模式，每个分片处理数量为1000，持久化分片
        int count = count(sourceIdentificationId, rule);
        if (count == 0) {
            return;
        }
        log.info("查询被合并账户所在分库数据情况,{},开始处理{} 分库数据", count > 3 ? "处理行数超过3开启并行处理" : "处理行数小于3 单线程处理", sourceIdDatasourceIndex);
        List<Boolean> completes = new ArrayList<>();
        List<Phase1Context> phase1Contexts = new ArrayList<>();
        ExecutorService executor = Executors.newFixedThreadPool(5);
        long start = System.currentTimeMillis();
        if (count > 3) {

            for (int i = 0; i <= count / 3; i++) {
                String xid = MergingContext.getXID();
                Phase1Context phase1Context = new Phase1Context(sourceIdentificationId, targetIdentificationId, xid, rule, sourceIdDatasourceIndex, targetIdDatasourceIndex, i + 1);
                phase1Contexts.add(phase1Context);
            }
            CompletableFuture[] cfArr = phase1Contexts.stream().

                    map(t -> CompletableFuture

                            .supplyAsync(() -> loop(t), executor)

                            .whenComplete((result, th) -> {
                                completes.add(result);
                            })).toArray(CompletableFuture[]::new);
            // 开始等待所有任务执行完成
            log.info("主线程阻塞等待分片线程执行完毕，统计执行结果");
            CompletableFuture.allOf(cfArr).join();
            log.info("阻塞结束, 耗时:" + (System.currentTimeMillis() - start));
            if (completes.stream().anyMatch(a -> a == false)) {
                log.info("分片线程存在异常，异常向外抛出，开始执行回滚操作");
                throw new SQLException("分片线程存在异常，异常向外抛出，执行回滚操作");
            }
        } else {
            Phase1Context phase1Context = new Phase1Context(sourceIdentificationId, targetIdentificationId, MergingContext.getXID(), rule, sourceIdDatasourceIndex, targetIdDatasourceIndex, 1);
            loop(phase1Context);
        }
    }

    public String randomDelay() {
        //            loop();
        System.out.println(String.format("%s sleep in %s", Thread.currentThread().getName(), "end"));
        return Thread.currentThread().getName() + " return";
    }

    int count(String sourceIdentificationId, UserMergingRule rule) throws SQLException {
        String sql = String.format(
                "select count(*) rowCount from %s.%s where %s = ?",
                rule.getSchemaName(),
                rule.getTableName(),
                rule.getMergeIdentificationName()
        );
        //select * from table limit (pageNo-1)*pageSize, pageSize;
        try (Connection connection = dynamicRoutingDataSource.getConnection()) {
            try (PreparedStatement ps = connection.prepareStatement(sql)) {
                ps.setString(1, sourceIdentificationId);
                try (ResultSet rs = ps.executeQuery()) {
                    while (rs.next()) {
                        return rs.getInt("rowCount");
                    }
                }
            }
        }
        return 0;

    }

    private boolean loop(Phase1Context phase1Context) {
        MergingContext.bindXID(phase1Context.getXid());
        try {
            //假设可以对应sourceIdentificationId index 分库为ZONE2,表所在schema,数据源key = zone2+schema
            DynamicDataSourceContextHolder.routePartition(datasourceConfigMap.getPartitionZone(phase1Context.getSourceIdDatasourceIndex()), phase1Context.getRule());
            List<Map<String, Object>> records;
            //查询待合并数据,申请连接使用后关闭连接
            try (Connection connection = dynamicRoutingDataSource.getConnection()) {
                TableMeta tableMeta = TableMetaCacheFactory.getTableMetaCache("mysql").getTableMeta(
                        connection, phase1Context.getRule().getTableName(), phase1Context.getRule().getSchemaName());

                records = queryPage(3, phase1Context.getLoopIndex(), connection, phase1Context.getSourceIdentificationId(), phase1Context.getRule(), tableMeta);
            }
            if (null == records || records.size() == 0) {
                return true;
            }

            //构造删除对象主键列表
            List<Object> ids = records.stream().map(a -> a.get(phase1Context.getRule().getTablePkName())).collect(Collectors.toList());

            if (ids != null && ids.size() > 0) {
                DynamicDataSourceContextHolder.routePartition(datasourceConfigMap.getPartitionZone(phase1Context.getSourceIdDatasourceIndex()), phase1Context.getRule());
                //删除源数据所在分库数据
                parallelDelete(ids, phase1Context.getSourceIdentificationId(), phase1Context.getSourceIdDatasourceIndex(), phase1Context.getRule());
            }

            //判断是否有需要新增的记录
            if (records != null && records.size() > 0) {
                //切换到新库持久化合并数据
                DynamicDataSourceContextHolder.routePartition(datasourceConfigMap.getPartitionZone(phase1Context.getTargetIdDatasourceIndex()), phase1Context.getRule());
                // 新增目标库数据
                parallelInsert(records, phase1Context.getSourceIdentificationId(), phase1Context.getTargetIdentificationId(), phase1Context.getSourceIdDatasourceIndex(), phase1Context.getTargetIdDatasourceIndex(), phase1Context.getRule());

            }
        } catch (SQLException sqlException) {
            return false;
        }
        return true;
    }

    List<Map<String, Object>> queryPage(int pageSize, int pageNo, Connection conn, String sourceIdentificationId, UserMergingRule rule, TableMeta tableMeta) throws SQLException {
        //select * from table limit (pageNo-1)*pageSize, pageSize;
        String sql = String.format(
                "select * from %s.%s where %s = ? limit %s, %s",
                rule.getSchemaName(),
                rule.getTableName(),
                rule.getMergeIdentificationName(),
                (pageNo - 1) * pageSize, pageSize
        );
        try (PreparedStatement ps = conn.prepareStatement(sql)) {
            ps.setString(1, sourceIdentificationId);
            try (ResultSet rs = ps.executeQuery()) {
                List<Map<String, Object>> list = convertList(rs);
                return list;
            }
        }

    }

    void processDelete(int sourceIdDatasourceIndex, UserMergingRule rule, List<Object> ids) throws SQLException {
        try (ConnectionProxy connection = (ConnectionProxy) dynamicRoutingDataSource.getConnection()) {
            connection.setAutoCommit(false);
            TableMeta tableMeta = TableMetaCacheFactory.getTableMetaCache("mysql").getTableMeta(
                    connection, rule.getTableName(), connection.getDataSourceProxy().getResourceId());
            try {
                deleteRecord(connection, ids, rule, tableMeta);
                connection.commit();
            } catch (SQLException sqlException) {
                connection.rollback();
                throw sqlException;
            }
        }
    }

    void processInsert(List<? extends Map> records, String sourceIdentificationId, String targetIdentificationId,
                       int sourceIdDatasourceIndex, int targetIdDatasourceIndex, UserMergingRule rule) throws SQLException {
        //更新phrcode
        records.stream().forEach(a -> a.put(rule.getMergeIdentificationName(), targetIdentificationId));
        try (ConnectionProxy connection = (ConnectionProxy) dynamicRoutingDataSource.getConnection()) {
            connection.setAutoCommit(false);
            TableMeta tableMeta = TableMetaCacheFactory.getTableMetaCache("mysql").getTableMeta(
                    connection, rule.getTableName(), connection.getDataSourceProxy().getResourceId());

            try {
                insertRecord(connection, rule, records, tableMeta);
                connection.commit();
            } catch (SQLException sqlException) {
                connection.rollback();
                throw sqlException;
            }

        }

    }

    void parallelDelete(List<Object> ids, String sourceIdentificationId, int sourceIdDatasourceIndex, UserMergingRule rule) throws SQLException {
        //不需要并行，直接删除 "delete from account where id = ?"
        processDelete(sourceIdDatasourceIndex, rule, ids);
    }

    void parallelInsert(List<? extends Map> records,
                        String sourceIdentificationId,
                        String targetIdentificationId,
                        int sourceIdDatasourceIndex,
                        int targetIdDatasourceIndex,
                        UserMergingRule rule) throws SQLException {
        //todo "insert account(id,balance,last_update_date) values (?,?,?)"

        processInsert(records, sourceIdentificationId, targetIdentificationId, sourceIdDatasourceIndex, targetIdDatasourceIndex, rule);
    }

    int deleteRecord(Connection conn, List<Object> ids, UserMergingRule rule, TableMeta tableMeta) throws SQLException {
        return Stream.iterate(0, f -> f + 1).limit(Double.valueOf(Math.ceil(ids.size() / 1000d)).longValue()).map(i -> {
            try {
                return deleteRecordSub(conn, ids.stream().skip(i * 1000).limit(1000).parallel().collect(Collectors.toList()), rule);
            } catch (SQLException e) {
                return 0;
            }
        }).collect(Collectors.summingInt(Integer::intValue));
    }

    List<? extends Map> queryRecords(Connection conn, String sourceIdentificationId, UserMergingRule rule, TableMeta tableMeta) throws SQLException {
        String sql = String.format(
                "select * from %s.%s where %s = ?",
                rule.getSchemaName(),
                rule.getTableName(),
                rule.getMergeIdentificationName()
        );

        try (PreparedStatement ps = conn.prepareStatement(sql)) {
            ps.setString(1, sourceIdentificationId);
            try (ResultSet rs = ps.executeQuery()) {
                List<Map<String, Object>> list = convertList(rs);
                return list;
            }
        }
    }

    public List<Map<String, Object>> convertList(ResultSet rs) {
        List<Map<String, Object>> list = new ArrayList<Map<String, Object>>();
        try {
            ResultSetMetaData md = rs.getMetaData();
            int columnCount = md.getColumnCount();
            while (rs.next()) {
                Map<String, Object> rowData = new HashMap<String, Object>();
                for (int i = 1; i <= columnCount; i++) {
                    rowData.put(md.getColumnName(i), rs.getObject(i));
                }
                list.add(rowData);
            }
        } catch (SQLException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } finally {
            try {
                if (rs != null)
                    rs.close();
                rs = null;
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
        return list;
    }

    int insertRecord(Connection conn, UserMergingRule rule, List<? extends Map> records, TableMeta tableMeta) throws SQLException {
        Map<String, ColumnMeta> columnMetaMap = tableMeta.getAllColumns();
        String sql = String.format("insert into %s.%s (%s) values (%s)",
                rule.getSchemaName(),
                rule.getTableName(),
                columnMetaMap.keySet().stream().map(Function.identity()).collect(Collectors.joining(", ")),
                columnMetaMap.keySet().stream().map(c -> "?").collect(Collectors.joining(", "))
        );

        try (PreparedStatement ps = conn.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS)) {
            for (Map record : records) {
                List<ColumnMeta> ColumnMetas = columnMetaMap.keySet().stream().map(a -> tableMeta.getAllColumns().get(a)).collect(Collectors.toList());
                for (int index = 1; index <= ColumnMetas.size(); index++) {

                    ColumnMeta column = ColumnMetas.get(index - 1);
                    setColumnValue(record, ps, index, column);
                }
                ps.addBatch();
            }
            return Arrays.stream(ps.executeBatch()).sum();
        }
    }

    void setColumnValue(Object value, PreparedStatement ps, int index, ColumnMeta column) throws SQLException {
        if (value == null || value instanceof NullValue) {
            ps.setNull(index, column.getDataType());
        } else {
            setValue(ps, index, value);
        }
    }

    void setValue(PreparedStatement ps, int index, Object value) throws SQLException {
        if (value.getClass() == Boolean.class) {
            ps.setBoolean(index, (Boolean) value);
        } else if (value.getClass() == Long.class) {
            ps.setLong(index, (Long) value);
        } else if (value.getClass() == Short.class) {
            ps.setShort(index, (Short) value);
        } else if (value.getClass() == Integer.class) {
            ps.setInt(index, (Integer) value);
        } else if (value.getClass() == String.class) {
            ps.setString(index, (String) value);
        } else if (value.getClass() == Date.class) {
            ps.setTimestamp(index, new Timestamp(((Date) value).getTime()));
        } else if (value.getClass() == java.util.Date.class) {
            ps.setDate(index, new Date(((java.util.Date) value).getTime()));
        } else if (value.getClass() == Double.class) {
            ps.setDouble(index, (Double) value);
        } else if (value.getClass() == Timestamp.class) {
            ps.setTimestamp(index, (Timestamp) value);
        } else if (value.getClass() == BigDecimal.class) {
            ps.setBigDecimal(index, (BigDecimal) value);
        } else {
            throw new IllegalStateException("unsupported type");
        }
    }

    void setColumnValue(Map<String, Object> row, PreparedStatement ps, int index, ColumnMeta column) throws SQLException {
        Object value = row.get(column.getColumnName());

        setColumnValue(value, ps, index, column);
    }

    int deleteRecordSub(Connection conn, List<Object> ids, UserMergingRule rule) throws SQLException {
        String sql = String.format("delete from %s.%s where %s in (%s)",
                rule.getSchemaName(),
                rule.getTableName(),
                rule.getTablePkName(),
                ids.stream().map(m -> "?").collect(Collectors.joining(", "))
        );
        try (PreparedStatement ps = conn.prepareStatement(sql)) {
            int index = 1;
            for (Object id : ids) {
                if (id instanceof Integer) {
                    ps.setInt(index++, (Integer) id);
                }
                if (id instanceof String) {
                    ps.setString(index++, (String) id);
                }
            }
            return ps.executeUpdate();
        }
    }
}
