package com.wg.db.repository.code;

import cn.hutool.core.date.TimeInterval;
import cn.hutool.core.util.RandomUtil;
import cn.hutool.db.Db;
import cn.hutool.db.sql.SqlExecutor;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.wg.core.thread.fiber.IFiber;
import com.wg.core.thread.fiber.PoolFiber;
import com.wg.db.AsynWriteThreadPool;
import com.wg.db.DbConfig;
import com.wg.db.schema.SchemaTable;
import com.wg.db.sql.MySqlExpert;

import java.io.Serializable;
import java.sql.Connection;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Base class for asynchronous (write-behind) repository implementations.
 *
 * <p>All mutating operations are enqueued onto a single {@link IFiber} and
 * flushed periodically: first as one batched transaction, and on failure row
 * by row, so a single bad record cannot block the whole batch.
 *
 * @param <Key> entity ID type
 * @param <T>   entity type
 */
public abstract class AbstractMySqlAsyncRepository<Key extends Serializable, T> extends MySqlRepository<Key, T> {
    private static final Log log = LogFactory.get();
    // Nanosecond-precision stopwatch (TimeInterval(true) = nano mode); only
    // touched from the single fiber thread, so no synchronization is needed.
    private final TimeInterval timeInterval = new TimeInterval(true);
    private final IFiber fiber;
    // Soft batch cap; close() enlarges it so the final drain takes a bounded
    // number of batches.
    protected int batchSize;

    public AbstractMySqlAsyncRepository(Class<T> entityClass) {
        super(entityClass);

        DbConfig config = DbConfig.getInstance();
        this.batchSize = config.getWriteMaxSize();
        this.fiber = new PoolFiber(AsynWriteThreadPool.getInstance());

        // Schedule the periodic flush. The random jitter (0..3s) staggers the
        // flush timers of different repositories so they do not all hit the
        // database at the same instant.
        int interval = config.getWriteInterval() * 1000 + RandomUtil.randomInt(3000);
        this.fiber.scheduleOnInterval(this::flush, 1000, interval);
        this.fiber.start();
    }

    /** Queues an insert; a null entity is silently ignored. */
    @Override
    public void insert(T entity) {
        if (entity == null) {
            return;
        }

        fiber.enqueue(() -> addEntity(OperationType.Insert, entity));
    }

    /** Queues a batch of inserts; a null or empty list is silently ignored. */
    @Override
    public void batchInsert(List<T> entities) {
        if (entities == null || entities.isEmpty()) {
            return;
        }

        fiber.enqueue(() -> batchAddEntity(OperationType.Insert, entities));
    }

    /**
     * Queues an update. Implemented as insert-or-update so an entity that was
     * never persisted (or whose insert is still pending) is not lost.
     */
    @Override
    public void update(T entity) {
        if (entity == null) {
            return;
        }

        fiber.enqueue(() -> addEntity(OperationType.InsertOrUpdate, entity));
    }

    /** Queues a batch of insert-or-update operations; null/empty ignored. */
    @Override
    public void batchUpdate(List<T> entities) {
        if (entities == null || entities.isEmpty()) {
            return;
        }

        fiber.enqueue(() -> batchAddEntity(OperationType.InsertOrUpdate, entities));
    }

    /** Queues a delete; a null entity is silently ignored. */
    @Override
    public void delete(T entity) {
        if (entity == null) {
            return;
        }

        fiber.enqueue(() -> addEntity(OperationType.Delete, entity));
    }

    /** Queues a batch of deletes; a null or empty list is silently ignored. */
    @Override
    public void batchDelete(List<T> entities) {
        if (entities == null || entities.isEmpty()) {
            return;
        }

        fiber.enqueue(() -> batchAddEntity(OperationType.Delete, entities));
    }

    /**
     * Drains pending writes before shutdown. When the backlog exceeds the
     * configured batch size, {@link #batchSize} is enlarged so the final flush
     * completes in roughly 2-5 batches instead of many small ones.
     *
     * <p>NOTE(review): the fiber itself is never stopped/disposed here — confirm
     * whether {@code IFiber} exposes a dispose()/stop() that should be invoked
     * after the final flush, otherwise its timer keeps firing after close().
     */
    @Override
    public void close() {
        int size = pendingSize();
        if (size == 0) {
            return;
        }

        if (size >= batchSize * 5) {
            batchSize = size / 5;
        } else if (size > batchSize) {
            batchSize = size / 2;
        }

        fiber.enqueue(this::flush);
    }

    /** Wraps each entity and adds it to the pending queue (fiber thread only). */
    private void batchAddEntity(OperationType operationType, List<T> entities) {
        if (entities == null || entities.isEmpty()) {
            return;
        }

        for (T entity : entities) {
            addEntity(operationType, entity);
        }
    }

    /**
     * Executes all pending operations as batched statements inside a single
     * transaction, grouping rows by operation type.
     *
     * @param executeDatas pending operations to persist
     * @return {@code true} when the whole transaction committed; {@code false}
     *         on any failure (the caller then retries row by row)
     */
    protected boolean batchExecuteSql(List<DataWrapper<Key, T>> executeDatas) {
        timeInterval.restart();

        try {
            // Group parameter arrays by operation type so each type becomes one
            // executeBatch() call against its prebuilt SQL command.
            HashMap<OperationType, List<Object[]>> argsMap = new HashMap<>();
            for (DataWrapper<Key, T> dataWrapper : executeDatas) {
                OperationType operationType = dataWrapper.operationType;
                List<Object[]> argsList = argsMap.computeIfAbsent(operationType, k -> new ArrayList<>());
                argsList.add(dataWrapper.buildParams(schemaTable));
            }

            // All batches share one transaction: either every row lands or none.
            schemaTable.getDb().tx(db -> {
                for (Map.Entry<OperationType, List<Object[]>> entry : argsMap.entrySet()) {
                    String sql = sqlCommandMap.get(entry.getKey());
                    db.executeBatch(sql, entry.getValue());
                }
            });

            onSuccess(executeDatas);

            // TimeInterval was created in nano mode, so convert ns -> ms.
            final long elapsedNanos = timeInterval.interval();
            log.info("批量存档成功: table={} exeCount={}  exe={} ms", schemaTable.getRealTableName(), executeDatas.size(),
                    elapsedNanos / 1_000_000F);
            return true;
        } catch (Exception e) {
            // Pass the throwable itself so the stack trace is not lost.
            log.error(e, "批量存档失败: table={} error={}", schemaTable.getRealTableName(), e.getMessage());
            return false;
        }
    }

    /**
     * Fallback path: executes pending operations one statement at a time on a
     * shared connection, so a single bad row fails alone instead of aborting
     * the whole batch. Successes and failures are reported separately.
     *
     * @param executeDatas pending operations to persist individually
     */
    protected void executeSql(List<DataWrapper<Key, T>> executeDatas) {
        Db db = schemaTable.getDb();
        Connection conn = null;
        List<DataWrapper<Key, T>> successDatas = new ArrayList<>();
        List<DataWrapper<Key, T>> failDatas = new ArrayList<>();
        try {
            conn = db.getConnection();
            for (DataWrapper<Key, T> dataWrapper : executeDatas) {
                String sql = null;
                Object[] params = null;
                try {
                    sql = sqlCommandMap.get(dataWrapper.operationType);
                    params = dataWrapper.buildParams(schemaTable);
                    SqlExecutor.execute(conn, sql, params);
                    successDatas.add(dataWrapper);
                } catch (Exception e) {
                    // Keep the stack trace and render params readably instead
                    // of the default Object[]@hash representation.
                    log.error(e, "存档失败: table={} sql={} params={} error={}", schemaTable.getRealTableName(), sql,
                            Arrays.toString(params), e.getMessage());
                    failDatas.add(dataWrapper);
                }
            }
        } catch (Exception e) {
            // Connection-level failure: rows not yet attempted end up in
            // neither list; they remain unreported for this round.
            log.error(e);
        } finally {
            db.closeConnection(conn);
        }

        onSuccess(successDatas);
        onFail(failDatas);
    }

    /**
     * Drains the pending queue: tries one batched transaction first and falls
     * back to row-by-row execution when the batch fails.
     */
    private void flush() {
        List<DataWrapper<Key, T>> executeDatas = getAllEntities();
        if (executeDatas == null || executeDatas.isEmpty()) {
            return;
        }

        if (!batchExecuteSql(executeDatas)) {
            log.info("批量存档失败, 准备逐条存档: table={}", schemaTable.getRealTableName());
            executeSql(executeDatas);
        }
    }

    /**
     * Wraps a single entity with its key and operation type and hands it to
     * the concrete pending-queue implementation.
     *
     * @param operationType operation to perform on flush
     * @param entity        entity to persist
     */
    private void addEntity(OperationType operationType, T entity) {
        Key key = schemaTable.getEntityId(entity);
        DataWrapper<Key, T> dataWrapper = new DataWrapper<>(key);
        dataWrapper.operationType = operationType;
        dataWrapper.entity = entity;
        addDataWrapper(dataWrapper);
    }

    /**
     * Adds a wrapped operation to the pending queue.
     *
     * @param dataWrapper wrapped entity + operation type
     */
    protected abstract void addDataWrapper(DataWrapper<Key, T> dataWrapper);

    /**
     * Removes and returns all pending operations for flushing.
     *
     * @return pending operations; may be null or empty when nothing is queued
     */
    protected abstract List<DataWrapper<Key, T>> getAllEntities();

    /**
     * Callback invoked after operations were persisted successfully.
     *
     * @param successDatas operations that succeeded
     */
    protected abstract void onSuccess(List<DataWrapper<Key, T>> successDatas);

    /**
     * Callback invoked for operations that failed even in row-by-row mode.
     *
     * @param failDatas operations that failed
     */
    protected abstract void onFail(List<DataWrapper<Key, T>> failDatas);

    /**
     * Pairs an entity with its key and the operation to apply on flush, and
     * knows how to build the SQL parameter array for that operation.
     */
    protected static class DataWrapper<K, T> {
        private final K key;
        private T entity;
        private OperationType operationType;

        private DataWrapper(K key) {
            this.key = key;
        }

        public K getKey() {
            return key;
        }

        public T getEntity() {
            return entity;
        }

        /**
         * Builds the JDBC parameter array for this wrapper's operation.
         * InsertOrUpdate is the default branch for any non-Delete/Insert type.
         */
        public Object[] buildParams(SchemaTable schemaTable) {
            switch (operationType) {
                case Delete:
                    return MySqlExpert.genDeleteSqlArgs(schemaTable, entity);
                case Insert:
                    return MySqlExpert.genInsertSqlArgs(schemaTable, entity);
                default:
                    return MySqlExpert.genInsertOrUpdateSqlArgs(schemaTable, entity);
            }
        }
    }
}