package com.lcf.framework.utils;

import cn.hutool.core.collection.CollectionUtil;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.lcf.system.dao.CommonDao;
import com.lcf.system.dto.dict.DictTableDTO;
import com.lcf.system.dto.dict.Pair;
import jakarta.annotation.Resource;
import org.springframework.stereotype.Component;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

/**
 * Table-field translation helper ("表字段翻译").
 * <p>
 * Pre-loads rows from arbitrary dictionary tables into a per-thread Caffeine cache,
 * then serves individual translated field values via {@link #getTargetValue}.
 * <p>
 * <b>Thread-safety / lifecycle:</b> the cache is {@link ThreadLocal}-scoped, so entries
 * are confined to the current request thread. {@link #clearCache()} MUST be invoked when
 * the request finishes (e.g. in an interceptor's afterCompletion), otherwise entries leak
 * across requests on pooled threads.
 *
 * @author lcf
 * @since 2025/3/30 20:42
 */
@Component
public class DictTableUtils {
    @Resource
    private CommonDao commonDao;

    /** Maximum number of primary-key values per query batch (keeps SQL IN-lists bounded). */
    private static final int BATCH_SIZE = 1000;

    // Cache structure: Map<tableName#primaryKey#primaryKeyValue, Map<targetFieldName, fieldValue>>
    // Key format is produced by buildCacheKey (joined with '#').
    private final ThreadLocal<Cache<String, Map<String, Object>>> dictTableCache =
            ThreadLocal.withInitial(() -> Caffeine.newBuilder().build());

    /**
     * Pre-loads the cache with the rows identified by {@code dto.getIds()}.
     * <p>
     * Rows already cached keep their existing field values: merging uses
     * {@code putIfAbsent}, so earlier pre-loads are never overwritten.
     *
     * @param dto query descriptor (table, primary key, field mapping, ids)
     * @throws IllegalArgumentException if queryFields/targetFields are missing or differ in length
     */
    public void preCache(DictTableDTO dto) {
        validateFieldMapping(dto);

        if (CollectionUtil.isEmpty(dto.getIds())) {
            return;
        }

        // Query and cache the whole row for each id.
        Map<String, Map<String, Object>> result = queryTableMapping(dto);

        result.forEach((id, row) -> getCache().asMap().merge(id, row, (oldRow, newRow) -> {
            Map<String, Object> mergedRow = new ConcurrentHashMap<>(oldRow);
            // putIfAbsent: values already present in the cache win over freshly queried ones.
            newRow.forEach(mergedRow::putIfAbsent);
            return mergedRow;
        }));
    }

    /**
     * Builds a cache key with a normalized primary-key value.
     * <p>
     * Note: {@code String.valueOf(null)} yields the literal {@code "null"}; callers should
     * filter null ids beforehand (as {@link #queryTableMapping} does for result rows).
     *
     * @param tableName  table the row belongs to
     * @param primaryKey primary-key column name
     * @param id         primary-key value (stringified and trimmed)
     * @return key of the form {@code tableName#primaryKey#id}
     */
    public static String buildCacheKey(String tableName, String primaryKey, Object id) {
        String normalizedId = String.valueOf(id).trim();
        return String.join("#", tableName, primaryKey, normalizedId);
    }

    /**
     * Runs the dynamic-table query and returns the result keyed by cache key.
     *
     * @param dto query descriptor (table, primary key, field mapping, ids)
     * @return Map&lt;tableName#primaryKey#primaryKeyValue, Map&lt;targetFieldName, fieldValue&gt;&gt;
     * @throws IllegalArgumentException if queryFields/targetFields are missing or differ in length
     */
    public Map<String, Map<String, Object>> queryTableMapping(DictTableDTO dto) {
        // Fail fast here too: this method is public and may be called without preCache.
        validateFieldMapping(dto);

        String tableName = dto.getTableName();
        String primaryKey = dto.getPrimaryKey();
        String[] queryFields = dto.getQueryFields();
        String[] targetFields = dto.getTargetFields();

        // Build the column-alias mapping (db column -> target field name).
        List<Pair<String, String>> fieldAliases = createFieldAliases(queryFields, targetFields);

        // Split ids into batches of at most BATCH_SIZE to keep IN-lists bounded.
        List<List<Object>> idBatches = splitIds(new ArrayList<>(dto.getIds()));

        List<Map<String, Object>> result = commonDao.selectDynamicData(tableName, primaryKey, fieldAliases, idBatches);

        // Re-key the rows by cache key; rows without a primary-key value are unusable and dropped.
        return result.stream()
                .filter(row -> row.get(primaryKey) != null)
                .collect(Collectors.toMap(
                        row -> buildCacheKey(tableName, primaryKey, row.get(primaryKey)),
                        row -> mapRowToTargetFields(row, targetFields),
                        // Duplicate keys: keep the last row returned by the query.
                        (existing, replacement) -> replacement
                ));
    }

    /**
     * Clears the current thread's cache and detaches the ThreadLocal slot.
     * Must run at end-of-request to avoid leaks on pooled threads.
     */
    public void clearCache() {
        getCache().invalidateAll();
        // remove() releases the ThreadLocal entry itself, not just the cached rows.
        dictTableCache.remove();
    }

    /**
     * Looks up a translated field value from the current thread's cache.
     *
     * @param key         cache key built via {@link #buildCacheKey}
     * @param targetField target field name to read from the cached row
     * @return the cached value, or {@code null} if the row or field is absent
     */
    public Object getTargetValue(String key, String targetField) {
        Map<String, Object> row = getCache().getIfPresent(key);
        return row == null ? null : row.get(targetField);
    }

    /**
     * Returns the cache instance bound to the current request thread.
     */
    private Cache<String, Map<String, Object>> getCache() {
        return dictTableCache.get();
    }

    /**
     * Validates the query-to-target field mapping of the DTO.
     *
     * @throws IllegalArgumentException if either array is null or the lengths differ
     */
    private static void validateFieldMapping(DictTableDTO dto) {
        String[] queryFields = dto.getQueryFields();
        String[] targetFields = dto.getTargetFields();
        if (queryFields == null || targetFields == null || queryFields.length != targetFields.length) {
            throw new IllegalArgumentException("queryFields 与 targetFields 长度不一致");
        }
    }

    /**
     * Projects a query row onto the target fields (defensive copy).
     * <p>
     * Null values are skipped because ConcurrentHashMap rejects null values;
     * {@link #getTargetValue} returns {@code null} for absent fields anyway.
     */
    private Map<String, Object> mapRowToTargetFields(Map<String, Object> row, String[] targetFields) {
        Map<String, Object> mapped = new ConcurrentHashMap<>(targetFields.length);
        for (String target : targetFields) {
            Object value = row.get(target);
            if (value != null) {
                mapped.put(target, value);
            }
        }
        return mapped;
    }

    /**
     * Splits the primary-key list into batches of at most {@link #BATCH_SIZE} elements.
     * <p>
     * Each batch is an independent copy (not a {@code subList} view), so the batches
     * stay valid even if the source list is later modified or garbage-collected.
     *
     * @param ids original primary-key list
     * @return list of batches, each containing at most {@link #BATCH_SIZE} ids
     */
    private <T> List<List<T>> splitIds(List<T> ids) {
        int total = ids.size();
        int batchCount = (total + BATCH_SIZE - 1) / BATCH_SIZE;

        List<List<T>> batches = new ArrayList<>(batchCount);
        for (int i = 0; i < total; i += BATCH_SIZE) {
            int end = Math.min(i + BATCH_SIZE, total);
            // Copy instead of subList view to decouple batches from the source list.
            batches.add(new ArrayList<>(ids.subList(i, end)));
        }
        return batches;
    }

    /**
     * Builds the column-alias pairs (database column -> target entity field).
     */
    private List<Pair<String, String>> createFieldAliases(String[] queryFields, String[] targetFields) {
        return IntStream.range(0, queryFields.length)
                .mapToObj(i -> new Pair<>(queryFields[i], targetFields[i]))
                .collect(Collectors.toList());
    }
}
