package com.doctcloud.common.elasticsearch.service;

import co.elastic.clients.elasticsearch.ElasticsearchClient;
import co.elastic.clients.elasticsearch._types.ElasticsearchException;
import co.elastic.clients.elasticsearch._types.query_dsl.BoolQuery;
import co.elastic.clients.elasticsearch._types.query_dsl.Query;
import co.elastic.clients.elasticsearch.core.*;
import co.elastic.clients.elasticsearch.core.bulk.BulkOperation;
import co.elastic.clients.elasticsearch.core.search.Hit;

import com.doctcloud.common.elasticsearch.model.EsPageResult;
import org.springframework.stereotype.Component;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.retry.annotation.Backoff;
import org.springframework.retry.annotation.Retryable;
import java.util.function.Consumer;
import javax.annotation.Resource;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import co.elastic.clients.elasticsearch._types.SortOrder; // source of SortOrder
/**
 * Elasticsearch 通用服务实现类（基于 Elasticsearch Java Client v8+）
 * 适配优化后的 EsPageResult 模型，简化分页结果构建
 */
@Component
public class ElasticSearchService{

    private static final Logger log = LoggerFactory.getLogger(ElasticSearchService.class);
    // 分页默认参数
    private static final int DEFAULT_PAGE = 1;
    private static final int DEFAULT_SIZE = 10;
    private static final int MAX_BATCH_SIZE = 500;
    @Resource
    private ElasticsearchClient esClient;

    // ============================ 文档操作 ============================
    @Retryable(value = {IOException.class, ElasticsearchException.class}, maxAttempts = 3, backoff = @Backoff(delay = 1000, multiplier = 2))
    public <T> void save(String index, String id, T data) {
        try {
            log.info("保存文档：索引[{}], ID[{}]", index, id);
            esClient.index(i -> i.index(index).id(id).document(data));
            log.info("保存文档成功：索引[{}], ID[{}]", index, id);
        } catch (Exception e) {
            log.error("保存文档失败：索引[{}], ID[{}]", index, id, e);
            throw new RuntimeException("Elasticsearch save failed", e);
        }
    }
    // ============================ 文档操作：补充无回调的bulkSave（适配接口） ============================
    @Retryable(value = {IOException.class, ElasticsearchException.class}, maxAttempts = 3, backoff = @Backoff(delay = 1000, multiplier = 2))
    public <T> void bulkSave(String index, Map<String, T> dataMap) {
        // 调用带回调的方法，回调传null（表示不处理失败ID）
        this.bulkSave(index, dataMap, null);
    }
    @Retryable(value = {IOException.class, ElasticsearchException.class}, maxAttempts = 3, backoff = @Backoff(delay = 1000, multiplier = 2))
    public <T> void bulkSave(String index, Map<String, T> dataMap, Consumer<List<String>> failIdCallback) {
        if (dataMap == null || dataMap.isEmpty()) {
            log.warn("批量保存数据为空，跳过操作");
            return;
        }
        // 1. 转换为Entry列表（便于分批）
        List<Map.Entry<String, T>> entryList = new ArrayList<>(dataMap.entrySet());
        // 2. 分批处理（每批500条，可配置为常量）
        int totalBatches = (int) Math.ceil((double) entryList.size() / MAX_BATCH_SIZE);
        List<String> allFailIds = new ArrayList<>(); // 记录所有失败的ID

        log.info("批量保存文档：索引[{}], 总条数[{}], 分[{}]批处理", index, entryList.size(), totalBatches);

        for (int i = 0; i < totalBatches; i++) {
            // 计算当前批的起止索引
            int start = i * MAX_BATCH_SIZE;
            int end = Math.min((i + 1) * MAX_BATCH_SIZE, entryList.size());
            List<Map.Entry<String, T>> batchEntries = entryList.subList(start, end);
            List<String> batchIds = batchEntries.stream().map(Map.Entry::getKey).collect(Collectors.toList());

            try {
                List<BulkOperation> operations = batchEntries.stream()
                        .map(entry -> BulkOperation.of(
                                b -> b.index(idx -> idx  // 变量名改为 idx，与外层 i 区分
                                        .index(index)
                                        .id(entry.getKey())
                                        .document(entry.getValue())
                                )
                        )) // 补全外层 map 的 Lambda 闭合括号
                        .collect(Collectors.toList());

                // 执行Bulk并解析响应
                BulkResponse response = esClient.bulk(b -> b.operations(operations));
                if (response.errors()) {
                    // 提取失败的ID
                    List<String> failIds = response.items().stream()
                            .filter(item -> item.error() != null)
                            .map(item -> item.id())
                            .collect(Collectors.toList());
                    allFailIds.addAll(failIds);
                    log.warn("批量保存文档部分失败：索引[{}], 第[{}]批，失败条数：{}", index, i + 1, failIds.size());
                } else {
                    log.info("批量保存文档：索引[{}], 第[{}]批处理完成（条数：{}）", index, i + 1, batchEntries.size());
                }
            } catch (Exception e) {
                allFailIds.addAll(batchIds); // 异常时，当前批所有ID视为失败
                log.error("批量保存文档失败：索引[{}], 第[{}]批", index, i + 1, e);
            }
        }
        // 回调通知失败的ID（业务层可选择重试或记录日志）
        if (!allFailIds.isEmpty() && failIdCallback != null) {
            failIdCallback.accept(allFailIds);
            throw new RuntimeException(String.format("Elasticsearch bulk save completed with %d failed items", allFailIds.size()));
        }
        log.info("批量保存文档全部完成：索引[{}], 总条数[{}]", index, entryList.size());
    }

    // ============================ 文档操作：批量删除（优化：补充失败回调） ============================
    /**
     * 批量删除文档（适配数据库批量删除场景，支持失败回调）
     * @param index 索引名
     * @param ids 文档ID列表（与数据库主键一一对应）
     * @param failIdCallback 失败ID回调（业务层可接收失败ID并处理）
     */
    @Retryable(value = {IOException.class, ElasticsearchException.class}, maxAttempts = 3, backoff = @Backoff(delay = 1000, multiplier = 2))
    public <T> void bulkDelete(String index, List<String> ids, Consumer<List<String>> failIdCallback) {
        if (ids == null || ids.isEmpty()) {
            log.warn("批量删除ID列表为空，跳过操作");
            return;
        }

        // 分批处理（每批500条，与批量保存一致）
        int totalBatches = (int) Math.ceil((double) ids.size() / MAX_BATCH_SIZE);
        List<String> allFailIds = new ArrayList<>(); // 记录所有批次的失败ID
        log.info("批量删除文档：索引[{}], 总条数[{}], 分[{}]批处理", index, ids.size(), totalBatches);

        for (int i = 0; i < totalBatches; i++) {
            int start = i * MAX_BATCH_SIZE;
            int end = Math.min((i + 1) * MAX_BATCH_SIZE, ids.size());
            List<String> batchIds = ids.subList(start, end);

            try {
                // 构建批量删除操作
                List<BulkOperation> operations = batchIds.stream()
                        .map(id -> BulkOperation.of(b -> b.delete(del -> del  // 变量名用del，避免冲突
                                .index(index)
                                .id(id)
                        )))
                        .collect(Collectors.toList());

                // 执行Bulk并解析响应（精准识别单条失败）
                BulkResponse response = esClient.bulk(b -> b.operations(operations));
                if (response.errors()) {
                    // 提取当前批的失败ID（仅标记单条失败，非整批）
                    List<String> batchFailIds = response.items().stream()
                            .filter(item -> item.error() != null) // 过滤有错误的记录
                            .map(item -> item.id()) // 获取失败ID
                            .collect(Collectors.toList());
                    allFailIds.addAll(batchFailIds);
                    log.warn("批量删除文档部分失败：索引[{}], 第[{}]批，失败条数：{}，失败ID：{}",
                            index, i + 1, batchFailIds.size(), batchFailIds);
                } else {
                    log.info("批量删除文档：索引[{}], 第[{}]批处理完成（条数：{}）",
                            index, i + 1, batchIds.size());
                }
            } catch (Exception e) {
                // 异常时，当前批所有ID视为失败（网络问题等不可抗因素）
                allFailIds.addAll(batchIds);
                log.error("批量删除文档批处理异常：索引[{}], 第[{}]批，异常原因：{}",
                        index, i + 1, e.getMessage(), e);
            }
        }

        // 回调通知所有失败ID（业务层可选择重试或记录日志）
        if (!allFailIds.isEmpty() && failIdCallback != null) {
            failIdCallback.accept(allFailIds);
            throw new RuntimeException(String.format("Elasticsearch bulk delete completed with %d failed items", allFailIds.size()));
        }
        log.info("批量删除文档全部完成：索引[{}], 总条数[{}], 失败条数：{}",
                index, ids.size(), allFailIds.size());
    }

    /**
     * 批量删除文档（无回调重载，适配接口）
     * @param index 索引名
     * @param ids 文档ID列表
     */
    @Retryable(value = {IOException.class, ElasticsearchException.class}, maxAttempts = 3, backoff = @Backoff(delay = 1000, multiplier = 2))
    public <T> void bulkDelete(String index, List<String> ids) {
        this.bulkDelete(index, ids, null); // 调用带回调的方法，回调传null
    }

    @Retryable(value = {IOException.class, ElasticsearchException.class}, maxAttempts = 3, backoff = @Backoff(delay = 1000, multiplier = 2))
    public <T> T get(String index, String id, Class<T> clazz) {
        try {
            log.info("获取文档：索引[{}], ID[{}]", index, id);
            GetResponse<T> response = esClient.get(g -> g.index(index).id(id), clazz);
            if (response.found()) {
                log.info("获取文档成功：索引[{}], ID[{}]", index, id);
                return response.source();
            }
            log.warn("文档不存在：索引[{}], ID[{}]", index, id);
            return null;
        } catch (Exception e) {
            log.error("获取文档失败：索引[{}], ID[{}]", index, id, e);
            throw new RuntimeException("Elasticsearch get failed", e);
        }
    }

    @Retryable(value = {IOException.class, ElasticsearchException.class}, maxAttempts = 3, backoff = @Backoff(delay = 1000, multiplier = 2))
    public void delete(String index, String id) {
        try {
            log.info("删除文档：索引[{}], ID[{}]", index, id);
            esClient.delete(d -> d.index(index).id(id));
            log.info("删除文档成功：索引[{}], ID[{}]", index, id);
        } catch (Exception e) {
            log.error("删除文档失败：索引[{}], ID[{}]", index, id, e);
            throw new RuntimeException("Elasticsearch delete failed", e);
        }
    }

    // ============================ 索引操作 ============================
    public boolean existsIndex(String index) {
        try {
            log.info("检查索引是否存在：索引[{}]", index);
            return esClient.indices().exists(e -> e.index(index)).value();
        } catch (Exception e) {
            log.error("检查索引存在性失败：索引[{}]", index, e);
            throw new RuntimeException("Elasticsearch index exists check failed", e);
        }
    }


    public void deleteIndex(String index) {
        try {
            if (!existsIndex(index)) {
                log.warn("索引不存在，跳过删除：索引[{}]", index);
                return;
            }
            log.warn("删除索引（不可逆）：索引[{}]", index);
            esClient.indices().delete(d -> d.index(index));
            log.info("删除索引成功：索引[{}]", index);
        } catch (Exception e) {
            log.error("删除索引失败：索引[{}]", index, e);
            throw new RuntimeException("Elasticsearch index delete failed", e);
        }
    }

    public <T> List<T> searchAll(String index, Class<T> clazz) throws IOException {
        log.info("全量查询索引所有文档：索引[{}]", index);
        // 构建 match_all 查询（匹配所有文档）
        Query matchAllQuery = Query.of(q -> q.matchAll(m -> m));
//        // 执行无分页搜索 → 未设置 size，使用 ES 默认值 10
//        return executeSearch(index, matchAllQuery, clazz);
        // 关键：设置 size 为 10000（覆盖默认值 10），确保获取所有数据
        SearchResponse<T> response = esClient.search(s -> s
                        .index(index)
                        .query(matchAllQuery)
                        .size(10000), // 显式设置最大返回条数
                clazz);
        log.info("全量搜索完成：索引[{}], 总命中条数[{}], 实际返回条数[{}]",
                index, getTotalHits(response), response.hits().hits().size());
        return parseSearchHits(response);
    }


    public <T> EsPageResult<T> searchWithAllTotal(String index, int page, int size, String sortField, String sortDir, List<String> fetchFields, Class<T> clazz) throws IOException {
        log.info("全量分页查询：索引[{}], 页码[{}], 条数[{}], 排序[{}:{}]",
                index, page, size, sortField, sortDir);

        // 1. 校验分页参数
        int validPage = Math.max(page, DEFAULT_PAGE);
        int validSize = Math.max(size, DEFAULT_SIZE);
        int from = (validPage - 1) * validSize;

        // 2. 构建 match_all 查询（无筛选条件）
        Query matchAllQuery = Query.of(q -> q.matchAll(m -> m));

        // 3. 构建搜索请求（支持排序和字段过滤）
        SearchRequest searchRequest = SearchRequest.of(s -> {
            // 基础参数：索引、查询条件、分页
            s.index(index)
                    .query(matchAllQuery)
                    .from(from)
                    .size(validSize);

            // 排序配置
            if (StringUtils.isNotBlank(sortField)) {
                SortOrder order = "desc".equalsIgnoreCase(sortDir) ? SortOrder.Desc : SortOrder.Asc;
                s.sort(sort -> sort.field(f -> f.field(sortField).order(order)));
            }

            // 字段过滤（只返回指定字段）
            if (fetchFields != null && !fetchFields.isEmpty()) {
                s.source(source -> source.filter(filter -> filter.includes(fetchFields)));
            }

            return s;
        });

        // 4. 执行搜索并解析结果
        SearchResponse<T> response = esClient.search(searchRequest, clazz);
        long total = getTotalHits(response);
        List<T> records = parseSearchHits(response);

        // 5. 构建分页结果（EsPageResult 会自动计算总页数）
        return new EsPageResult<>(validPage, validSize, total, 0, records);
    }

    // ============================ 基础搜索（单字段） ============================
    @Retryable(value = {IOException.class, ElasticsearchException.class}, maxAttempts = 3, backoff = @Backoff(delay = 1000, multiplier = 2))
    public <T> List<T> search(String index, String field, String keyword, Class<T> clazz) throws IOException {
        log.info("单字段搜索：索引[{}], 字段[{}], 关键词[{}]", index, field, keyword);
        Query query = buildMatchQuery(field, keyword);
        return executeSearch(index, query, clazz);
    }

    @Retryable(value = {IOException.class, ElasticsearchException.class}, maxAttempts = 3, backoff = @Backoff(delay = 1000, multiplier = 2))
    public <T> EsPageResult<T> searchWithTotal(String index, String field, String keyword, int page, int size, Class<T> clazz) throws IOException {
        log.info("单字段分页搜索：索引[{}], 字段[{}], 关键词[{}], 页码[{}], 条数[{}]", index, field, keyword, page, size);
        Query query = buildMatchQuery(field, keyword);
        return executeSearchWithTotal(index, query, page, size, clazz);
    }

    // ============================ 基础搜索（优化：支持排序和字段过滤） ============================
    @Retryable(value = {IOException.class, ElasticsearchException.class}, maxAttempts = 3, backoff = @Backoff(delay = 1000, multiplier = 2))
    public <T> EsPageResult<T> searchWithTotal(String index, String field, String keyword,
                                               int page, int size, String sortField, String sortDir,
                                               List<String> fetchFields, Class<T> clazz) throws IOException {
        log.info("单字段分页搜索：索引[{}], 字段[{}], 关键词[{}], 排序[{}:{}], 页码[{}], 条数[{}]",
                index, field, keyword, sortField, sortDir, page, size);

        // 1. 基础参数校验
        int validPage = Math.max(page, DEFAULT_PAGE);
        int validSize = Math.max(size, DEFAULT_SIZE);
        int from = (validPage - 1) * validSize;
        Query query = buildMatchQuery(field, keyword);

        // 2. 构建搜索请求（核心修正：直接构建 SearchRequest，无需 Builder 中间对象）
        SearchRequest searchRequest = SearchRequest.of(s -> {
            // 基础参数：索引、查询条件、分页
            s.index(index)
                    .query(query)
                    .from(from)
                    .size(validSize);

            // 2.1 排序
            if (StringUtils.isNotBlank(sortField)) {
                SortOrder order = "desc".equalsIgnoreCase(sortDir) ? SortOrder.Desc : SortOrder.Asc;
                s.sort(sort -> sort.field(f -> f.field(sortField).order(order)));
            }

            // 2.2 字段过滤（通用写法：通过 filter().includes() 配置需要返回的字段）
            if (fetchFields != null && !fetchFields.isEmpty()) {
                s.source(source -> source
                        .filter(filter -> filter.includes(fetchFields)) // 核心：指定包含的字段列表
                );
            }
            // 返回构建好的 SearchRequest 配置
            return s;
        });

        // 3. 执行搜索（传入构建好的 SearchRequest）
        SearchResponse<T> response = esClient.search(searchRequest, clazz);

        // 4. 解析结果
        long total = getTotalHits(response);
        List<T> records = parseSearchHits(response);
        return new EsPageResult<>(validPage, validSize, total, 0, records);
    }

    // ============================ 多字段搜索（匹配/模糊） ============================
    @Retryable(value = {IOException.class, ElasticsearchException.class}, maxAttempts = 3, backoff = @Backoff(delay = 1000, multiplier = 2))
    public <T> List<T> multiSearch(String index, List<String> fields, String keyword, Class<T> clazz) throws IOException {
        log.info("多字段搜索：索引[{}], 字段[{}], 关键词[{}]", index, fields, keyword);
        Query query = buildMultiMatchQuery(fields, keyword);
        return executeSearch(index, query, clazz);
    }
    @Retryable(value = {IOException.class, ElasticsearchException.class}, maxAttempts = 3, backoff = @Backoff(delay = 1000, multiplier = 2))

    public <T> EsPageResult<T> multiSearchWithTotal(String index, List<String> fields, String keyword, int page, int size, Class<T> clazz) throws IOException {
        log.info("多字段分页搜索：索引[{}], 字段[{}], 关键词[{}], 页码[{}], 条数[{}]", index, fields, keyword, page, size);
        Query query = buildMultiMatchQuery(fields, keyword);
        return executeSearchWithTotal(index, query, page, size, clazz);
    }
    @Retryable(value = {IOException.class, ElasticsearchException.class}, maxAttempts = 3, backoff = @Backoff(delay = 1000, multiplier = 2))

    public <T> List<T> fuzzyMultiSearch(String index, List<String> fields, String keyword, Class<T> clazz) throws IOException {
        log.info("多字段模糊搜索：索引[{}], 字段[{}], 关键词[{}]", index, fields, keyword);
        Query query = buildFuzzyMultiQuery(fields, keyword);
        return executeSearch(index, query, clazz);
    }

    @Retryable(value = {IOException.class, ElasticsearchException.class}, maxAttempts = 3, backoff = @Backoff(delay = 1000, multiplier = 2))
    public <T> EsPageResult<T> fuzzyMultiSearchWithTotal(String index, List<String> fields, String keyword, int page, int size, Class<T> clazz) throws IOException {
        log.info("多字段模糊分页搜索：索引[{}], 字段[{}], 关键词[{}], 页码[{}], 条数[{}]", index, fields, keyword, page, size);
        Query query = buildFuzzyMultiQuery(fields, keyword);
        return executeSearchWithTotal(index, query, page, size, clazz);
    }

    // ============================ 精确匹配搜索（单/多字段） ============================
    @Retryable(value = {IOException.class, ElasticsearchException.class}, maxAttempts = 3, backoff = @Backoff(delay = 1000, multiplier = 2))
    public <T> List<T> termSearch(String index, String field, Object value, Class<T> clazz) throws IOException {
        log.info("单字段精确搜索：索引[{}], 字段[{}], 值[{}]", index, field, value);
        Query query = buildTermQuery(field, value);
        return executeSearch(index, query, clazz);
    }

    @Retryable(value = {IOException.class, ElasticsearchException.class}, maxAttempts = 3, backoff = @Backoff(delay = 1000, multiplier = 2))
    public <T> EsPageResult<T> termSearchWithTotal(String index, String field, Object value, int page, int size, Class<T> clazz) throws IOException {
        log.info("单字段精确分页搜索：索引[{}], 字段[{}], 值[{}], 页码[{}], 条数[{}]", index, field, value, page, size);
        Query query = buildTermQuery(field, value);
        return executeSearchWithTotal(index, query, page, size, clazz);
    }
    @Retryable(value = {IOException.class, ElasticsearchException.class}, maxAttempts = 3, backoff = @Backoff(delay = 1000, multiplier = 2))
    public <T> List<T> termSearch(String index, Map<String, Object> termMap, Class<T> clazz) throws IOException {
        if (termMap.isEmpty()) {
            log.warn("多字段精确搜索条件为空，返回空列表");
            return Collections.emptyList();
        }
        log.info("多字段精确搜索：索引[{}], 条件[{}]", index, termMap);
        Query query = buildMultiTermQuery(termMap);
        return executeSearch(index, query, clazz);
    }

    @Retryable(value = {IOException.class, ElasticsearchException.class}, maxAttempts = 3, backoff = @Backoff(delay = 1000, multiplier = 2))
    public <T> EsPageResult<T> termSearchWithTotal(String index, Map<String, Object> termMap, int page, int size, Class<T> clazz) throws IOException {
        if (termMap.isEmpty()) {
            log.warn("多字段精确分页搜索条件为空，返回空结果");
            return new EsPageResult<>();
        }
        log.info("多字段精确分页搜索：索引[{}], 条件[{}], 页码[{}], 条数[{}]", index, termMap, page, size);
        Query query = buildMultiTermQuery(termMap);
        return executeSearchWithTotal(index, query, page, size, clazz);
    }

    // ============================ 嵌套字段搜索 ============================
    @Retryable(value = {IOException.class, ElasticsearchException.class}, maxAttempts = 3, backoff = @Backoff(delay = 1000, multiplier = 2))
    public <T> EsPageResult<T> nestedSearch(String index, String path, String field, String keyword, int page, int size, Class<T> clazz) throws IOException {
        Query nestedQuery = buildMatchQuery(field, keyword);
        return buildAndExecuteNestedSearch(index, path, nestedQuery, page, size, clazz);
    }

    @Retryable(value = {IOException.class, ElasticsearchException.class}, maxAttempts = 3, backoff = @Backoff(delay = 1000, multiplier = 2))
    public <T> EsPageResult<T> nestedSearch(String index, String path, Query nestedQuery, int page, int size, Class<T> clazz) throws IOException {
        return buildAndExecuteNestedSearch(index, path, nestedQuery, page, size, clazz);
    }

    @Retryable(value = {IOException.class, ElasticsearchException.class}, maxAttempts = 3, backoff = @Backoff(delay = 1000, multiplier = 2))
    public <T> EsPageResult<T> nestedTermSearch(String index, String path, String field, Object value, int page, int size, Class<T> clazz) throws IOException {
        Query nestedQuery = buildTermQuery(field, value);
        return buildAndExecuteNestedSearch(index, path, nestedQuery, page, size, clazz);
    }

    @Retryable(value = {IOException.class, ElasticsearchException.class}, maxAttempts = 3, backoff = @Backoff(delay = 1000, multiplier = 2))
    public <T> EsPageResult<T> nestedFuzzySearch(String index, String path, String field, String keyword, int page, int size, Class<T> clazz) throws IOException {
        Query nestedQuery = buildFuzzyQuery(field, keyword);
        return buildAndExecuteNestedSearch(index, path, nestedQuery, page, size, clazz);
    }

    // ============================ 私有工具方法 ============================
    /**
     * 构建单字段匹配查询
     */
    private Query buildMatchQuery(String field, String keyword) {
        return Query.of(q -> q.match(m -> m.field(field).query(keyword)));
    }

    /**
     * 构建多字段匹配查询
     */
    private Query buildMultiMatchQuery(List<String> fields, String keyword) {
        return Query.of(q -> q.multiMatch(m -> m.fields(fields).query(keyword)));
    }

    /**
     * 构建单字段模糊查询
     */
    private Query buildFuzzyQuery(String field, String keyword) {
        return Query.of(q -> q.fuzzy(f -> f.field(field).value(keyword).fuzziness("AUTO")));
    }

    /**
     * 构建多字段模糊查询（should 逻辑）
     */
    private Query buildFuzzyMultiQuery(List<String> fields, String keyword) {
        List<Query> fuzzyQueries = fields.stream()
                .map(field -> buildFuzzyQuery(field, keyword))
                .collect(Collectors.toList());
        BoolQuery boolQuery = BoolQuery.of(b -> b.should(fuzzyQueries).minimumShouldMatch("1"));
        return Query.of(q -> q.bool(boolQuery));
    }

    /**
     * 构建单字段精确查询
     */
    private Query buildTermQuery(String field, Object value) {
        return Query.of(q -> q.term(t -> t.field(field).value(v -> v.stringValue(value.toString()))));
    }

    /**
     * 构建多字段精确查询（must 逻辑）
     */
    private Query buildMultiTermQuery(Map<String, Object> termMap) {
        List<Query> termQueries = termMap.entrySet().stream()
                .map(entry -> buildTermQuery(entry.getKey(), entry.getValue()))
                .collect(Collectors.toList());
        BoolQuery boolQuery = BoolQuery.of(b -> b.must(termQueries));
        return Query.of(q -> q.bool(boolQuery));
    }

    /**
     * 构建并执行嵌套查询
     */
    private <T> EsPageResult<T> buildAndExecuteNestedSearch(String index, String path, Query nestedQuery, int page, int size, Class<T> clazz) throws IOException {
        log.info("嵌套搜索：索引[{}], 路径[{}], 页码[{}], 条数[{}]", index, path, page, size);
        Query query = Query.of(q -> q.nested(n -> n.path(path).query(nestedQuery)));
        return executeSearchWithTotal(index, query, page, size, clazz);
    }

    /**
     * 执行无分页搜索，返回列表
     */
    private <T> List<T> executeSearch(String index, Query query, Class<T> clazz) throws IOException {
        SearchResponse<T> response = esClient.search(s -> s.index(index).query(query), clazz);
        log.info("搜索完成：索引[{}], 命中条数[{}]", index, getTotalHits(response));
        return parseSearchHits(response);
    }

    /**
     * 执行分页搜索，利用 EsPageResult 构造函数直接构建结果
     */
    private <T> EsPageResult<T> executeSearchWithTotal(String index, Query query, int page, int size, Class<T> clazz) throws IOException {
        // 校验分页参数
        int validPage = Math.max(page, DEFAULT_PAGE);
        int validSize = Math.max(size, DEFAULT_SIZE);
        int from = (validPage - 1) * validSize;

        // 执行搜索
        SearchResponse<T> response = esClient.search(s -> s
                .index(index)
                .query(query)
                .from(from)
                .size(validSize), clazz);

        // 解析结果（利用 EsPageResult 构造函数，自动计算 totalPages）
        long total = getTotalHits(response);
        List<T> records = parseSearchHits(response);
        return new EsPageResult<>(validPage, validSize, total, 0, records);
        // 注意：构造函数中传入 total 后，EsPageResult 的 setTotal 会自动计算 totalPages，因此无需手动传 totalPages
    }

    /**
     * 解析搜索命中结果，提取文档列表
     */
    private <T> List<T> parseSearchHits(SearchResponse<T> response) {
        return response.hits().hits().stream()
                .map(Hit::source)
                .filter(source -> source != null) // 过滤空文档
                .collect(Collectors.toList());
    }

    /**
     * 获取搜索总命中数（避免空指针）
     */
    private long getTotalHits(SearchResponse<?> response) {
        return response.hits().total() != null ? response.hits().total().value() : 0;
    }
}