package com.gitee.dbswitch.data.handler;

import cn.hutool.core.date.StopWatch;
import cn.hutool.core.io.unit.DataSizeUtil;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.gitee.dbswitch.calculate.*;
import com.gitee.dbswitch.common.consts.Constants;
import com.gitee.dbswitch.common.entity.CloseableDataSource;
import com.gitee.dbswitch.common.entity.PatternMapper;
import com.gitee.dbswitch.common.type.CaseConvertEnum;
import com.gitee.dbswitch.common.type.ProductTypeEnum;
import com.gitee.dbswitch.common.util.DatabaseAwareUtils;
import com.gitee.dbswitch.common.util.JdbcTypesUtils;
import com.gitee.dbswitch.common.util.PatterNameUtils;
import com.gitee.dbswitch.core.exchange.BatchElement;
import com.gitee.dbswitch.core.exchange.MemChannel;
import com.gitee.dbswitch.core.task.TaskProcessor;
import com.gitee.dbswitch.data.config.DbswtichPropertiesConfiguration;
import com.gitee.dbswitch.data.domain.APIHttpParam;
import com.gitee.dbswitch.data.domain.APIHttpResult;
import com.gitee.dbswitch.data.domain.APITaskResult;

import com.gitee.dbswitch.data.domain.ReaderTaskResult;
import com.gitee.dbswitch.data.entity.APIDataSourceProperties;
import com.gitee.dbswitch.data.entity.TargetDataSourceProperties;
import com.gitee.dbswitch.data.util.OKHttpUtils;
import com.gitee.dbswitch.provider.ProductFactoryProvider;
import com.gitee.dbswitch.provider.ProductProviderFactory;
import com.gitee.dbswitch.provider.manage.TableManageProvider;
import com.gitee.dbswitch.provider.sync.TableDataSynchronizeProvider;
import com.gitee.dbswitch.provider.transform.RecordTransformProvider;
import com.gitee.dbswitch.provider.write.TableDataWriteProvider;
import com.gitee.dbswitch.schema.ColumnDescription;
import com.gitee.dbswitch.service.DefaultMetadataService;
import com.gitee.dbswitch.service.MetadataService;
import com.google.common.collect.Lists;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import lombok.extern.slf4j.Slf4j;
import org.springframework.util.StringUtils;

import java.sql.Types;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;

/**
 * api http 请求体
 *
 * @author natural
 */
@Slf4j
public class APIReaderTaskThread extends TaskProcessor<APITaskResult> {

    // Flush threshold in bytes: a cached batch larger than 128 MiB is written out.
    private final long MAX_CACHE_BYTES_SIZE = 128 * 1024 * 1024;
    // HTTP request definition plus the shared channel/configuration/datasource/latch.
    private final APIHttpParam apiHttpParam;
    // Global dbswitch configuration (api + target sections).
    private final DbswtichPropertiesConfiguration properties;
    // Target-side datasource configuration section.
    private final TargetDataSourceProperties targetProperties;
    // API-side datasource configuration section.
    private final APIDataSourceProperties apiDataSourceProperties;
    // JDBC datasource of the synchronization target.
    private final CloseableDataSource targetDataSource;
    // The provider/metadata fields below are populated lazily by
    // initializeProvidersAndMetadata(); they are null until then.
    private ProductFactoryProvider targetFactoryProvider;
    private ProductTypeEnum targetProductType;
    private RecordTransformProvider transformProvider;
    private TableDataWriteProvider targetWriter;
    private String targetSchemaName;
    private String targetTableName;
    // Target table columns after regex column mapping and case conversion.
    private List<ColumnDescription> targetColumnDescriptions;
    // Primary keys after the same mapping/conversion as the columns.
    private List<String> targetPrimaryKeys;
    private MetadataService targetMetaDataService;
    // Primary keys as read from the target table (pre-mapping names).
    private List<String> sourcePrimaryKeys;
    // Channel through which batches are handed to the writer side.
    private MemChannel memChannel;
    private int fetchSize = Constants.MINIMUM_FETCH_SIZE;
    // Counted down in afterProcess() so the coordinator can join all API tasks.
    private CountDownLatch apiRobotCountDownLatch;


    // Default row-change flag used for incremental synchronization.
    // NOTE(review): field name breaks the lowerCamelCase convention.
    private final RowChangeTypeEnum IncreaseFlag = RowChangeTypeEnum.VALUE_INSERT;

    // Pre-rendered "source --> target" string used in log output.
    private String tableNameMapString;

    // Running statistics accumulated across the whole task.
    AtomicLong totalBytes = new AtomicLong(0);
    AtomicLong totalCount = new AtomicLong(0);

    /**
     * Builds a reader task from a prepared HTTP request description.
     *
     * @param apiHttpParam carries the HTTP request definition plus the shared
     *                     memory channel, configuration, target datasource and
     *                     completion latch
     */
    public APIReaderTaskThread(APIHttpParam apiHttpParam) {
        this.apiHttpParam = apiHttpParam;
        this.properties = apiHttpParam.getConfiguration();
        this.apiDataSourceProperties = this.properties.getApi();
        this.targetProperties = this.properties.getTarget();
        this.memChannel = apiHttpParam.getMemChannel();
        this.targetDataSource = apiHttpParam.getTargetDataSource();
        this.apiRobotCountDownLatch = apiHttpParam.getApiCountDownLatch();
    }

    /**
     * Delegates interrupt checking to the parent {@link TaskProcessor}.
     * NOTE(review): this override adds no behavior and could be removed.
     */
    @Override
    protected void checkInterrupt() {
        super.checkInterrupt();
    }

    /**
     * Resolves the target product type and the target schema/table names, and
     * forces lower-case naming when the target behaves like Hive.
     */
    @Override
    protected void beforeProcess() {
        this.targetProductType = DatabaseAwareUtils.getProductTypeByDataSource(targetDataSource);
        if (targetProductType.isLikeHive()) {
            // Hive-like engines reject upper-case table/column names, so force
            // lower-case conversion before any name mapping happens.
            TargetDataSourceProperties target = properties.getTarget();
            target.setTableNameCase(CaseConvertEnum.LOWER);
            target.setColumnNameCase(CaseConvertEnum.LOWER);
        }
        this.targetSchemaName = properties.getTarget().getTargetSchema();
        this.targetTableName = properties.getApi().getTargetTable();
        // NOTE(review): an API reader has no source table, so both sides of the
        // mapping string render the same target pair.
        this.tableNameMapString = String.format("%s.%s --> %s.%s",
                targetSchemaName, targetTableName,
                targetSchemaName, targetTableName);
    }


    /**
     * Executes the API read pipeline: concurrently performs the HTTP request
     * (task A) and initializes the target database providers (task B), then
     * combines both results (task C) to validate the column mapping and run
     * either a full or incremental synchronization.
     *
     * @return the task result; {@code success=false} when any stage failed
     */
    @Override
    protected APITaskResult doProcess() {
        String apiURL = this.apiHttpParam.getApiURL();
        String requestMethod = this.apiHttpParam.getRequestMethod();
        List<Map<String, String>> apiParams = this.apiHttpParam.getApiParams();
        List<Map<String, String>> apiHeaders = this.apiHttpParam.getApiHeaders();
        String apiBody = this.apiHttpParam.getApiBody();
        String apiJsonRoot = this.apiHttpParam.getApiJsonRoot();
        String apiJsonPosition = this.apiHttpParam.getApiJsonPosition();

        APITaskResult apiTaskResult = APITaskResult.builder().success(true).build();
        StopWatch stopWatch = new StopWatch(Thread.currentThread().getName());
        stopWatch.start();
        try {
            try {
                // Task A: perform the HTTP request and parse the JSON payload.
                CompletableFuture<List<List<APIHttpResult>>> taskA = CompletableFuture.supplyAsync(() -> {
                    log.info("Task 请求Http任务 is running");
                    try {
                        // Empty defaults protect against null param/header lists.
                        List<Map<String, String>> safeApiParams = (apiParams != null) ? apiParams : Collections.emptyList();
                        List<Map<String, String>> safeApiHeaders = (apiHeaders != null) ? apiHeaders : Collections.emptyList();

                        Map<String, String> paramsTemp = convertListToMap(safeApiParams);
                        Map<String, String> headersTemp = convertListToMap(safeApiHeaders);
                        String s = OKHttpUtils.doRequest(apiURL, paramsTemp, headersTemp, apiBody, requestMethod).toString();
                        JsonElement jsonElement = JsonParser.parseString(s);
                        if ((apiJsonRoot != null && !apiJsonRoot.isEmpty()) && (apiJsonPosition != null && !apiJsonPosition.isEmpty())) {
                            jsonElement = extractJsonObject(jsonElement, apiJsonRoot, Integer.parseInt(apiJsonPosition));
                        }
                        return convertJsonToAPIHttpResult(jsonElement.toString());
                    } catch (Throwable e) {
                        log.warn("[APITask] api request error: {}", e.getMessage());
                    }
                    return null;
                });

                // Task B: connect the target database and prepare providers.
                CompletableFuture<Boolean> taskB = CompletableFuture.supplyAsync(() -> {
                    log.info("Task 连接数据库任务 is running");
                    try {
                        initializeProvidersAndMetadata();
                        return true;
                    } catch (Throwable e) {
                        log.warn("[APITask] connect target datasource error: {}", e.getMessage());
                    }
                    return false;
                });

                // Task C: combine A and B, validate, then synchronize.
                CompletableFuture<APITaskResult> taskC = taskA.thenCombine(taskB, (resultFromA, resultFromB) -> {
                    // NOTE(review): this key validation only runs when the DB
                    // initialization FAILED — the condition looks inverted;
                    // preserved as-is to avoid a behavior change.
                    if (!resultFromB && resultFromA != null) {
                        Set<String> fromPatterns = new HashSet<>();
                        for (PatternMapper mapper : this.apiDataSourceProperties.getRegexColumnMapper()) {
                            fromPatterns.add(mapper.getFromPattern());
                        }
                        for (List<APIHttpResult> apiHttpResults : resultFromA) {
                            for (APIHttpResult result : apiHttpResults) {
                                if (!fromPatterns.contains(result.getKey())) {
                                    return buildFailureResult();
                                }
                            }
                        }
                    }

                    // Fail fast when either stage produced nothing usable;
                    // without providers/metadata the synchronize call below
                    // would only die with a NullPointerException.
                    if (!resultFromB || resultFromA == null || resultFromA.isEmpty()) {
                        return buildFailureResult();
                    }

                    // Log the mapping between the API payload keys and the
                    // target table columns (first row is representative).
                    List<String> columnMapperPairs = new ArrayList<>();
                    List<APIHttpResult> firstRow = resultFromA.get(0);
                    if (firstRow != null) {
                        for (int i = 0; i < firstRow.size(); ++i) {
                            String sourceColumnName = firstRow.get(i).getKey();
                            String targetColumnName = targetColumnDescriptions.get(i).getFieldName();
                            if (StringUtils.hasLength(targetColumnName)) {
                                columnMapperPairs.add(String.format("%s --> %s", sourceColumnName, targetColumnName));
                            } else {
                                columnMapperPairs.add(String.format(
                                        "%s --> %s",
                                        sourceColumnName,
                                        String.format("<!Field(%s) is Deleted>", (i + 1))
                                ));
                            }
                        }
                    }

                    log.info("Mapping relation : \ntable mapper : {}  \ncolumn mapper :\n\t{} ",
                            tableNameMapString, String.join("\n\t", columnMapperPairs));

                    if ("FullSync".equals(this.apiDataSourceProperties.getSynchronizeType())) {
                        return doFullCoverSynchronize(targetWriter, resultFromA, transformProvider, IncreaseFlag);
                    } else {
                        return doIncreaseSynchronize(targetWriter, resultFromA, transformProvider, IncreaseFlag);
                    }
                });

                // Wait for the whole pipeline and collect the outcome.
                CompletableFuture.allOf(taskA, taskB, taskC).join();
                apiTaskResult = taskC.get();
            } catch (Exception t) {
                if (t instanceof InterruptedException) {
                    // Restore the interrupt status for upstream handlers.
                    Thread.currentThread().interrupt();
                }
                apiTaskResult.setSuccess(false);
                // Keep the stack trace instead of only the message.
                log.warn("[APITask] doProcess failed: {}", t.getMessage(), t);
            }

        } finally {
            stopWatch.stop();
        }
        return apiTaskResult;
    }

    /** Builds a failed result carrying the current byte/record counters. */
    private APITaskResult buildFailureResult() {
        return APITaskResult.builder()
                .success(false)
                .successCount(0)
                .failureCount(1)
                .totalBytes(totalBytes.get())
                .recordCount(totalCount.get())
                .build();
    }

    /**
     * Signals task completion by counting down the shared latch so the
     * coordinator waiting on all API reader tasks can proceed.
     */
    @Override
    protected void afterProcess() {
        apiRobotCountDownLatch.countDown();
    }

    /**
     * Incremental synchronization: merges the fetched records into the target
     * table without truncating it first.
     *
     * @param tableWriter       target table write provider
     * @param apiHttpResultList parsed API rows (one inner list per record)
     * @param transformer       record transform provider
     * @param flag              row change type to apply
     * @return the synchronization result
     */
    private APITaskResult doIncreaseSynchronize(TableDataWriteProvider tableWriter,
                                                List<List<APIHttpResult>> apiHttpResultList,
                                                RecordTransformProvider transformer, RowChangeTypeEnum flag) {
        // Honor the caller-supplied flag instead of hard-coding VALUE_INSERT;
        // the only call site passes VALUE_INSERT, so behavior is unchanged.
        return synchronize(tableWriter, apiHttpResultList, transformer, flag, false);
    }

    /**
     * Full synchronization: truncates the target table and rewrites all rows.
     *
     * @param tableWriter       target table write provider
     * @param apiHttpResultList parsed API rows (one inner list per record)
     * @param transformer       record transform provider
     * @param flag              row change type to apply
     * @return the synchronization result ({@link APITaskResult})
     */
    private APITaskResult doFullCoverSynchronize(TableDataWriteProvider tableWriter,
                                                 List<List<APIHttpResult>> apiHttpResultList,
                                                 RecordTransformProvider transformer, RowChangeTypeEnum flag) {
        return synchronize(tableWriter, apiHttpResultList, transformer, flag, true);
    }

    /**
     * Shared synchronization routine for both full and incremental modes:
     * converts the parsed API rows into records, buffers them, and flushes the
     * buffer to the target either by plain write (full cover, after truncate)
     * or via the synchronize provider (insert/update).
     *
     * @param tableWriter       target table write provider
     * @param apiHttpResultList parsed API rows (one inner list per record)
     * @param transformer       record transform provider
     * @param flag              row change type applied in incremental mode
     * @param isFullCover       true to truncate the target table and rewrite it
     * @return the synchronization result
     */
    private APITaskResult synchronize(TableDataWriteProvider tableWriter,
                                      List<List<APIHttpResult>> apiHttpResultList,
                                      RecordTransformProvider transformer, RowChangeTypeEnum flag, boolean isFullCover) {
        final int BATCH_SIZE = fetchSize;
        List<String> targetFields = getTargetFields();

        // Prepare the data-write operation on the target side.
        tableWriter.prepareWrite(targetSchemaName, targetTableName, targetFields);
        TableDataSynchronizeProvider targetSynchronizer = this.targetFactoryProvider.createTableDataSynchronizeProvider();
        TableManageProvider targetTableManager = this.targetFactoryProvider.createTableManageProvider();

        boolean primaryKeyFlag = false;
        List<Object[]> cache = new LinkedList<>();
        long cacheBytes = 0;
        long bytes = 0;

        checkInterrupt(); // interrupt check before the conversion loop
        // NOTE(review): j is bounded by targetFields.size() but indexes ROWS of
        // apiHttpResultList, while i walks the columns of row j — if the row
        // count differs from the field count this skips rows or throws
        // IndexOutOfBoundsException. Confirm the intended data shape.
        for (int j = 0; j < targetFields.size(); j++) {
            Object[] record = new String[apiHttpResultList.get(j).size()];
            for (int i = 0; i < apiHttpResultList.get(j).size(); i++) {
                try {
                    Object value = apiHttpResultList.get(j).get(i).getValue(); // raw value from the API result
                    // Estimate the byte size from the declared API value type;
                    // the mapping to JDBC types is done by getColumnTypeFromType.
                    bytes += JdbcTypesUtils.getObjectSize(getColumnTypeFromType(apiHttpResultList.get(j).get(i).getType()), value);
                    record[i] = value; // stored as-is (values arrive as strings)
                    // NOTE(review): only the FIRST primary key is compared, and
                    // against the field at ROW index j — verify this is intended.
                    if(this.targetPrimaryKeys!=null&&!this.targetPrimaryKeys.isEmpty()&&this.targetPrimaryKeys.get(0).equals(targetFields.get(j))){
                        primaryKeyFlag = true;
                    }
                } catch (Exception e) {
                    log.warn("!!! Process APIHttpResult error", e);
                    record[i] = null;
                }
            }
            cache.add(transformer.doTransform(targetSchemaName, targetTableName, targetFields, record));
        }

        cacheBytes += bytes;
        // NOTE(review): totalCount grows by one per synchronize() call, not per
        // record — the "record count" in logs/results may undercount.
        totalCount.incrementAndGet();
        if (cache.size() >= BATCH_SIZE || cacheBytes >= MAX_CACHE_BYTES_SIZE) {
            flushCache(tableWriter, targetFields, cache, cacheBytes);
            cacheBytes = 0;
        }
        if (!cache.isEmpty()) {
            if (isFullCover) {
                // Full mode: clear the target table, then write the batch.
                targetTableManager.truncateTableData(targetSchemaName, targetTableName);
                flushCache(tableWriter, targetFields, cache, cacheBytes);
            } else {
                // Incremental mode: update when a primary key matched,
                // otherwise use the caller-supplied change flag.
                if(primaryKeyFlag){
                    flushCacheWithFlag(targetSynchronizer, targetFields, cache, cacheBytes, RowChangeTypeEnum.VALUE_CHANGED);
                }else {
                    flushCacheWithFlag(targetSynchronizer, targetFields, cache, cacheBytes, flag);
                }

            }
        }

        log.info("[{}Sync] handle read table [{}] total record count: {}, total bytes = {}",
                isFullCover ? "FullCover" : "Increase", tableNameMapString, totalCount.get(), DataSizeUtil.format(totalBytes.get()));

        return APITaskResult.builder()
                .success(true)
                .failureCount(0)
                .successCount(1)
                .totalBytes(totalBytes.get())
                .recordCount(totalCount.get())
                .build();
    }

    /**
     * Builds every target-side provider and resolves the target table's column
     * metadata and primary keys, applying the configured regex column mapping
     * and case conversion.
     */
    private void initializeProvidersAndMetadata() {
        this.targetFactoryProvider = ProductProviderFactory
                .newProvider(targetProductType, targetDataSource);
        this.targetWriter = targetFactoryProvider.createTableDataWriteProvider(
                properties.getTarget().getWriterEngineInsert());
        this.transformProvider = targetFactoryProvider.createRecordTransformProvider();

        // Load the raw column metadata of the target table.
        this.targetMetaDataService = new DefaultMetadataService(targetDataSource, targetProductType);
        this.targetColumnDescriptions = targetMetaDataService.queryTableColumnMeta(targetSchemaName, targetTableName);

        // Keep only columns that appear as a mapping target, then rename each
        // kept column through the regex mapper and the configured case rule.
        List<ColumnDescription> mapped = new ArrayList<>();
        for (ColumnDescription column : targetColumnDescriptions) {
            List<PatternMapper> patternMappers = apiDataSourceProperties.getRegexColumnMapper();
            boolean isMapped = patternMappers.stream()
                    .anyMatch(mapper -> column.getFieldName().equals(mapper.getToValue()));
            if (!isMapped) {
                continue;
            }
            String newName = properties.getTarget().getColumnNameCase()
                    .convert(
                            PatterNameUtils.getFinalName(
                                    column.getFieldName(),
                                    apiDataSourceProperties.getRegexColumnMapper())
                    );
            ColumnDescription description = column.copy();
            description.setFieldName(newName);
            description.setLabelName(newName);
            mapped.add(description);
        }
        this.targetColumnDescriptions = mapped;

        // Read the target table's primary keys and push them through the same
        // renaming pipeline as the columns.
        this.sourcePrimaryKeys = targetMetaDataService
                .queryTablePrimaryKeys(targetSchemaName, targetTableName);
        List<String> renamedKeys = new ArrayList<>();
        for (String name : sourcePrimaryKeys) {
            renamedKeys.add(properties.getTarget().getColumnNameCase()
                    .convert(
                            PatterNameUtils.getFinalName(
                                    name,
                                    apiDataSourceProperties.getRegexColumnMapper())
                    ));
        }
        this.targetPrimaryKeys = renamedKeys;
    }

    /**
     * Queues the cached batch into the memory channel for asynchronous writing,
     * then resets the cache and accounts the flushed bytes.
     */
    private void flushCache(TableDataWriteProvider tableWriter, List<String> targetFields, List<Object[]> cache, long cacheBytes) {
        final long flushedBytes = cacheBytes;
        BatchElement element = BatchElement.builder()
                .tableNameMapString(tableNameMapString)
                .arg1(new ArrayList<>(targetFields))
                .arg2(new ArrayList<>(cache))
                .handler((arg1, arg2, logger) -> {
                    long written = tableWriter.write(arg1, arg2);
                    logger.info("[Sync] handle write table [{}] batch record count: {}, the bytes size: {}",
                            tableNameMapString, written, DataSizeUtil.format(flushedBytes));
                    return written;
                })
                .build();
        this.memChannel.add(element);
        cache.clear();
        totalBytes.addAndGet(cacheBytes);
    }

    /**
     * Queues the cached batch for synchronization, choosing insert or update
     * execution according to the row-change flag.
     */
    private void flushCacheWithFlag(TableDataSynchronizeProvider targetSynchronizer, List<String> targetFields, List<Object[]> cache, long cacheBytes, RowChangeTypeEnum flag) {
        final long flushedBytes = cacheBytes;
        targetSynchronizer.prepareInsert(targetSchemaName, targetTableName, targetFields);
        if (flag == RowChangeTypeEnum.VALUE_INSERT) {
            this.memChannel.add(BatchElement.builder()
                    .tableNameMapString(tableNameMapString)
                    .arg1(new ArrayList<>(targetFields))
                    .arg2(Lists.newArrayList(cache))
                    .handler((arg1, arg2, logger) -> {
                        long written = targetSynchronizer.executeInsert(arg2);
                        logger.info("[Sync] handle write table [{}] batch record count: {}, the bytes size: {}",
                                tableNameMapString, written, DataSizeUtil.format(flushedBytes));
                        return written;
                    })
                    .build());
        } else if (flag == RowChangeTypeEnum.VALUE_CHANGED) {
            // Updates additionally need the primary keys prepared.
            targetSynchronizer.prepare(targetSchemaName, targetTableName, targetFields, targetPrimaryKeys);
            this.memChannel.add(BatchElement.builder()
                    .tableNameMapString(tableNameMapString)
                    .arg1(new ArrayList<>(targetFields))
                    .arg2(new ArrayList<>(cache))
                    .handler((arg1, arg2, logger) -> {
                        long written = targetSynchronizer.executeUpdate(arg2);
                        logger.info("[Sync] handle write table [{}] batch record count: {}, the bytes size: {}",
                                tableNameMapString, written, DataSizeUtil.format(flushedBytes));
                        return written;
                    })
                    .build());
        }
        // NOTE(review): flags other than INSERT/CHANGED fall through here and
        // the cached records are silently dropped while still being counted.
        cache.clear();
        totalBytes.addAndGet(cacheBytes);
    }

    /**
     * Maps an API value-type token (case-insensitive) to the corresponding
     * {@link java.sql.Types} constant; unknown or null tokens map to
     * {@link Types#OTHER}.
     *
     * @param type the type token reported for an API field, may be null
     * @return the matching JDBC type code
     */
    private int getColumnTypeFromType(String type) {
        if (type == null) {
            // Defensive: APIHttpResult.getType() may be absent for odd payloads.
            return Types.OTHER;
        }
        // Locale.ROOT keeps the mapping stable under locales with special
        // casing rules (e.g. the Turkish dotless-i would break "INTEGER").
        switch (type.toUpperCase(Locale.ROOT)) {
            case "STRING":
            case "CHAR":
            case "NCHAR":
            case "VARCHAR":
            case "LONGVARCHAR":
            case "NVARCHAR":
            case "LONGNVARCHAR":
            case "CLOB":
            case "NCLOB":
                return Types.VARCHAR;
            case "BYTE":
                return Types.TINYINT;
            case "SHORT":
                return Types.SMALLINT;
            case "INTEGER":
                return Types.INTEGER;
            case "LONG":
                return Types.BIGINT;
            case "FLOAT":
                return Types.FLOAT;
            case "DOUBLE":
                return Types.DOUBLE;
            case "BOOLEAN":
                return Types.BOOLEAN;
            case "LOCALTIME":
                return Types.TIME;
            case "LOCALDATE":
                return Types.DATE;
            case "TIMESTAMP":
                return Types.TIMESTAMP;
            case "BYTEARRAY":
            case "BINARY":
                return Types.BINARY;
            // Add further type mappings here as the API grows.
            default:
                return Types.OTHER;
        }
    }

    /**
     * Builds the result reported when processing aborts with a throwable.
     *
     * @param t the error that aborted the task
     * @return a failed {@link APITaskResult} carrying {@code t}
     */
    public APITaskResult exceptProcess(Throwable t) {
        return APITaskResult.builder()
                .throwable(t)
                .success(false)
                .build();
    }

    /**
     * Parses a JSON payload into rows of {@link APIHttpResult} entries.
     * A single JSON object becomes one row; a JSON array becomes one row per
     * element (scalar elements are wrapped under the key {@code "value"}).
     * Unsupported root types and parse failures yield an empty result list.
     *
     * @param jsonString the raw JSON text to convert
     * @return rows of key/value/type triples, possibly empty, never null
     */
    public static List<List<APIHttpResult>> convertJsonToAPIHttpResult(String jsonString) {
        List<List<APIHttpResult>> resultList = new ArrayList<>();
        ObjectMapper objectMapper = new ObjectMapper();

        try {
            JsonNode rootNode = objectMapper.readTree(jsonString);

            if (rootNode.isObject()) {
                // A single JSON object maps to exactly one row.
                resultList.add(convertObjectNode(rootNode));
            } else if (rootNode.isArray()) {
                // Each array element maps to its own row.
                for (JsonNode objNode : rootNode) {
                    if (objNode.isObject()) {
                        resultList.add(convertObjectNode(objNode));
                    } else {
                        // Scalar array elements are wrapped under key "value".
                        List<APIHttpResult> singleValueResult = new ArrayList<>();
                        singleValueResult.add(buildResult("value", objNode));
                        resultList.add(singleValueResult);
                    }
                }
            } else {
                throw new JsonProcessingException("Unsupported JSON type.") {
                };
            }
        } catch (JsonProcessingException e) {
            // Use the class logger instead of stderr so parse failures appear
            // in the normal task log (was System.err.println).
            log.warn("Error processing JSON: {}", e.getMessage());
        }
        return resultList;
    }

    /** Converts every field of a JSON object node into one row of results. */
    private static List<APIHttpResult> convertObjectNode(JsonNode objectNode) {
        List<APIHttpResult> row = new ArrayList<>();
        Iterator<String> fieldNames = objectNode.fieldNames();
        while (fieldNames.hasNext()) {
            String key = fieldNames.next();
            row.add(buildResult(key, objectNode.get(key)));
        }
        return row;
    }

    /** Builds a single key/value/type triple from a JSON value node. */
    private static APIHttpResult buildResult(String key, JsonNode valueNode) {
        return APIHttpResult.builder()
                .key(key)
                .value(valueNode.asText())
                .type(valueNode.getNodeType().toString())
                .build();
    }



    /**
     * Flattens a list of {@code {"keyName": k, "valueName": v}} entries into a
     * single map. Later entries with a duplicate {@code keyName} overwrite
     * earlier ones (the previous {@code Collectors.toMap} threw an
     * IllegalStateException on duplicates and NPE'd on null values).
     *
     * @param list the parameter/header entry list, may be null or empty
     * @return an insertion-ordered map of key/value pairs, never null
     */
    public static Map<String, String> convertListToMap(List<Map<String, String>> list) {
        if (list == null || list.isEmpty()) {
            return Collections.emptyMap();
        }
        Map<String, String> result = new LinkedHashMap<>(list.size());
        for (Map<String, String> entry : list) {
            // put() tolerates both duplicate keys (last wins) and null values.
            result.put(entry.get("keyName"), entry.get("valueName"));
        }
        return result;
    }

    /**
     * Navigates a JSON object to the {@code apiKeyPosition}-th element
     * (1-based) of the array stored under {@code apiJsonKey}; when that element
     * wraps a non-empty {@code "data"} array whose first entry is an object,
     * the nested object is returned instead. Any miss returns an empty
     * {@link JsonObject}.
     *
     * @param jsonElement    root element, expected to be a JSON object
     * @param apiJsonKey     key of the array to drill into
     * @param apiKeyPosition 1-based index inside that array
     * @return the resolved object, or an empty object when not found
     */
    private static JsonObject extractJsonObject(JsonElement jsonElement, String apiJsonKey, int apiKeyPosition) {
        if (!jsonElement.isJsonObject()) {
            log.warn("Input JsonElement is not a JsonObject");
            return new JsonObject();
        }
        JsonObject jsonObject = jsonElement.getAsJsonObject();
        if (!jsonObject.has(apiJsonKey)) {
            log.warn("Key not found: {}", apiJsonKey);
            return new JsonObject();
        }
        // Guard against a non-array value under the key: getAsJsonArray would
        // otherwise throw ClassCastException.
        if (!jsonObject.get(apiJsonKey).isJsonArray()) {
            log.warn("Value under key {} is not a JSON array", apiJsonKey);
            return new JsonObject();
        }
        JsonArray jsonArray = jsonObject.getAsJsonArray(apiJsonKey);
        // Positions are 1-based and must fall inside the array.
        if (apiKeyPosition <= 0 || apiKeyPosition > jsonArray.size()) {
            return new JsonObject();
        }
        JsonElement targetElement = jsonArray.get(apiKeyPosition - 1);
        if (!targetElement.isJsonObject()) {
            return new JsonObject();
        }
        JsonObject targetObject = targetElement.getAsJsonObject();
        // Prefer the first nested object under "data" when present.
        if (targetObject.has("data") && targetObject.get("data").isJsonArray()) {
            JsonArray nestedArray = targetObject.getAsJsonArray("data");
            if (!nestedArray.isEmpty()) {
                JsonElement nestedElement = nestedArray.get(0);
                if (nestedElement.isJsonObject()) {
                    return nestedElement.getAsJsonObject();
                }
            }
        }
        return targetObject;
    }

    /**
     * Normalizes a JSON element to a {@link JsonObject}: for arrays, returns
     * the first element when it is an object; for objects, returns the object
     * itself; everything else yields an empty object.
     * NOTE(review): this helper is not referenced anywhere in this file.
     */
    private static JsonObject processJson(JsonElement jsonElement) {
        if (jsonElement.isJsonObject()) {
            return jsonElement.getAsJsonObject();
        }
        if (jsonElement.isJsonArray()) {
            JsonArray array = jsonElement.getAsJsonArray();
            if (array.isEmpty()) {
                return new JsonObject();
            }
            JsonElement first = array.get(0);
            // Only the first element is considered; no recursion by design.
            return first.isJsonObject() ? first.getAsJsonObject() : new JsonObject();
        }
        return new JsonObject();
    }

    /**
     * Collects the non-empty target field names from the resolved column
     * descriptions, in column order.
     *
     * @return the writable target field names
     */
    private List<String> getTargetFields() {
        List<String> targetFields = new ArrayList<>();
        // NOTE(review): re-creating the metadata service here has no visible
        // use inside this method; kept for parity with the original flow.
        this.targetMetaDataService = new DefaultMetadataService(targetDataSource, targetProductType);
        checkInterrupt();
        for (ColumnDescription tcd : targetColumnDescriptions) {
            // hasLength replaces the deprecated StringUtils.isEmpty(Object)
            // check, matching the usage elsewhere in this class.
            if (StringUtils.hasLength(tcd.getFieldName())) {
                targetFields.add(tcd.getFieldName());
            }
        }
        return targetFields;
    }

}
