package com.hifar.test.syncData.service.impl;

import cn.hutool.core.util.ZipUtil;
import com.baomidou.mybatisplus.core.conditions.update.UpdateWrapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.hifar.fw.json.JsonUtils;
import com.hifar.fw.utils.lang.NumberUtils;
import com.hifar.fw.utils.lang.StringUtils;
import com.hifar.mybatisplus.PublicMapper;
import com.hifar.plat.maindatasource.service.IMainDataSourceService;
import com.hifar.sys.MinioUtil;
import com.hifar.test.entrust.service.IHfEnvEntrustService;
import com.hifar.test.syncData.constant.SyncDataConstant;
import com.hifar.test.syncData.enums.SyncDataStatusEnum;
import com.hifar.test.syncData.mapper.HfEnvSyncDataImportTaskMapper;
import com.hifar.test.syncData.pojo.HfEnvSyncDataImportTableInfo;
import com.hifar.test.syncData.pojo.HfEnvSyncDataImportTask;
import com.hifar.test.syncData.service.IHfEnvSyncDataImportTableInfoService;
import com.hifar.test.syncData.service.IHfEnvSyncDataImportTaskService;
import com.hifar.utils.FileUtil;
import com.hifar.utils.ToolKit;
import com.tssk.kylx.fw.utils.id.IdUtils;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import javax.sql.DataSource;
import java.io.File;
import java.io.IOException;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.Types;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Stream;

/**
 * @author zhuWeiWei
 * @description 数据同步-导入数据
 * @date 2025/6/10 16:31
 */
@Service
@Slf4j
public class HfEnvSyncDataImportTaskServiceImpl extends ServiceImpl<HfEnvSyncDataImportTaskMapper, HfEnvSyncDataImportTask>
        implements IHfEnvSyncDataImportTaskService {

    /** Per-table import detail records (also used for the attachment statistics row). */
    @Autowired
    private IHfEnvSyncDataImportTableInfoService importTableInfoService;

    /** Generic mapper for table-name-driven insert/update/exists operations. */
    @Autowired
    private PublicMapper publicMapper;

    /** Used to re-link entrust relations once an import finishes. */
    @Autowired
    private IHfEnvEntrustService envEntrustService;

    /** Master datasource, used only to read table metadata for type conversion. */
    @Autowired
    private DataSource dataSource;


    /**
     * Downloads the sync-data zip from MinIO and extracts it to a local temp directory.
     * <p>
     * Progress is advanced to 10% after download and 20% after extraction. On any
     * failure the task is marked as failed and {@code null} is returned. The
     * downloaded zip itself is always deleted; only the extracted directory survives.
     *
     * @param dataImportTask the import task carrying the bucket prefix and object path
     * @return the extracted directory, or {@code null} if download/extraction failed
     */
    @Override
    public File extractSyncImportFile(HfEnvSyncDataImportTask dataImportTask) {
        String tempPath = FileUtil.getTempPath();
        String taskId = dataImportTask.getId();
        String filePath = dataImportTask.getFilePath();
        String bucketPre = dataImportTask.getBucketPre();

        String zipDownloadPath = tempPath + File.separator + IdUtils.uuid32() + ".zip";

        try {
            MinioUtil.downloadFileToLocal(bucketPre, filePath, zipDownloadPath);
            // Download complete -> 10%
            updateSchedule(taskId, new BigDecimal("10"), SyncDataStatusEnum.RUN.getValue());
            // hutool extracts next to the zip, into a directory named after it
            File unzip = ZipUtil.unzip(new File(zipDownloadPath));
            // Extraction complete -> 20%
            updateSchedule(taskId, new BigDecimal("20"), SyncDataStatusEnum.RUN.getValue());

            // FIX: persist the extracted directory as the task's temp path. The
            // original code stored zipDownloadPath, but that zip is deleted in the
            // finally block below, so taskFinish()'s cleanup was always a no-op and
            // the extracted directory leaked on disk.
            UpdateWrapper<HfEnvSyncDataImportTask> taskUpdateWrapper = new UpdateWrapper<>();
            taskUpdateWrapper.lambda()
                    .set(HfEnvSyncDataImportTask::getTempPath, unzip.getAbsolutePath())
                    .eq(HfEnvSyncDataImportTask::getId, taskId);
            this.update(taskUpdateWrapper);
            return unzip;
        } catch (Exception e) {
            log.error("下载或解压文件失败，taskId: {}, filePath: {}", taskId, filePath, e);
            this.taskError(taskId, "下载或解压文件失败: " + e.getMessage());
        } finally {
            // The zip is only an intermediate artifact; remove it on every path.
            File zipFile = new File(zipDownloadPath);
            if (zipFile.exists() && !zipFile.delete()) {
                log.warn("删除临时压缩包失败，taskId: {}, path: {}", taskId, zipDownloadPath);
            }
        }
        return null;
    }


    /**
     * Marks the task as failed: sets end time, ERROR status and a failure remark.
     *
     * @param taskId  id of the import task
     * @param remarks human-readable failure reason
     */
    @Override
    public void taskError(String taskId, String remarks) {
        UpdateWrapper<HfEnvSyncDataImportTask> updateWrapper = new UpdateWrapper<>();
        updateWrapper.lambda()
                .set(HfEnvSyncDataImportTask::getEndTime, new Date())
                .set(HfEnvSyncDataImportTask::getStatus, SyncDataStatusEnum.ERROR.getValue())
                .set(HfEnvSyncDataImportTask::getRemarks, remarks)
                .eq(HfEnvSyncDataImportTask::getId, taskId);
        this.update(updateWrapper);
    }


    /**
     * Persists the task's progress percentage and status via a dedicated mapper call.
     *
     * @param taskId   id of the import task
     * @param schedule progress percentage (0-100)
     * @param status   status code from {@link SyncDataStatusEnum}
     */
    @Override
    public void updateSchedule(String taskId, BigDecimal schedule, String status) {
        baseMapper.updateSchedule(taskId, schedule, status);
    }


    /**
     * Marks the task as successfully finished (100%), removes its temp directory
     * and re-links entrust relations.
     *
     * @param taskId id of the import task
     */
    @Override
    public void taskFinish(String taskId) {
        UpdateWrapper<HfEnvSyncDataImportTask> updateWrapper = new UpdateWrapper<>();
        updateWrapper.lambda()
                .set(HfEnvSyncDataImportTask::getEndTime, new Date())
                .set(HfEnvSyncDataImportTask::getStatus, SyncDataStatusEnum.SUCCESS.getValue())
                .set(HfEnvSyncDataImportTask::getSchedule, new BigDecimal("100"))
                .eq(HfEnvSyncDataImportTask::getId, taskId);
        this.update(updateWrapper);

        HfEnvSyncDataImportTask importTask = getById(taskId);
        String tempPath = importTask.getTempPath();
        // FIX: guard against a null/blank temp path (previously NPE'd in new File(null)
        // and skipped the entrust sync below).
        if (!StringUtils.isBlank(tempPath)) {
            File tempFile = new File(tempPath);
            if (tempFile.exists()) {
                try {
                    FileUtils.deleteDirectory(tempFile);
                } catch (IOException e) {
                    log.warn("清理临时目录失败，taskId: {}, tempPath: {}", taskId, tempPath, e);
                }
            }
        }
        // Re-link entrust relations after the imported data is in place.
        envEntrustService.syncEntrustStatus();
    }


    /**
     * Imports all database table data found under the extracted directory's DB
     * sub-path, advancing progress from 20% to 80%, then hands off to
     * {@link #importFileData(String, File)} for attachments.
     *
     * @param taskId id of the import task
     * @param file   root of the extracted sync package
     */
    @Override
    public void importDbData(String taskId, File file) {
        String dbPath = file.getAbsolutePath() + File.separator + SyncDataConstant.DB_DATA_PATH;
        File dbFileDir = new File(dbPath);
        if (!dbFileDir.exists()) {
            // No DB payload: jump straight to the attachment phase at 80%.
            log.error("数据库数据目录不存在，taskId: {}, path: {}", taskId, dbPath);
            this.updateSchedule(taskId, new BigDecimal("80"), SyncDataStatusEnum.RUN.getValue());
            this.importFileData(taskId, file);
            return;
        }
        try {
            File[] dbFileArr = dbFileDir.listFiles((item, name) -> name.endsWith(".json"));

            if (dbFileArr == null || dbFileArr.length == 0) {
                log.warn("没有找到数据文件，taskId: {}", taskId);
                this.updateSchedule(taskId, new BigDecimal("80"), SyncDataStatusEnum.RUN.getValue());
                this.importFileData(taskId, file);
                return;
            }

            int dbSize = dbFileArr.length;
            log.info("开始导入数据库数据，taskId: {}, 文件数量: {}", taskId, dbSize);

            for (int i = 0; i < dbFileArr.length; i++) {
                File dbFile = dbFileArr[i];
                String dbFileName = dbFile.getName();

                // File names are expected to be "<tableName>.json"; skip malformed ones.
                int dotIndex = dbFileName.indexOf(".");
                if (dotIndex <= 0) {
                    log.warn("文件名格式不正确，跳过: {}", dbFileName);
                    continue;
                }
                String tableName = dbFileName.substring(0, dotIndex);

                this.importDbData(taskId, tableName, dbFile);

                // Progress spans 20% -> 80% across the table files; BigDecimal
                // division avoids the integer-division truncation to 0.
                BigDecimal currentProgress = new BigDecimal("20").add(
                        new BigDecimal("60")
                                .multiply(new BigDecimal(i + 1))
                                .divide(new BigDecimal(dbSize), 2, RoundingMode.HALF_UP)
                );
                this.updateSchedule(taskId, currentProgress, SyncDataStatusEnum.RUN.getValue());
            }

            // All tables imported -> pin progress at exactly 80%.
            this.updateSchedule(taskId, new BigDecimal("80"), SyncDataStatusEnum.RUN.getValue());
            log.info("数据库数据导入完成，taskId: {}", taskId);

            this.importFileData(taskId, file);
        } catch (Exception e) {
            log.error("导入数据库数据失败，taskId: {}", taskId, e);
            this.taskError(taskId, "导入数据失败: " + e.getMessage());
        }
    }

    /**
     * Uploads every attachment found under the extracted directory's file sub-path
     * to MinIO (first-level directories are bucket prefixes), records upload
     * statistics and finishes the task. Individual file failures are counted but
     * do not abort the import.
     *
     * @param taskId id of the import task
     * @param file   root of the extracted sync package
     */
    @Override
    public void importFileData(String taskId, File file) {
        String fileBasePath = file.getAbsolutePath() + File.separator + SyncDataConstant.FILE_DATA_PATH;
        File fileDir = new File(fileBasePath);
        if (!fileDir.exists()) {
            log.info("附件目录不存在，跳过附件导入，taskId: {}", taskId);
            saveFileImportInfo(taskId, 0, 0, 0);
            this.taskFinish(taskId);
            return;
        }

        // First-level directory names are MinIO bucket prefixes.
        File[] bucketFileArr = fileDir.listFiles();
        if (Objects.isNull(bucketFileArr) || bucketFileArr.length == 0) {
            log.info("没有附件需要导入，taskId: {}", taskId);
            saveFileImportInfo(taskId, 0, 0, 0);
            this.taskFinish(taskId);
            return;
        }

        log.info("开始导入附件，taskId: {}, 桶数量: {}", taskId, bucketFileArr.length);

        // Counters must be effectively final for the lambda below.
        AtomicInteger totalFileCount = new AtomicInteger(0);
        AtomicInteger successFileCount = new AtomicInteger(0);
        AtomicInteger failFileCount = new AtomicInteger(0);

        for (File bucketFileDir : bucketFileArr) {
            if (bucketFileDir.isFile()) {
                continue;
            }

            String bucketPre = bucketFileDir.getName();
            if (StringUtils.isBlank(bucketPre)) {
                log.warn("桶前缀为空，跳过该目录，taskId: {}", taskId);
                continue;
            }

            String fileRootPath = fileBasePath + File.separator + bucketPre;
            // FIX: Files.walk returns a stream backed by open directory handles and
            // must be closed — use try-with-resources (was leaked before).
            try (Stream<Path> walk = Files.walk(Paths.get(bucketFileDir.getAbsolutePath()))) {
                walk.filter(Files::isRegularFile)
                        .forEach(item -> {
                            File tempFile = item.toFile();
                            String absolutePath = tempFile.getAbsolutePath();
                            Path fileName = item.getFileName();
                            // Object name = path relative to the bucket root, normalized to '/'.
                            String objectName = absolutePath.substring(absolutePath.indexOf(fileRootPath) + fileRootPath.length() + 1).replace("\\", "/");
                            totalFileCount.incrementAndGet();
                            try {
                                log.debug("文件【{}】上传中...", fileName);
                                // PDFs get an explicit content type so browsers render them inline.
                                if (objectName.endsWith(".pdf")) {
                                    MinioUtil.uploadLocalFileToMinio(bucketPre, objectName, absolutePath, "application/pdf");
                                } else {
                                    MinioUtil.uploadLocalFileToMinio(bucketPre, objectName, absolutePath);
                                }
                                successFileCount.incrementAndGet();
                                log.debug("文件【{}】上传成功", fileName);
                            } catch (Exception e) {
                                // One failed upload must not abort the whole import.
                                failFileCount.incrementAndGet();
                                log.error("文件上传失败，taskId: {}, objectName: {}", taskId, objectName, e);
                            }
                        });
            } catch (IOException e) {
                // A broken bucket directory must not stop the remaining buckets.
                log.error("遍历附件目录失败，taskId: {}, bucketPre: {}", taskId, bucketPre, e);
            }
        }

        int total = totalFileCount.get();
        int success = successFileCount.get();
        int fail = failFileCount.get();

        log.info("附件导入完成，taskId: {}, 总计：{}个，成功：{}个，失败：{}个",
                taskId, total, success, fail);

        saveFileImportInfo(taskId, total, success, fail);

        this.taskFinish(taskId);
    }

    /**
     * Saves the attachment-upload statistics as a pseudo table-info row
     * (table name {@code __FILE_IMPORT__}). Persisting failures are logged only —
     * statistics must never fail the import itself.
     *
     * @param taskId       id of the import task
     * @param totalCount   number of attachments attempted
     * @param successCount number of successful uploads
     * @param failCount    number of failed uploads
     */
    private void saveFileImportInfo(String taskId, int totalCount, int successCount, int failCount) {
        try {
            HfEnvSyncDataImportTableInfo fileInfo = new HfEnvSyncDataImportTableInfo();
            fileInfo.setTaskId(taskId);
            fileInfo.setTableName("__FILE_IMPORT__"); // sentinel name marking the statistics row
            fileInfo.setDescription("附件导入统计");
            fileInfo.setDataSource("file");
            fileInfo.setInsertNum(totalCount);   // insertNum repurposed as the total count
            fileInfo.setUpdateNum(successCount); // updateNum repurposed as the success count
            // Status: "1" = all succeeded (or nothing to do), "2" = at least one failure.
            // failCount > 0 implies totalCount > 0, so the check collapses to this.
            String status = failCount > 0 ? "2" : "1";
            fileInfo.setStatus(status);
            if (failCount > 0) {
                fileInfo.setRemarks(String.format("附件导入完成，总计：%d个，成功：%d个，失败：%d个", totalCount, successCount, failCount));
            }
            importTableInfoService.save(fileInfo);
            log.info("附件统计信息保存成功，taskId: {}, 总计：{}，成功：{}，失败：{}，状态：{}",
                    taskId, totalCount, successCount, failCount, status);
        } catch (Exception e) {
            log.error("保存附件统计信息失败，taskId: {}, 错误：{}", taskId, e.getMessage(), e);
        }
    }


    /**
     * Imports one table's JSON data file: rows whose id already exists are updated,
     * the rest are inserted, and a per-table summary record is saved.
     * <p>
     * NOTE(review): existence is checked with one query per row; for very large
     * files a batched existence check would be cheaper — confirm row volumes
     * before optimizing.
     *
     * @param taskId    id of the import task
     * @param tableName target table name
     * @param file      JSON file containing a list of row maps
     */
    @Override
    public void importDbData(String taskId, String tableName, File file) {
        log.debug("表【{}】数据导入中...", tableName);
        String tableData = ToolKit.readFile(file);
        List<Map<String, Object>> dataDbList = JsonUtils.toArray(tableData);

        // Plain ints — the boxed Integer counters were pointless autoboxing.
        int insertNum = 0;
        int updateNum = 0;

        List<Map<String, Object>> insertMapList = new ArrayList<>();
        List<Map<String, Object>> updateMapList = new ArrayList<>();
        for (Map<String, Object> dbMap : dataDbList) {
            String id = StringUtils.trimNull(dbMap.get("id"));
            int count = publicMapper.queryExist(tableName, id);
            if (count > 0) {
                updateNum++;
                updateMapList.add(dbMap);
            } else {
                insertNum++;
                insertMapList.add(dbMap);
            }
        }
        this.insertFromMap(tableName, insertMapList);
        this.updateFromMapById(tableName, updateMapList, "id");

        HfEnvSyncDataImportTableInfo syncDataImportTableInfo = new HfEnvSyncDataImportTableInfo();
        syncDataImportTableInfo.setTaskId(taskId);
        syncDataImportTableInfo.setTableName(tableName);
        syncDataImportTableInfo.setDataSource("master");
        syncDataImportTableInfo.setInsertNum(insertNum);
        syncDataImportTableInfo.setUpdateNum(updateNum);
        syncDataImportTableInfo.setStatus("1"); // status: 1 = success, 2 = failure
        importTableInfoService.save(syncDataImportTableInfo);
        log.debug("表【{}】导入信息已保存，插入：{}条，更新：{}条", tableName, insertNum, updateNum);
    }

    /**
     * Updates a single row by id after type preprocessing.
     * Retained single-row variant; the batch path below reuses the column-type
     * lookup across rows.
     *
     * @param tableName table name
     * @param dataMap   row data (camelCase keys)
     * @param dataId    id of the row to update
     */
    private void updateFromMapById(String tableName, Map<String, Object> dataMap, String dataId) {
        this.preprocessMap(tableName, dataMap);
        publicMapper.updateFromMapById(tableName, dataMap, dataId);
    }

    /**
     * Updates every row in the list by its id column; the table's column types are
     * fetched once and shared across all rows.
     *
     * @param tableName table name
     * @param dataList  rows to update
     * @param idColumn  key in each map holding the row id
     */
    private void updateFromMapById(String tableName, List<Map<String, Object>> dataList, String idColumn) {
        if (dataList.isEmpty()) {
            return;
        }
        Map<String, Integer> tableColumnTypeMap = this.queryTableColumnType(tableName);
        for (Map<String, Object> dataMap : dataList) {
            this.preprocessMap(tableName, tableColumnTypeMap, dataMap);
            publicMapper.updateFromMapById(tableName, dataMap, StringUtils.trimNull(dataMap.get(idColumn)));
        }
    }

    /**
     * Inserts a single row after type preprocessing (retained single-row variant).
     *
     * @param tableName table name
     * @param dataMap   row data (camelCase keys)
     */
    private void insertFromMap(String tableName, Map<String, Object> dataMap) {
        this.preprocessMap(tableName, dataMap);
        publicMapper.insertFromMap(tableName, dataMap);
    }

    /**
     * Inserts every row in the list; the table's column types are fetched once
     * and shared across all rows.
     *
     * @param tableName table name
     * @param dataList  rows to insert
     */
    private void insertFromMap(String tableName, List<Map<String, Object>> dataList) {
        if (dataList.isEmpty()) {
            return;
        }
        Map<String, Integer> tableColumnTypeMap = this.queryTableColumnType(tableName);
        for (Map<String, Object> dataMap : dataList) {
            this.preprocessMap(tableName, tableColumnTypeMap, dataMap);
            publicMapper.insertFromMap(tableName, dataMap);
        }
    }


    /**
     * Reads the table's column names and JDBC {@link Types} codes from database
     * metadata. Failures are logged and an empty map is returned, which makes
     * {@link #preprocessMap} reject every column — callers catch that upstream.
     *
     * @param tableName table to inspect
     * @return column name -> {@code java.sql.Types} code (empty on failure)
     */
    private Map<String, Integer> queryTableColumnType(String tableName) {
        Map<String, Integer> columnTypes = new HashMap<>();
        try (Connection conn = dataSource.getConnection()) {
            DatabaseMetaData metaData = conn.getMetaData();
            // FIX: the ResultSet was never closed — close it with its own
            // try-with-resources (it is not tied to the Connection's lifecycle
            // on all drivers).
            try (ResultSet columns = metaData.getColumns(null, null, tableName, null)) {
                while (columns.next()) {
                    columnTypes.put(columns.getString("COLUMN_NAME"), columns.getInt("DATA_TYPE"));
                }
            }
        } catch (Exception e) {
            log.error("查询表结构失败，tableName: {}", tableName, e);
        }
        return columnTypes;
    }

    /**
     * Convenience overload of {@link #preprocessMap(String, Map, Map)} that looks
     * up the table's column types itself.
     *
     * @param tableName table name
     * @param dataMap   row data to convert in place
     */
    private void preprocessMap(String tableName, Map<String, Object> dataMap) {
        Map<String, Integer> columnTypes = this.queryTableColumnType(tableName);
        this.preprocessMap(tableName, columnTypes, dataMap);
    }

    /**
     * Converts row values in place to match the target column types. Currently
     * only timestamp columns are converted: epoch-millis values become
     * {@link Date}; unparseable values are logged and nulled out.
     * <p>
     * Replacing values via {@code put} on an existing key during entry iteration
     * is safe — it is not a structural modification.
     *
     * @param tableName   table name (for error messages only)
     * @param columnTypes column name -> {@code java.sql.Types} code
     * @param dataMap     row data with camelCase keys, converted in place
     * @throws IllegalArgumentException if a key has no matching column
     */
    private void preprocessMap(String tableName, Map<String, Integer> columnTypes, Map<String, Object> dataMap) {
        for (Map.Entry<String, Object> entry : dataMap.entrySet()) {
            String key = entry.getKey();
            String column = ToolKit.camelToSnake(key);
            Object value = entry.getValue();

            if (!columnTypes.containsKey(column)) {
                throw new IllegalArgumentException("Column " + column + " not found in table " + tableName);
            }
            int expectedType = columnTypes.get(column);

            switch (expectedType) {
                case Types.TIMESTAMP: // 93
                // NOTE(review): 2001 is java.sql.Types.DISTINCT per the JDBC spec;
                // presumably a driver-specific datetime code here — confirm which
                // driver produces it before relying on it.
                case 2001:
                    if (Objects.nonNull(value)) {
                        try {
                            // Date values arrive serialized as epoch milliseconds.
                            long timestamp = NumberUtils.parseLong(value);
                            dataMap.put(key, new Date(timestamp));
                        } catch (Exception e) {
                            // Unparseable date: null it out rather than fail the row.
                            log.warn("日期字段转换失败，表: {}, 字段: {}, 值: {}, 错误: {}",
                                    tableName, key, value, e.getMessage());
                            dataMap.put(key, null);
                        }
                    } else {
                        dataMap.put(key, null);
                    }
                    break;
                default:
                    // Other JDBC types pass through unchanged.
                    break;
            }
        }
    }

}
