package com.haike.pushjobs.jobs;

import cn.hutool.core.date.DateUtil;
import cn.hutool.core.io.FileUtil;
import cn.hutool.core.lang.Assert;
import cn.hutool.core.map.MapUtil;
import cn.hutool.extra.ftp.Ftp;
import cn.hutool.extra.ftp.FtpMode;
import cn.hutool.json.JSONUtil;
import com.baomidou.dynamic.datasource.toolkit.DynamicDataSourceContextHolder;
import com.haike.pushjobs.config.GAOLANGANFtpConfig;
import com.haike.pushjobs.mapper.SqlStringMapper;
import com.haike.pushjobs.utils.CSVUtils;
import lombok.extern.slf4j.Slf4j;
import lombok.var;
import org.quartz.DisallowConcurrentExecution;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.io.File;
import java.nio.charset.Charset;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;


/**
 * 预报增水上传到ftp
 * 1. 从 数据库下载 并生成csv文件
 * 2. 将csv文件上传到ftp
 */
@Slf4j
@DisallowConcurrentExecution
@Component
public class Download_theaty_station_dataToFtp_Job implements Job {

    @Autowired
    SqlStringMapper sqlStringMapper;

    @Autowired
    private GAOLANGANFtpConfig GAOLANGANFtpConfig;

    // Quartz job description, captured per execution and used as a log prefix.
    String description = "";

    /**
     * Quartz entry point: reads the "param" JSON string from the merged job data
     * map, converts it to a Map and delegates to {@link #runTask(Map)}.
     * All exceptions are caught and logged (never rethrown), and the dynamic
     * datasource context pushed inside runTask is always cleared in finally.
     */
    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        long funStartTime = System.currentTimeMillis();
        description = context.getJobDetail().getDescription();
        try {
            String param = context.getMergedJobDataMap().getString("param");
            log.info(description + "开始 - 参数: {}", param);

            Map<String, Object> paramMap = JSONUtil.toBean(param, Map.class);

            runTask(paramMap);

        } catch (Exception e) {
            log.error(description + "执行出错", e);
        } finally {
            log.info(description + "执行完成: {} 毫秒", System.currentTimeMillis() - funStartTime);
            // Always pop the datasource pushed in runTask, even on failure,
            // so the thread does not keep a stale datasource for the next job.
            log.info("[{}]清除数据源上下文", description);
            DynamicDataSourceContextHolder.clear();
        }
    }

    //region 开始下载
    /**
     * Queries the source database and pushes the result to the FTP server as
     * per-station, per-hour CSV files.
     *
     * Expected keys in {@code paramMap}:
     * <ul>
     *   <li>{@code source_db}  (required) — dynamic datasource name to switch to</li>
     *   <li>{@code source_sql} (required) — SQL template; {currentDate}, {start_time}
     *       and {end_time} placeholders are substituted before execution</li>
     *   <li>{@code start_time} (optional) — defaults to 1 day ago when absent/empty</li>
     *   <li>{@code end_time}   (optional) — defaults to now when absent/empty</li>
     *   <li>{@code is_replace} (optional, default false) — when true, overwrite
     *       files that already exist on the FTP server</li>
     * </ul>
     */
    public void runTask(Map<String, Object> paramMap) {
        String currentDate = DateUtil.format(new Date(), "yyyy-MM-dd HH:mm:ss");
        var is_replace = MapUtil.getBool(paramMap, "is_replace", false);

        // Null-safe extraction. The previous paramMap.get(...).toString() calls
        // threw NPE when a key was missing, which made both the Assert.notNull
        // checks and the empty-value default branches below unreachable.
        var source_db = MapUtil.getStr(paramMap, "source_db");
        var source_sql = MapUtil.getStr(paramMap, "source_sql");
        var start_time = MapUtil.getStr(paramMap, "start_time");
        var end_time = MapUtil.getStr(paramMap, "end_time");
        Assert.notNull(source_db, "source_db is required");
        Assert.notNull(source_sql, "source_sql is required");

        // start_time empty -> default to 1 day ago.
        // NOTE(review): an older comment said "3 days ago" but the code has always
        // offset by -1 day; the code is kept as the source of truth — confirm intent.
        if (start_time == null || start_time.isEmpty()) {
            start_time = DateUtil.format(DateUtil.offsetDay(DateUtil.date(), -1), "yyyy-MM-dd HH:mm:ss");
        }
        // end_time empty -> default to now.
        if (end_time == null || end_time.isEmpty()) {
            end_time = DateUtil.format(DateUtil.date(), "yyyy-MM-dd HH:mm:ss");
        }
        log.info("[{}]开始时间: {}, 结束时间: {}", description, start_time, end_time);

        //region 读取数据源表
        log.info("[{}]切换数据库:{}.", description, source_db);
        DynamicDataSourceContextHolder.push(source_db);

        // Substitute placeholders into the SQL template.
        // NOTE(review): plain string replacement, not bound parameters — values
        // come from the job configuration, not end users, but keep it that way.
        String sql = source_sql
                .replace("{currentDate}", currentDate)
                .replace("{start_time}", start_time)
                .replace("{end_time}", end_time);
        var list = sqlStringMapper.selectData(sql, null);

        if (list == null || list.isEmpty()) {
            log.info("[{}]执行完成. 无新数据,不进行推送.", description);
            return;
        }
        //endregion

        // FTP server configuration injected from GAOLANGANFtpConfig.
        String host = GAOLANGANFtpConfig.getHost();
        int port = GAOLANGANFtpConfig.getPort();
        String username = GAOLANGANFtpConfig.getUsername();
        String password = GAOLANGANFtpConfig.getPassword();

        // Connection settings reused for every per-group FTP session below.
        cn.hutool.extra.ftp.FtpConfig config = new cn.hutool.extra.ftp.FtpConfig(host, port, username, password, Charset.defaultCharset());

        // Group rows by station_id and hour-truncated data_time; each group
        // becomes one CSV file on the FTP server.
        Map<String, List<Map<String, Object>>> groupedData = list.stream().collect(Collectors.groupingBy(map -> {
            String stationId = map.get("station_id").toString();
            var dataTimeStr = DateUtil.format(MapUtil.getDate(map, "data_time"), "yyyy-MM-dd HH:00:00");
            return stationId + "|" + dataTimeStr;
        }));
        for (Map.Entry<String, List<Map<String, Object>>> entry : groupedData.entrySet()) {
            log.info("分组键:" + entry.getKey());
            log.info("分组数据量:" + entry.getValue().size() + "条");
            var stationHourList = entry.getValue();
            var item = stationHourList.get(0);

            try {
                var datetime_hour = DateUtil.parse(item.get("data_time").toString());
                // Local temp file: theaty_station_data_<station>_<yyyyMMddHH>.csv
                String temp_file_path = "theaty_station_data_" + item.get("station_id").toString() + "_" + DateUtil.format(datetime_hour, "yyyyMMddHH") + ".csv";
                // Remote directory partitioned by month.
                String dir = "/GFS+WRF+ADCIRC+SWAN/data_GaoLanGang/" + DateUtil.format(datetime_hour, "yyyyMM");
                //region 插入到Ftp
                // One FTP session per group; a failed group does not abort the rest.
                // NOTE(review): reusing a single connection across groups would be
                // faster, but would change per-group failure isolation — confirm first.
                try (Ftp ftp = new Ftp(config, FtpMode.Passive)) {
                    if (!ftp.exist(dir)) {
                        ftp.mkdir(dir);
                    }

                    // Skip files already on the server unless is_replace is set.
                    if (ftp.exist(dir + "/" + temp_file_path) && !is_replace) {
                        log.info("[跳过]Ftp中该文件已存在: {}", temp_file_path);
                        continue;
                    }
                    CSVUtils.writeCSV(stationHourList, temp_file_path);

                    try {
                        ftp.upload(dir, new File(temp_file_path));
                        log.info("[{}]数据已推送到FTP.", description);
                    } finally {
                        // Delete the local temp CSV even when the upload fails,
                        // so failed runs do not leak files in the working directory.
                        FileUtil.del(new File(temp_file_path).getAbsoluteFile());
                        log.info("[{}]临时文件已删除.", description);
                    }
                } catch (Exception e) {
                    log.error("FTP操作出错: " + e.getMessage(), e);
                }
                //endregion
            } catch (Exception e) {
                log.error("下载数据到csv文件并上传到FTP失败: {}. 数据:{}", e.getMessage(), JSONUtil.toJsonStr(item), e);
            }

        }


    }
//endregion
}