/**
 * Copyright(c) 2025 hbswxx.com, All Rights Reserved. Author: kouqiang Create date: 2025/5/30
 */
package cn.sw.syncdata.job;

import cn.sw.syncdata.entity.HeartInfo;
import cn.sw.syncdata.job.heart.ThreadCreateHeart;
import cn.sw.syncdata.local.mapper.ILocalDataBaseHeaderMapper;
import cn.sw.syncdata.service.HeaderService;
import cn.sw.syncdata.threadpool.ThreadPoolManager;
import lombok.extern.slf4j.Slf4j;
import org.mybatis.logging.Logger;
import org.mybatis.logging.LoggerFactory;
import org.springframework.stereotype.Service;

import javax.annotation.Resource;
import java.util.List;

/**
 * description: Job that builds heartbeat-data generation tasks from remote header
 * records and submits them to the shared thread pool for execution.
 *
 * @author kouqiang
 * @date 2025/5/30 09:05
 */
@Service
@Slf4j
public class CreateHeaderJob {

    /** Maximum number of heartbeat records a single generation task may cover. */
    private static final int TASK_LIMIT = 5_000_000;

    /**
     * Number of header rows fetched from the remote source per query.
     * NOTE(review): this is always smaller than {@link #TASK_LIMIT}, so the
     * {@code result.size() < TASK_LIMIT} reset below triggers on every batch —
     * confirm whether the comparison was meant to use this batch size instead.
     */
    private static final int QUERY_BATCH_SIZE = 200_000;

    @Resource
    private HeaderService headerService;

    // NOTE(review): not referenced anywhere in this class; kept in case other
    // wiring depends on it — confirm before removing.
    @Resource
    private ILocalDataBaseHeaderMapper localDataBaseHeaderMapper;

    /**
     * Creates heartbeat-generation tasks and submits them to the thread pool.
     * <p>
     * The total workload is split into {@code total / TASK_LIMIT + 1} tasks.
     * For each task, a batch of header records is fetched starting from
     * {@code startId}, wrapped in a {@link ThreadCreateHeart} runnable together
     * with an output CSV file name, and handed to {@link ThreadPoolManager}.
     *
     * @param startId id to start querying remote header records from
     * @param sdate   start of the date range to generate data for
     * @param edate   end of the date range to generate data for
     * @param total   total number of records to process
     */
    public void createHeaderJob(int startId, String sdate, String edate, int total) {
        // Number of worker tasks needed to cover `total` records.
        int numJob = total / TASK_LIMIT + 1;
        log.info("开始创建心跳数据，合计任务数量{}", numJob);
        for (int i = 0; i < numJob; i++) {
            String filename = "d:\\tmp\\heart" + i + ".csv";
            List<HeartInfo> result = headerService.queryRemoteHeaderInfo(startId, QUERY_BATCH_SIZE);
            log.info("查询数据{}条，进行任务生成处理", result.size());
            if (result.isEmpty()) {
                // BUGFIX: the original code read result.get(result.size() - 1)
                // unconditionally, throwing IndexOutOfBoundsException on an
                // empty batch. With no rows returned, restart from the top
                // (mirrors the original "wrap around" reset) and skip submission.
                startId = 0;
                continue;
            }
            ThreadPoolManager.submitThread(
                    new ThreadCreateHeart(result, sdate, edate, TASK_LIMIT, filename));
            // Advance the cursor to the last id seen in this batch.
            startId = result.get(result.size() - 1).getId();
            if (result.size() < TASK_LIMIT) {
                // Fewer rows than the limit: assume the source is exhausted and
                // restart from the beginning on the next iteration.
                startId = 0;
            }
        }
    }
}
