package org.zjvis.datascience.service.csv.pool;

import lombok.Data;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import org.zjvis.datascience.common.pool.DefaultThreadFactoryImpl;
import org.zjvis.datascience.service.SftpConnectService;

import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import java.util.concurrent.*;

/**
 * Thread pool for CSV file parsing tasks.
 *
 * <p>Pool sizing is driven by the {@code csvParsePool.*} configuration properties.
 * Core threads are allowed to time out, so the pool shrinks to zero when idle.
 * When the bounded work queue is full, new tasks are discarded (as before) but a
 * warning is now logged so drops are no longer silent. The pool is shut down
 * gracefully when the Spring context closes.
 *
 * @date 2021-12-29
 */
@Component
@Data
public class CsvParsePool {
    private final static Logger logger = LoggerFactory.getLogger(CsvParsePool.class);
    private ThreadPoolExecutor executor;
    // Number of always-available worker threads (previously tuned values: 20, 100).
    @Value("${csvParsePool.corePoolSize:10}")
    private Integer corePoolSize;
    // Upper bound on worker threads under load.
    @Value("${csvParsePool.maximumPoolSize:160}")
    private Integer maximumPoolSize;
    // Seconds an idle thread survives (applies to core threads too, see init()).
    @Value("${csvParsePool.keepAliveTime:120}")
    private Integer keepAliveTime;
    // Capacity of the bounded task queue; tasks beyond this are discarded.
    @Value("${csvParsePool.blockQueueSize:10000}")
    private Integer blockQueueSize;

    /**
     * Builds the executor after property injection.
     */
    @PostConstruct
    public void init() {
        BlockingQueue<Runnable> workQueue = new ArrayBlockingQueue<>(blockQueueSize);
        ThreadFactory threadFactory = new DefaultThreadFactoryImpl("csvparse", false, 0);
        // Keep the original discard-on-saturation semantics, but log each drop so
        // lost parse tasks are visible in the logs instead of vanishing silently.
        RejectedExecutionHandler handler = (task, pool) ->
                logger.warn("CsvParsePool saturated (queue capacity {}), discarding task: {}",
                        blockQueueSize, task);
        executor = new ThreadPoolExecutor(corePoolSize, maximumPoolSize, keepAliveTime, TimeUnit.SECONDS,
                workQueue, threadFactory, handler);
        // Allow core threads to die after keepAliveTime so an idle pool frees all threads.
        executor.allowCoreThreadTimeOut(true);
    }

    /**
     * Shuts the pool down when the application context closes, so worker
     * threads do not leak across redeploys. Waits briefly for in-flight
     * tasks, then forces termination.
     */
    @PreDestroy
    public void destroy() {
        if (executor == null) {
            return;
        }
        executor.shutdown();
        try {
            if (!executor.awaitTermination(30, TimeUnit.SECONDS)) {
                executor.shutdownNow();
            }
        } catch (InterruptedException e) {
            executor.shutdownNow();
            // Preserve the interrupt status for callers further up the stack.
            Thread.currentThread().interrupt();
        }
    }

    public ThreadPoolExecutor getExecutor() {
        return executor;
    }
}
