package ltd.hxya.novel.analysis.config;

import com.baomidou.dynamic.datasource.plugin.MasterSlaveAutoRoutingPlugin;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import lombok.extern.slf4j.Slf4j;
import ltd.hxya.novel.analysis.event.CrawlFinisherEvent;
import ltd.hxya.novel.analysis.mapper.HiveRowDataMapper;
import ltd.hxya.novel.analysis.service.impl.HiveRowDataServiceImpl;
import ltd.hxya.novel.entity.rowdata.NovelRowData;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
@Slf4j
public class AnalysisConfig {

    @Autowired
    private HiveRowDataMapper hiveRowDataMapper;

    @Autowired
    private ThreadPoolExecutor threadPoolExecutor;

    private Integer dataSize=0;

    private Boolean isFinish=false;

    @Autowired
    private ApplicationEventPublisher publisher;
    //在项目启动之后，使用线程池的调度，作为定时任务

    @Value("${analysis.is-finish.delayed}")
    private Integer delayed;

    @PostConstruct
    public void isCrawlFinish(){
        ScheduledThreadPoolExecutor executor = new ScheduledThreadPoolExecutor(5);
        executor.scheduleWithFixedDelay(()->{
                //从hive数据库中查询信息，并判断信息的数量与上一次查询的数量是否有变化，如果又增加
            try {
                List<NovelRowData> novelRowDataList = hiveRowDataMapper.getList();
                int size = novelRowDataList.size();

                if (size>dataSize){
                    dataSize=size;
                    log.info("爬虫还未结束，等待下一次执行");
                    return;
                }
                if (size<dataSize||size==0||isFinish==null) {
                    return;
                }

                //数据的条数在规定的时间内无变化，认定为已经爬取完毕
                if (isFinish){
                    log.info("爬取小说部分已经结束，开始进行分类筛选");
                    publisher.publishEvent(new CrawlFinisherEvent(this));
                    isFinish=null;
                    return;
                }
                log.info("爬取小说部分可能结束，等待下一次确认");
                isFinish=true;
            }catch (Exception exception){
                exception.printStackTrace();
            }

        },0,delayed, TimeUnit.MINUTES);
    }
}
