package com.zhz.elasticjob.config;

import com.dangdang.ddframe.job.api.dataflow.DataflowJob;
import com.dangdang.ddframe.job.config.JobCoreConfiguration;
import com.dangdang.ddframe.job.config.dataflow.DataflowJobConfiguration;
import com.dangdang.ddframe.job.lite.api.listener.ElasticJobListener;
import com.dangdang.ddframe.job.lite.api.strategy.JobShardingStrategy;
import com.dangdang.ddframe.job.lite.config.LiteJobConfiguration;
import com.dangdang.ddframe.job.lite.spring.api.SpringJobScheduler;
import com.dangdang.ddframe.job.reg.base.CoordinatorRegistryCenter;
import com.zhz.elasticjob.annotation.ElasticDataflowJob;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Configuration;

import javax.annotation.PostConstruct;
import java.util.Map;

/**
 * Auto-configuration that registers every Spring bean annotated with
 * {@link ElasticDataflowJob} as an Elastic-Job Lite dataflow (streaming) job
 * against the shared ZooKeeper registry center.
 *
 * @author zhanghz001
 * @since 2021-08-05
 **/
@Slf4j
@Configuration
// Only meaningful when the ZooKeeper registry-center support is on the classpath.
@ConditionalOnClass(CoordinatorRegistryCenter.class)
public class ElasticDataflowJobConfiguration {
    @Autowired
    private ApplicationContext applicationContext;
    
    @Autowired
    private CoordinatorRegistryCenter coordinatorRegistryCenter;
    
    /**
     * Scans the Spring context for beans carrying {@link ElasticDataflowJob},
     * reads the scheduling attributes from the annotation, and registers each
     * qualifying {@link DataflowJob} implementation with the registry center.
     */
    @PostConstruct
    public void initDataflowJob() {
        // All beans marked with @ElasticDataflowJob, keyed by bean id.
        Map<String, Object> dataflowJobMap =
                applicationContext.getBeansWithAnnotation(ElasticDataflowJob.class);
        
        for (Object bean : dataflowJobMap.values()) {
            // The annotation only takes effect on DataflowJob implementations.
            if (!(bean instanceof DataflowJob)) {
                continue;
            }
            DataflowJob<?> dataflowJob = (DataflowJob<?>) bean;
            
            // Read the @ElasticDataflowJob annotation from the bean class.
            ElasticDataflowJob elasticDataflowJob = dataflowJob
                    .getClass()
                    .getAnnotation(ElasticDataflowJob.class);
            if (elasticDataflowJob == null) {
                // NOTE(review): getBeansWithAnnotation can return proxied beans
                // whose runtime class no longer exposes the annotation (unless
                // it is @Inherited) — skip instead of failing with an NPE below.
                log.warn("@ElasticDataflowJob not readable on {}, skipping", dataflowJob.getClass());
                continue;
            }
            
            // Scheduling attributes declared on the bean.
            String cron = elasticDataflowJob.cron();
            log.info("dataflowJob 的 cron 的表达式 : {}", cron);
            String jobName = elasticDataflowJob.name();
            boolean overwrite = elasticDataflowJob.overwrite();
            int shardingTotalCount = elasticDataflowJob.shardingTotalCount();
            boolean streamingProcess = elasticDataflowJob.streamingProcess();
            boolean failover = elasticDataflowJob.failover();
            Class<? extends ElasticJobListener>[] listeners = elasticDataflowJob.listeners();
            Class<? extends JobShardingStrategy> jobShardingStrategy = elasticDataflowJob.jobShardingStrategy();
            boolean monitorExecution = elasticDataflowJob.monitorExecution();
            initDataflowJobBean(jobName, cron, shardingTotalCount,
                    streamingProcess, overwrite, dataflowJob,
                    failover, listeners, jobShardingStrategy, monitorExecution);
        }
    }
    
    /**
     * Builds the Elastic-Job Lite configuration for a single dataflow job and
     * starts it through a {@link SpringJobScheduler}.
     *
     * @param jobName             unique job name within the registry center
     * @param cron                cron expression driving the job schedule
     * @param shardingTotalCount  total number of shards for the job
     * @param streamingProcess    whether the job keeps fetching data until none is left
     * @param overwrite           whether local config overwrites the registry-center config
     * @param dataflowJob         the job bean to schedule
     * @param failover            whether failed shards are taken over by live instances
     * @param listeners           listener classes, instantiated via their no-arg constructors
     * @param jobShardingStrategy sharding strategy class, resolved by the framework by name
     * @param monitorExecution    whether execution status is tracked in the registry center
     */
    @SneakyThrows
    public void initDataflowJobBean(String jobName, String cron, int shardingTotalCount,
                                    boolean streamingProcess, boolean overwrite,
                                    DataflowJob<?> dataflowJob, boolean failover,
                                    Class<? extends ElasticJobListener>[] listeners,
                                    Class<? extends JobShardingStrategy> jobShardingStrategy, boolean monitorExecution) {
        // Core job configuration: name, cron and sharding count.
        JobCoreConfiguration dataflowCoreConfig = JobCoreConfiguration
                .newBuilder(jobName, cron, shardingTotalCount)
                .failover(failover)
                .build();
        
        // DATAFLOW-type configuration. getName() (binary name) is used instead
        // of getCanonicalName() because the framework resolves the class via
        // Class.forName, which requires '$'-separated nested-class names.
        DataflowJobConfiguration dataflowJobConfig = new DataflowJobConfiguration(
                dataflowCoreConfig,
                dataflowJob.getClass().getName(),
                streamingProcess
        );
        
        // Instantiate every configured listener through its no-arg constructor.
        ElasticJobListener[] elasticJobListeners = new ElasticJobListener[listeners.length];
        for (int i = 0; i < listeners.length; i++) {
            elasticJobListeners[i] = listeners[i].getConstructor().newInstance();
        }
        
        // Lite root configuration for the job.
        LiteJobConfiguration dataflowJobRootConfig = LiteJobConfiguration
                .newBuilder(dataflowJobConfig)
                .overwrite(overwrite)
                .jobShardingStrategyClass(jobShardingStrategy.getName())
                .monitorExecution(monitorExecution)
                .build();
        
        // Register the job with the registry center and start scheduling.
        new SpringJobScheduler(dataflowJob, coordinatorRegistryCenter,
                dataflowJobRootConfig, elasticJobListeners)
                .init();
    }
}
