package com.haopt.elasticjob.config;

import com.dangdang.ddframe.job.api.dataflow.DataflowJob;
import com.dangdang.ddframe.job.config.JobCoreConfiguration;
import com.dangdang.ddframe.job.config.dataflow.DataflowJobConfiguration;
import com.dangdang.ddframe.job.lite.api.strategy.JobShardingStrategy;
import com.dangdang.ddframe.job.lite.config.LiteJobConfiguration;
import com.dangdang.ddframe.job.lite.spring.api.SpringJobScheduler;
import com.dangdang.ddframe.job.reg.base.CoordinatorRegistryCenter;
import com.haopt.elasticjob.annotation.ElasticDataFlowJob;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Configuration;

import javax.annotation.PostConstruct;
import java.util.Map;

/**
 * Configuration class for dataflow jobs: scans the Spring context for beans
 * annotated with {@link ElasticDataFlowJob} and registers each one as an
 * Elastic-Job Lite dataflow job against the coordinator registry center.
 *
 * @author haopt
 */
@Configuration
@ConditionalOnClass(CoordinatorRegistryCenter.class)
public class ElasticDataFlowJobConfiguration {
    @Autowired
    private ApplicationContext applicationContext;

    @Autowired
    private CoordinatorRegistryCenter coordinatorRegistryCenter;

    /**
     * Builds and starts a {@link SpringJobScheduler} for every Spring bean that
     * both carries {@link ElasticDataFlowJob} and implements {@link DataflowJob}.
     * Runs once after dependency injection completes.
     */
    @PostConstruct
    public void initJob() {
        // 1. Collect all beans carrying the @ElasticDataFlowJob annotation.
        Map<String, Object> beans = applicationContext.getBeansWithAnnotation(ElasticDataFlowJob.class);
        // 2. Register each qualifying bean as a dataflow job.
        for (Map.Entry<String, Object> entry : beans.entrySet()) {
            Object candidate = entry.getValue();
            if (!(candidate instanceof DataflowJob)) {
                continue;
            }
            DataflowJob<?> bean = (DataflowJob<?>) candidate;
            ElasticDataFlowJob annotation = bean.getClass().getAnnotation(ElasticDataFlowJob.class);
            if (annotation == null) {
                // The bean may be a proxy whose runtime class does not expose the
                // annotation directly; skip it instead of throwing an NPE below.
                continue;
            }
            String name = annotation.name();
            String cron = annotation.cron();
            Class<? extends JobShardingStrategy> shardingStrategyClass = annotation.jobShardingStrategyClass();
            boolean monitorExecution = annotation.monitorExecution();
            boolean failover = annotation.failover();
            boolean overwrite = annotation.overwrite();
            int shardingTotalCount = annotation.shardingTotalCount();
            boolean streamingProcess = annotation.streamingProcess();
            // Core job configuration: name, cron expression, and sharding count.
            JobCoreConfiguration dataflowCoreConfig = JobCoreConfiguration.newBuilder(name, cron, shardingTotalCount)
                    .failover(failover)
                    .build();
            // DATAFLOW-type configuration. Use getName() (not getCanonicalName())
            // so nested job classes remain resolvable via Class.forName.
            DataflowJobConfiguration dataflowJobConfig =
                    new DataflowJobConfiguration(dataflowCoreConfig, bean.getClass().getName(), streamingProcess);
            // Lite root configuration wrapping the dataflow configuration.
            LiteJobConfiguration dataflowJobRootConfig = LiteJobConfiguration.newBuilder(dataflowJobConfig)
                    .overwrite(overwrite)
                    .jobShardingStrategyClass(shardingStrategyClass.getName())
                    .monitorExecution(monitorExecution)
                    .build();
            // Schedule the job; init() registers it with the registry center.
            new SpringJobScheduler(bean, coordinatorRegistryCenter, dataflowJobRootConfig).init();
        }
    }
}
