package com.example.autoconfig;

import com.dangdang.ddframe.job.api.ElasticJob;
import com.dangdang.ddframe.job.api.dataflow.DataflowJob;
import com.dangdang.ddframe.job.config.JobCoreConfiguration;
import com.dangdang.ddframe.job.config.dataflow.DataflowJobConfiguration;
import com.dangdang.ddframe.job.config.simple.SimpleJobConfiguration;
import com.dangdang.ddframe.job.event.rdb.JobEventRdbConfiguration;
import com.dangdang.ddframe.job.lite.api.JobScheduler;
import com.dangdang.ddframe.job.lite.config.LiteJobConfiguration;
import com.dangdang.ddframe.job.lite.spring.api.SpringJobScheduler;
import com.dangdang.ddframe.job.reg.base.CoordinatorRegistryCenter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.AutoConfigureAfter;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Configuration;

import javax.annotation.PostConstruct;
import javax.annotation.Resource;
import javax.sql.DataSource;
import java.util.Map;

/**
 * Auto-configuration that discovers beans annotated with {@code @ElasticDataflowJob},
 * builds the corresponding ElasticJob-Lite dataflow configuration from the annotation
 * attributes, and starts a {@code SpringJobScheduler} for each of them.
 *
 * @author wyp
 * @since 2023/8/16
 */
@Configuration
@ConditionalOnBean(CoordinatorRegistryCenter.class)
@AutoConfigureAfter(ZookeeperAutoConfig.class)
public class DataflowJobAutoConfig {

    @Autowired
    private ApplicationContext applicationContext;

    @Resource
    private CoordinatorRegistryCenter zkCenter;

    // NOTE(review): injected unconditionally even though it is only used when
    // isJobEvent() is true — startup will fail if no DataSource bean exists. Confirm
    // every deployment of this starter actually provides one.
    @Autowired
    private DataSource dataSource;

    /**
     * Scans the application context for beans carrying {@code @ElasticDataflowJob}
     * and starts a scheduler for each one that implements {@link DataflowJob}.
     */
    @PostConstruct
    public void initDataflow() {
        Map<String, Object> candidates =
                applicationContext.getBeansWithAnnotation(ElasticDataflowJob.class);
        for (Object candidate : candidates.values()) {
            registerIfDataflowJob(candidate);
        }
    }

    /**
     * Starts a scheduler for the bean if it directly implements {@link DataflowJob}.
     * NOTE(review): only directly-declared interfaces are inspected; a job inheriting
     * {@code DataflowJob} through a superclass would be silently skipped — confirm
     * this matches how jobs are declared in this project.
     */
    private void registerIfDataflowJob(Object bean) {
        for (Class<?> iface : bean.getClass().getInterfaces()) {
            if (iface == DataflowJob.class) {
                scheduleJob(bean);
            }
        }
    }

    /**
     * Builds the core / dataflow / lite configurations from the bean's
     * {@code @ElasticDataflowJob} attributes and initializes the scheduler.
     */
    private void scheduleJob(Object bean) {
        ElasticDataflowJob meta = bean.getClass().getAnnotation(ElasticDataflowJob.class);

        // Core job configuration: name, cron expression, sharding count.
        var coreConfig = JobCoreConfiguration
                .newBuilder(meta.jobName(), meta.cron(), meta.shardingCount())
                .build();

        // Dataflow job type configuration (streaming vs. one-shot processing).
        var typeConfig = new DataflowJobConfiguration(
                coreConfig, bean.getClass().getCanonicalName(), meta.streamprocessing());

        // Lite root configuration: sharding strategy class and overwrite flag.
        var rootConfig = LiteJobConfiguration
                .newBuilder(typeConfig)
                .jobShardingStrategyClass(meta.jobStrategy().getCanonicalName())
                .overwrite(meta.ovverwrite())
                .build();

        if (meta.isJobEvent()) {
            // Persist job trace events to the relational DataSource.
            var eventConfig = new JobEventRdbConfiguration(dataSource);
            new SpringJobScheduler((ElasticJob) bean, zkCenter, rootConfig, eventConfig).init();
        } else {
            new SpringJobScheduler((ElasticJob) bean, zkCenter, rootConfig).init();
        }
    }
}
