package dubbo.server2.impl.elasticjob;

import com.dangdang.ddframe.job.api.dataflow.DataflowJob;
import com.dangdang.ddframe.job.api.simple.SimpleJob;
import com.dangdang.ddframe.job.config.JobCoreConfiguration;
import com.dangdang.ddframe.job.config.dataflow.DataflowJobConfiguration;
import com.dangdang.ddframe.job.config.simple.SimpleJobConfiguration;
import com.dangdang.ddframe.job.lite.api.JobScheduler;
import com.dangdang.ddframe.job.lite.config.LiteJobConfiguration;
import com.dangdang.ddframe.job.lite.spring.api.SpringJobScheduler;
import com.dangdang.ddframe.job.reg.zookeeper.ZookeeperRegistryCenter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import javax.annotation.PostConstruct;
import javax.annotation.Resource;


@Configuration
public class ElasticJobConfig {

    /** ZooKeeper registry center shared by every Elastic-Job scheduler built here. */
    @Autowired
    private ZookeeperRegistryCenter regCenter;

    /**
     * Registers the scheduler for {@code MyDataflowJob}. Spring invokes {@code init()}
     * on the returned scheduler (see {@code @Bean(initMethod)}), which starts the job.
     *
     * @param myDataflowJob          the dataflow job bean to schedule
     * @param cron                   cron expression, from property {@code myDataflowJob.cron}
     * @param shardingTotalCount     total number of shards, from {@code myDataflowJob.shardingTotalCount}
     * @param shardingItemParameters shard-to-parameter mapping (e.g. "0=A,1=B"),
     *                               from {@code myDataflowJob.shardingItemParameters}
     * @return the configured scheduler bean
     */
    @Bean(initMethod = "init")
    public JobScheduler initMyDataflowJob(final MyDataflowJob myDataflowJob,
                                          @Value("${myDataflowJob.cron}") final String cron,
                                          @Value("${myDataflowJob.shardingTotalCount}") final int shardingTotalCount,
                                          @Value("${myDataflowJob.shardingItemParameters}") final String shardingItemParameters) {
        // Use the declared class literal rather than myDataflowJob.getClass(): if Spring
        // wraps the bean in a CGLIB proxy, getClass() would return the proxy class and the
        // job would be registered in ZooKeeper under a generated proxy name.
        // NOTE(review): behavior-identical when no AOP proxying applies — confirm.
        return new SpringJobScheduler(myDataflowJob, regCenter,
                buildDataflowJobConfiguration(MyDataflowJob.class, cron, shardingTotalCount, shardingItemParameters));
    }

    /**
     * Registers the scheduler for {@code MySimpleJob}. Spring invokes {@code init()}
     * on the returned scheduler, which starts the job.
     *
     * @param mySimpleJob            the simple job bean to schedule
     * @param cron                   cron expression, from property {@code mySimpleJob.cron}
     * @param shardingTotalCount     total number of shards, from {@code mySimpleJob.shardingTotalCount}
     * @param shardingItemParameters shard-to-parameter mapping, from {@code mySimpleJob.shardingItemParameters}
     * @return the configured scheduler bean
     */
    @Bean(initMethod = "init")
    public JobScheduler initMySimpleJob(final MySimpleJob mySimpleJob,
                                        @Value("${mySimpleJob.cron}") final String cron,
                                        @Value("${mySimpleJob.shardingTotalCount}") final int shardingTotalCount,
                                        @Value("${mySimpleJob.shardingItemParameters}") final String shardingItemParameters) {
        // Same reasoning as above: pass the declared class, not getClass() on the bean.
        return new SpringJobScheduler(mySimpleJob, regCenter,
                buildSimpleJobConfiguration(MySimpleJob.class, cron, shardingTotalCount, shardingItemParameters));
    }

    /**
     * Builds the Lite configuration for a dataflow job.
     *
     * <p>Streaming can be toggled via {@code DataflowJobConfiguration} (last constructor
     * argument, {@code true} here). In streaming mode the job keeps running until
     * {@code fetchData} returns {@code null} or an empty collection; in non-streaming mode
     * {@code fetchData} and {@code processData} each run exactly once per trigger. When
     * streaming, {@code processData} should update each item's state after processing so
     * {@code fetchData} does not pick it up again and the job never terminates. The
     * streaming model follows the TbSchedule design and suits continuous data processing.
     *
     * @param jobClass               the dataflow job implementation class
     * @param cron                   cron expression for the job trigger
     * @param shardingTotalCount     total number of shards
     * @param shardingItemParameters shard-to-parameter mapping
     * @return the Lite job configuration, overwriting any config already stored in ZooKeeper
     */
    private LiteJobConfiguration buildDataflowJobConfiguration(final Class<? extends DataflowJob> jobClass,
                                                               final String cron,
                                                               final int shardingTotalCount,
                                                               final String shardingItemParameters) {
        final JobCoreConfiguration coreConfig = JobCoreConfiguration
                .newBuilder(jobClass.getName(), cron, shardingTotalCount)
                .shardingItemParameters(shardingItemParameters)
                .build();
        final DataflowJobConfiguration jobConfig =
                new DataflowJobConfiguration(coreConfig, jobClass.getCanonicalName(), true);
        // overwrite(true): push this local configuration to the registry on startup.
        return LiteJobConfiguration.newBuilder(jobConfig).overwrite(true).build();
    }

    /**
     * Builds the Lite configuration for a simple (non-dataflow) job.
     *
     * @param jobClass               the simple job implementation class
     * @param cron                   cron expression for the job trigger
     * @param shardingTotalCount     total number of shards
     * @param shardingItemParameters shard-to-parameter mapping
     * @return the Lite job configuration, overwriting any config already stored in ZooKeeper
     */
    private LiteJobConfiguration buildSimpleJobConfiguration(final Class<? extends SimpleJob> jobClass,
                                                             final String cron,
                                                             final int shardingTotalCount,
                                                             final String shardingItemParameters) {
        final JobCoreConfiguration coreConfig = JobCoreConfiguration
                .newBuilder(jobClass.getName(), cron, shardingTotalCount)
                .shardingItemParameters(shardingItemParameters)
                .build();
        final SimpleJobConfiguration jobConfig =
                new SimpleJobConfiguration(coreConfig, jobClass.getCanonicalName());
        // overwrite(true): push this local configuration to the registry on startup.
        return LiteJobConfiguration.newBuilder(jobConfig).overwrite(true).build();
    }

}