package com.github.pig.daemon.job.api;

import com.dangdang.ddframe.job.api.dataflow.DataflowJob;
import com.dangdang.ddframe.job.config.JobCoreConfiguration;
import com.dangdang.ddframe.job.config.dataflow.DataflowJobConfiguration;
import com.dangdang.ddframe.job.event.JobEventConfiguration;
import com.dangdang.ddframe.job.lite.api.listener.ElasticJobListener;
import com.dangdang.ddframe.job.lite.config.LiteJobConfiguration;
import com.dangdang.ddframe.job.lite.spring.api.SpringJobScheduler;
import com.dangdang.ddframe.job.reg.zookeeper.ZookeeperRegistryCenter;

import javax.annotation.PostConstruct;
import javax.annotation.Resource;


/**
 * Base class for Elastic-Job dataflow jobs.
 *
 * <p>Subclasses implement the two {@link DataflowJob} callbacks:
 * {@code fetchData} (fetch the data) and {@code processData} (process it).
 *
 * <ul>
 *   <li>Streaming mode: the job keeps fetching until {@code fetchData}
 *       returns {@code null} or an empty collection; otherwise it runs on.</li>
 *   <li>Non-streaming mode: each trigger runs {@code fetchData} and
 *       {@code processData} exactly once and the run completes.</li>
 * </ul>
 *
 * @author hankeke on 2018/8/8.
 * @version 1.0
 */
public abstract class DataflowJobApi<T> implements DataflowJob<T> {

    /** Zookeeper registry center coordinating the job instances. */
    @Resource
    protected ZookeeperRegistryCenter zookeeperRegistryCenter;

    /** Event configuration used to trace job executions. */
    @Resource
    protected JobEventConfiguration jobEventConfiguration;

    /**
     * Cron expression that schedules the job.
     *
     * @return a Quartz-style cron expression, e.g. {@code "0/5 * * * * ?"}
     */
    protected abstract String getCron();

    /**
     * Job listeners to attach; override to register listeners.
     *
     * @return the listeners, empty by default
     */
    protected ElasticJobListener[] getJobListener() {
        return new ElasticJobListener[0];
    }

    /**
     * Total number of job shards.
     *
     * @return sharding total count, default {@code 1}
     */
    protected int getShardingTotalCount() {
        return 1;
    }

    /**
     * Mapping of shard item to its individual parameter.
     *
     * <p>
     * Item and parameter are separated by '='; pairs are separated by ','
     * (map-like). Items start at 0 and must be less than the sharding total
     * count, e.g. {@code 0=a,1=b,2=c}.
     * </p>
     *
     * @return the mapping string, empty by default
     */
    protected String getShardingItemParameters() {
        return "";
    }

    /**
     * Whether the job processes data in streaming mode. Previously this was
     * hard-coded to {@code true}; override and return {@code false} for
     * one-shot (non-streaming) processing per trigger.
     *
     * @return {@code true} (the default) for streaming processing
     */
    protected boolean isStreamingProcess() {
        return true;
    }

    /**
     * Concrete class of the job implementation; its name is used as the
     * registered job name.
     *
     * @return the job implementation class
     */
    @SuppressWarnings("rawtypes")
    protected abstract Class getJobClass();

    /**
     * Registers and starts this job with the Elastic-Job Lite scheduler once
     * dependency injection has completed.
     */
    @PostConstruct
    public void simpleJobScheduler() {
        new SpringJobScheduler(this,
                zookeeperRegistryCenter,
                getLiteJobConfiguration(this.getJobClass(), getCron(), getShardingTotalCount(), getShardingItemParameters()),
                jobEventConfiguration,
                getJobListener()).init();
    }

    /**
     * Builds the lite job configuration for this job.
     *
     * @param jobClass               the job implementation class; its name becomes the job name
     * @param cron                   cron expression driving the schedule
     * @param shardingTotalCount     total number of shards
     * @param shardingItemParameters per-shard parameter mapping (may be empty)
     * @return the built configuration; {@code overwrite(true)} so local settings
     *         always replace what is stored in the registry center
     */
    @SuppressWarnings("rawtypes")
    protected LiteJobConfiguration getLiteJobConfiguration(final Class<? extends DataflowJobApi> jobClass,
                                                           final String cron,
                                                           final int shardingTotalCount,
                                                           final String shardingItemParameters) {
        return LiteJobConfiguration
                .newBuilder(
                        new DataflowJobConfiguration(
                                JobCoreConfiguration.newBuilder(jobClass.getName(), cron, shardingTotalCount)
                                        .shardingItemParameters(shardingItemParameters)
                                        .build(),
                                jobClass.getCanonicalName(),
                                // was hard-coded true; now overridable, default unchanged
                                isStreamingProcess()))
                .overwrite(true).build();
    }

}
