package me.spring.cloud.common.components.elasticjob.lite.parser;


import com.dangdang.ddframe.job.config.JobCoreConfiguration;
import com.dangdang.ddframe.job.config.JobTypeConfiguration;
import com.dangdang.ddframe.job.config.dataflow.DataflowJobConfiguration;
import com.dangdang.ddframe.job.config.script.ScriptJobConfiguration;
import com.dangdang.ddframe.job.config.simple.SimpleJobConfiguration;
import com.dangdang.ddframe.job.event.rdb.JobEventRdbConfiguration;
import com.dangdang.ddframe.job.executor.handler.JobProperties.JobPropertiesEnum;
import com.dangdang.ddframe.job.lite.config.LiteJobConfiguration;
import com.dangdang.ddframe.job.lite.spring.api.SpringJobScheduler;
import com.dangdang.ddframe.job.reg.zookeeper.ZookeeperRegistryCenter;
import me.spring.cloud.common.components.elasticjob.lite.annotation.ElasticJobConf;
import me.spring.cloud.common.components.elasticjob.lite.base.ElasticJobAttributeTag;
import me.spring.cloud.common.components.elasticjob.lite.service.ElasticJobService;
import me.spring.cloud.common.components.elasticjob.lite.util.BeanDefinitionUtil;
import me.spring.cloud.common.components.properties.CustomerPropertyPlaceholderHelper;
import me.spring.cloud.common.components.util.constant.CharConstant;
import me.spring.cloud.common.components.util.lang.StringUtil;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.DefaultListableBeanFactory;
import org.springframework.beans.factory.support.ManagedList;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.core.env.Environment;
import org.springframework.util.StringUtils;

/**
 * Job parser class.
 * <p>Parses job definitions from {@code @ElasticJobConf} annotations and initializes them.</p>
 */
public class ElasticJobConfParser implements ApplicationContextAware {

  private static final Logger logger = LoggerFactory.getLogger(ElasticJobConfParser.class);

  /** Prefix for externalized job overrides: {@code elastic.job.<jobName>.<field>}. */
  private static final String PREFIX = "elastic.job.";

  @Autowired
  private ZookeeperRegistryCenter zookeeperRegistryCenter;

  private Environment environment;

  private final CustomerPropertyPlaceholderHelper propertyPlaceholderHelper;

  @Autowired(required = false)
  private ElasticJobService elasticJobService;

  public ElasticJobConfParser() {
    this.propertyPlaceholderHelper = new CustomerPropertyPlaceholderHelper("${", "}");
  }

  /**
   * Scans the context for beans annotated with {@link ElasticJobConf}, builds the
   * elastic-job configuration for each and registers/initializes a
   * {@link SpringJobScheduler} per job. When {@code jobNum > 1}, one scheduler is
   * created per numbered instance ({@code <name>_1 ... <name>_n}).
   *
   * @param ctx the Spring application context to scan and register schedulers in
   * @throws BeansException if bean lookup or registration fails
   */
  @Override
  public void setApplicationContext(ApplicationContext ctx) throws BeansException {
    environment = ctx.getEnvironment();
    Map<String, Object> beanMap = ctx.getBeansWithAnnotation(ElasticJobConf.class);
    for (Object confBean : beanMap.values()) {
      Class<?> clz = confBean.getClass();
      Class<?>[] jobInterfaces = clz.getInterfaces();
      if (jobInterfaces.length == 0) {
        // Fail fast with a clear message instead of an ArrayIndexOutOfBoundsException below.
        throw new IllegalStateException(
            "@ElasticJobConf bean " + clz.getName()
                + " must implement an elastic-job interface (SimpleJob/DataflowJob/ScriptJob)");
      }
      // The job type is derived from the first implemented interface, e.g. "SimpleJob".
      String jobTypeName = jobInterfaces[0].getSimpleName();
      ElasticJobConf conf = clz.getAnnotation(ElasticJobConf.class);

      String jobClass = clz.getName();
      String jobName = propertyPlaceholderHelper.resolvePlaceholder(environment, conf.name());
      String jobNum = getEnvironmentStringValue(jobName, ElasticJobAttributeTag.jobNum, conf.jobNum());
      String suffix = getEnvironmentStringValue(jobName, ElasticJobAttributeTag.SUFFIX, conf.suffix());
      String qualifiedJobName = jobName;
      if (!StringUtil.isEmpty(suffix)) {
        qualifiedJobName = jobName + CharConstant.DASH + suffix;
      }
      int jobCount;
      try {
        jobCount = Integer.parseInt(jobNum);
      } catch (NumberFormatException e) {
        throw new IllegalArgumentException(
            "Invalid jobNum '" + jobNum + "' configured for job " + jobName, e);
      }
      if (jobCount > 1) {
        // Register one scheduler per numbered instance; the numbered name is used
        // both for environment property lookups and as the registered job name.
        for (int i = 1; i <= jobCount; i++) {
          String numberedName = qualifiedJobName + "_" + i;
          this.createJobBean(numberedName, numberedName, jobTypeName, jobClass, conf, confBean, ctx);
        }
      } else {
        this.createJobBean(jobName, qualifiedJobName, jobTypeName, jobClass, conf, confBean, ctx);
      }
    }

    // Start the job-registration listener: when a job is added, watch the data change
    // in ZooKeeper so the job is also initialized automatically on the other nodes.
    if (elasticJobService != null) {
      elasticJobService.monitorJobRegister();
    }
  }

  /**
   * Builds the elastic-job configuration for one job instance, registers a prototype
   * {@link SpringJobScheduler} bean definition for it and initializes the scheduler.
   *
   * @param jobName           name used for environment property lookups
   * @param registeredJobName name registered with the scheduler/ZooKeeper (may carry a suffix)
   * @param jobTypeName       simple name of the job interface (SimpleJob/DataflowJob/ScriptJob)
   * @param jobClass          fully-qualified class name of the job bean
   * @param conf              annotation supplying the default configuration values
   * @param confBean          the annotated job bean instance itself
   * @param ctx               Spring context used to register and fetch the scheduler bean
   */
  private void createJobBean(String jobName, String registeredJobName, String jobTypeName,
      String jobClass, ElasticJobConf conf, Object confBean, ApplicationContext ctx) {
    String cron = getEnvironmentStringValue(jobName, ElasticJobAttributeTag.CRON, conf.cron());
    String shardingItemParameters = getEnvironmentStringValue(jobName,
        ElasticJobAttributeTag.SHARDING_ITEM_PARAMETERS,
        conf.shardingItemParameters());
    String description = getEnvironmentStringValue(jobName, ElasticJobAttributeTag.DESCRIPTION, conf.description());
    String jobParameter = getEnvironmentStringValue(jobName, ElasticJobAttributeTag.JOB_PARAMETER,
        conf.jobParameter());
    String jobExceptionHandler = getEnvironmentStringValue(jobName, ElasticJobAttributeTag.JOB_EXCEPTION_HANDLER,
        conf.jobExceptionHandler());
    String executorServiceHandler = getEnvironmentStringValue(jobName,
        ElasticJobAttributeTag.EXECUTOR_SERVICE_HANDLER,
        conf.executorServiceHandler());

    String jobShardingStrategyClass = getEnvironmentStringValue(jobName,
        ElasticJobAttributeTag.JOB_SHARDING_STRATEGY_CLASS,
        conf.jobShardingStrategyClass());
    boolean eventTraceRdb = getEnvironmentBooleanValue(jobName, ElasticJobAttributeTag.EVENT_TRACE_RDB,
        conf.eventTraceRdb());
    String scriptCommandLine = getEnvironmentStringValue(jobName, ElasticJobAttributeTag.SCRIPT_COMMAND_LINE,
        conf.scriptCommandLine());

    boolean failover = getEnvironmentBooleanValue(jobName, ElasticJobAttributeTag.FAILOVER, conf.failover());
    boolean misfire = getEnvironmentBooleanValue(jobName, ElasticJobAttributeTag.MISFIRE, conf.misfire());
    boolean overwrite = getEnvironmentBooleanValue(jobName, ElasticJobAttributeTag.OVERWRITE, conf.overwrite());
    boolean disabled = getEnvironmentBooleanValue(jobName, ElasticJobAttributeTag.DISABLED, conf.disabled());
    boolean monitorExecution = getEnvironmentBooleanValue(jobName, ElasticJobAttributeTag.MONITOR_EXECUTION,
        conf.monitorExecution());
    boolean streamingProcess = getEnvironmentBooleanValue(jobName, ElasticJobAttributeTag.STREAMING_PROCESS,
        conf.streamingProcess());

    int shardingTotalCount = getEnvironmentIntValue(jobName, ElasticJobAttributeTag.SHARDING_TOTAL_COUNT,
        conf.shardingTotalCount());
    int monitorPort = getEnvironmentIntValue(jobName, ElasticJobAttributeTag.MONITOR_PORT, conf.monitorPort());
    int maxTimeDiffSeconds = getEnvironmentIntValue(jobName, ElasticJobAttributeTag.MAX_TIME_DIFF_SECONDS,
        conf.maxTimeDiffSeconds());
    int reconcileIntervalMinutes = getEnvironmentIntValue(jobName, ElasticJobAttributeTag.RECONCILE_INTERVAL_MINUTES,
        conf.reconcileIntervalMinutes());

    // Core configuration shared by all job types.
    JobCoreConfiguration coreConfig =
        JobCoreConfiguration.newBuilder(registeredJobName, cron, shardingTotalCount)
            .shardingItemParameters(shardingItemParameters)
            .description(description)
            .failover(failover)
            .jobParameter(jobParameter)
            .misfire(misfire)
            .jobProperties(JobPropertiesEnum.JOB_EXCEPTION_HANDLER.getKey(), jobExceptionHandler)
            .jobProperties(JobPropertiesEnum.EXECUTOR_SERVICE_HANDLER.getKey(), executorServiceHandler)
            .build();

    // Type-specific configuration. Fail fast on an unknown job type instead of
    // passing a null typeConfig into LiteJobConfiguration (which would NPE later).
    JobTypeConfiguration typeConfig;
    switch (jobTypeName) {
      case "SimpleJob":
        typeConfig = new SimpleJobConfiguration(coreConfig, jobClass);
        break;
      case "DataflowJob":
        typeConfig = new DataflowJobConfiguration(coreConfig, jobClass, streamingProcess);
        break;
      case "ScriptJob":
        typeConfig = new ScriptJobConfiguration(coreConfig, scriptCommandLine);
        break;
      default:
        throw new IllegalStateException(
            "Unsupported job type '" + jobTypeName + "' for job " + jobName);
    }

    LiteJobConfiguration jobConfig = LiteJobConfiguration.newBuilder(typeConfig)
        .overwrite(overwrite)
        .disabled(disabled)
        .monitorPort(monitorPort)
        .monitorExecution(monitorExecution)
        .maxTimeDiffSeconds(maxTimeDiffSeconds)
        .jobShardingStrategyClass(jobShardingStrategyClass)
        .reconcileIntervalMinutes(reconcileIntervalMinutes)
        .build();

    List<BeanDefinition> elasticJobListeners = getTargetElasticJobListeners(conf);

    // Build the SpringJobScheduler bean definition used to initialize the job.
    BeanDefinitionBuilder factory = BeanDefinitionBuilder.rootBeanDefinition(SpringJobScheduler.class);
    factory.setScope(BeanDefinition.SCOPE_PROTOTYPE);
    // Script jobs are executed by elastic-job from the command line, so the
    // scheduler constructor takes no job bean instance in that case.
    if ("ScriptJob".equals(jobTypeName)) {
      factory.addConstructorArgValue(null);
    } else {
      factory.addConstructorArgValue(confBean);
    }
    factory.addConstructorArgValue(zookeeperRegistryCenter);
    factory.addConstructorArgValue(jobConfig);

    // Optional execution-trace data source, resolved by its well-known bean name.
    if (eventTraceRdb) {
      BeanDefinitionBuilder rdbFactory = BeanDefinitionBuilder.rootBeanDefinition(JobEventRdbConfiguration.class);
      rdbFactory.addConstructorArgReference(ElasticJobAttributeTag.ELASTIC_JOB_LOG_DS_NAME);
      factory.addConstructorArgValue(rdbFactory.getBeanDefinition());
    }

    factory.addConstructorArgValue(elasticJobListeners);
    DefaultListableBeanFactory beanFactory = (DefaultListableBeanFactory) ctx
        .getAutowireCapableBeanFactory();
    String schedulerBeanName = registeredJobName + "SpringJobScheduler";
    beanFactory.registerBeanDefinition(schedulerBeanName, factory.getBeanDefinition());
    SpringJobScheduler springJobScheduler = (SpringJobScheduler) ctx.getBean(schedulerBeanName);
    springJobScheduler.init();
    logger.info("【{}】\t{}\tinit success", registeredJobName, jobClass);
  }

  /**
   * Collects the bean definitions of the configured job listeners: an optional plain
   * listener plus an optional distributed (once-per-cluster) listener with its
   * started/completed timeouts.
   *
   * @param conf annotation supplying the listener class names and timeouts
   * @return a managed list of listener bean definitions (possibly empty)
   */
  private List<BeanDefinition> getTargetElasticJobListeners(ElasticJobConf conf) {
    List<BeanDefinition> result = new ManagedList<>(2);
    String listeners = getEnvironmentStringValue(conf.name(), ElasticJobAttributeTag.LISTENER, conf.listener());
    if (StringUtils.hasText(listeners)) {
      BeanDefinitionBuilder factory = BeanDefinitionBuilder.rootBeanDefinition(listeners);
      factory.setScope(BeanDefinition.SCOPE_PROTOTYPE);
      result.add(factory.getBeanDefinition());
    }

    String distributedListeners = getEnvironmentStringValue(conf.name(), ElasticJobAttributeTag.DISTRIBUTED_LISTENER,
        conf.distributedListener());
    long startedTimeoutMilliseconds = getEnvironmentLongValue(conf.name(),
        ElasticJobAttributeTag.DISTRIBUTED_LISTENER_STARTED_TIMEOUT_MILLISECONDS, conf.startedTimeoutMilliseconds());
    long completedTimeoutMilliseconds = getEnvironmentLongValue(conf.name(),
        ElasticJobAttributeTag.DISTRIBUTED_LISTENER_COMPLETED_TIMEOUT_MILLISECONDS,
        conf.completedTimeoutMilliseconds());

    return BeanDefinitionUtil
        .getBeanDefinitions(result, distributedListeners, startedTimeoutMilliseconds, completedTimeoutMilliseconds);
  }

  /**
   * Resolves a job property from the environment ({@code elastic.job.<jobName>.<fieldName>}),
   * falling back to the annotation default when the property is absent or blank.
   *
   * @param jobName      job name used to build the property key
   * @param fieldName    property field name
   * @param defaultValue value from the annotation used as fallback
   * @return the resolved value
   */
  private String getEnvironmentStringValue(String jobName, String fieldName, String defaultValue) {
    String value = environment.getProperty(PREFIX + jobName + "." + fieldName);
    return StringUtils.hasText(value) ? value : defaultValue;
  }

  /** Integer variant of {@link #getEnvironmentStringValue(String, String, String)}. */
  private int getEnvironmentIntValue(String jobName, String fieldName, int defaultValue) {
    String value = environment.getProperty(PREFIX + jobName + "." + fieldName);
    // parseInt avoids the needless boxing of Integer.valueOf.
    return StringUtils.hasText(value) ? Integer.parseInt(value) : defaultValue;
  }

  /** Long variant of {@link #getEnvironmentStringValue(String, String, String)}. */
  private long getEnvironmentLongValue(String jobName, String fieldName, long defaultValue) {
    String value = environment.getProperty(PREFIX + jobName + "." + fieldName);
    return StringUtils.hasText(value) ? Long.parseLong(value) : defaultValue;
  }

  /** Boolean variant of {@link #getEnvironmentStringValue(String, String, String)}. */
  private boolean getEnvironmentBooleanValue(String jobName, String fieldName, boolean defaultValue) {
    String value = environment.getProperty(PREFIX + jobName + "." + fieldName);
    return StringUtils.hasText(value) ? Boolean.parseBoolean(value) : defaultValue;
  }
}
