
package org.hbhk.aili.job.configuration;

import java.util.List;
import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

import org.hbhk.aili.job.event.HttpCommonJob;
import org.hbhk.aili.job.event.HttpJobConfig;
import org.hbhk.aili.job.event.MyElasticJobListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.jdbc.core.JdbcTemplate;

import com.dangdang.ddframe.job.api.ElasticJob;
import com.dangdang.ddframe.job.api.dataflow.DataflowJob;
import com.dangdang.ddframe.job.api.script.ScriptJob;
import com.dangdang.ddframe.job.api.simple.SimpleJob;
import com.dangdang.ddframe.job.config.JobCoreConfiguration;
import com.dangdang.ddframe.job.config.JobTypeConfiguration;
import com.dangdang.ddframe.job.config.dataflow.DataflowJobConfiguration;
import com.dangdang.ddframe.job.config.script.ScriptJobConfiguration;
import com.dangdang.ddframe.job.config.simple.SimpleJobConfiguration;
import com.dangdang.ddframe.job.event.JobEventConfiguration;
import com.dangdang.ddframe.job.executor.handler.JobProperties;
import com.dangdang.ddframe.job.executor.handler.impl.DefaultExecutorServiceHandler;
import com.dangdang.ddframe.job.executor.handler.impl.DefaultJobExceptionHandler;
import com.dangdang.ddframe.job.lite.api.JobScheduler;
import com.dangdang.ddframe.job.lite.api.listener.ElasticJobListener;
import com.dangdang.ddframe.job.lite.config.LiteJobConfiguration;
import com.dangdang.ddframe.job.reg.base.CoordinatorRegistryCenter;
import com.google.common.base.Optional;
import com.google.common.collect.Maps;

/**
 * Dynamically registers HTTP-callback Elastic-Job jobs from rows of the
 * {@code job_service} table. On container startup ({@link #afterPropertiesSet()})
 * the table is scanned once; afterwards a single-threaded poller rescans it
 * every 5 minutes so newly inserted rows are picked up without a restart.
 * Each active row becomes one single-shard {@link HttpCommonJob} scheduled
 * through Elastic-Job lite.
 */
public class HttpElasticScheduler implements InitializingBean, ApplicationContextAware {

	private static final Logger log = LoggerFactory.getLogger(HttpElasticScheduler.class);

	@Autowired
	private CoordinatorRegistryCenter registryCenter;

	// Optional dependency: when no JdbcTemplate bean exists in the context,
	// init() logs and returns without registering anything.
	@Autowired(required = false)
	private JdbcTemplate jdbcTemplate;

	// Selects active job definitions (is_delete = '0').
	String sql = "select job_name,cron ,param,url  from job_service where is_delete = ?";

	// Jobs already registered in this JVM, keyed by job name. Concurrent map
	// because the poller thread and external readers may touch it at once.
	public static Map<String, HttpJobConfig> httpJobCache = Maps.newConcurrentMap();

	public static ApplicationContext applicationContext;

	// Single-threaded poller driving the periodic rescan in afterPropertiesSet().
	private final ScheduledExecutorService scheduExec = Executors.newScheduledThreadPool(1);

	/**
	 * Runs one immediate scan, then schedules a rescan every 5 minutes.
	 * The periodic task catches and logs all exceptions so an error in one
	 * cycle never cancels the recurring schedule.
	 */
	@Override
	public void afterPropertiesSet() throws Exception {
		init();
		scheduExec.scheduleAtFixedRate(new Runnable() {
			@Override
			public void run() {
				try {
					init();
				} catch (Exception e) {
					log.error(e.getMessage(), e);
				}
			}
		}, 5, 5, TimeUnit.MINUTES);
	}

	/**
	 * Wraps the core configuration in the type-specific configuration that
	 * matches the concrete job class (simple / dataflow / script), then builds
	 * the lite-job configuration from the remaining {@link ElasticConfig}
	 * settings.
	 *
	 * @param jobConfig     core settings (name, cron, sharding) already built
	 * @param elasticJob    the job instance; its runtime type selects the branch
	 * @param elasticConfig source of the lite-level tuning options
	 * @return the complete lite job configuration
	 */
	LiteJobConfiguration buildJobConfig(JobCoreConfiguration jobConfig, ElasticJob elasticJob,
			ElasticConfig elasticConfig) {
		JobTypeConfiguration typeConfiguration = null;
		String jobClass = elasticJob.getClass().getCanonicalName();
		if (elasticJob instanceof SimpleJob) {
			typeConfiguration = new SimpleJobConfiguration(jobConfig, jobClass);
		} else if (elasticJob instanceof DataflowJob) {
			typeConfiguration = new DataflowJobConfiguration(jobConfig, jobClass, elasticConfig.isStreamingProcess());
		} else if (elasticJob instanceof ScriptJob) {
			typeConfiguration = new ScriptJobConfiguration(jobConfig, elasticConfig.getScriptCommandLine());
		}
		// NOTE(review): typeConfiguration stays null for a job that is none of
		// the three known types and newBuilder would then fail; all current
		// callers pass HttpCommonJob, so that branch is unreachable today.
		return LiteJobConfiguration.newBuilder(typeConfiguration).monitorExecution(elasticConfig.isMonitorExecution())
				.monitorPort(elasticConfig.getMonitorPort()).maxTimeDiffSeconds(elasticConfig.getMaxTimeDiffSeconds())
				.jobShardingStrategyClass(elasticConfig.getJobShardingStrategyClass())
				.reconcileIntervalMinutes(elasticConfig.getReconcileIntervalMinutes())
				.disabled(elasticConfig.isDisabled()).overwrite(elasticConfig.isOverwrite()).build();
	}

	/**
	 * JobScheduler variant that hands Elastic-Job a pre-built job instance
	 * instead of letting the framework instantiate the job class reflectively.
	 */
	static class SpringJobScheduler extends JobScheduler {
		private final ElasticJob elasticJob;

		public SpringJobScheduler(CoordinatorRegistryCenter regCenter, LiteJobConfiguration liteJobConfig,
				ElasticJob elasticJob, ElasticJobListener... elasticJobListeners) {
			super(regCenter, liteJobConfig, elasticJobListeners);
			this.elasticJob = elasticJob;
		}

		public SpringJobScheduler(CoordinatorRegistryCenter regCenter, LiteJobConfiguration liteJobConfig,
				JobEventConfiguration jobEventConfiguration, ElasticJob elasticJob,
				ElasticJobListener... elasticJobListeners) {
			super(regCenter, liteJobConfig, jobEventConfiguration, elasticJobListeners);
			this.elasticJob = elasticJob;
		}

		@Override
		protected Optional<ElasticJob> createElasticJobInstance() {
			return Optional.of(elasticJob);
		}
	}

	/**
	 * Loads all active job rows and registers every job that is not yet in
	 * {@link #httpJobCache}. Safe to call repeatedly: rows whose job name is
	 * already cached are skipped, the rest are scheduled and cached.
	 *
	 * @throws Exception propagated from Elastic-Job scheduler initialization
	 */
	public void init() throws Exception {
		if (jdbcTemplate == null) {
			log.info("注册http job JdbcTemplate is  null");
			return;
		}
		List<HttpJobConfig> jobRpcs;
		try {
			// HttpJobConfig doubles as the RowMapper for its own rows.
			jobRpcs = jdbcTemplate.query(sql, new Object[] { "0" }, new HttpJobConfig());
			if (jobRpcs == null) {
				log.info("注册http job  is  null");
				return;
			}
		} catch (Exception e) {
			// Raised from info to error: a failed table scan is an actual fault.
			log.error("注册http job 异常", e);
			return;
		}

		for (HttpJobConfig jobRpc : jobRpcs) {
			String beanName = jobRpc.getName();

			// BUG FIX: this used to be "return", which aborted the whole loop —
			// one already-registered job prevented every later row from being
			// registered on rescans. Skip just this row instead.
			if (httpJobCache.containsKey(beanName)) {
				continue;
			}

			ElasticJob elasticJob = new HttpCommonJob();
			ElasticConfig elasticConfig = new ElasticConfig();

			String cron = jobRpc.getCron();
			elasticConfig.setCron(cron);
			elasticConfig.setName(beanName);
			elasticConfig.setDescription(beanName);
			elasticConfig.setJobExceptionHandler(DefaultJobExceptionHandler.class);
			elasticConfig.setMaxTimeDiffSeconds(-1);
			elasticConfig.setMonitorPort(-1);
			elasticConfig.setShardingCount(1);
			elasticConfig.setShardingItemParameters("");
			elasticConfig.setFailover(true);
			elasticConfig.setMisfire(false);
			elasticConfig.setMonitorExecution(true);
			elasticConfig.setExecutorServiceHandler(DefaultExecutorServiceHandler.class);

			JobCoreConfiguration jobConfig = JobCoreConfiguration
					.newBuilder(beanName, cron, elasticConfig.getShardingCount())
					.description(elasticConfig.getDescription()).failover(elasticConfig.isFailover())
					.misfire(elasticConfig.isMisfire())
					.shardingItemParameters(elasticConfig.getShardingItemParameters())
					.jobParameter(elasticConfig.getJobParameter())
					.jobProperties(JobProperties.JobPropertiesEnum.JOB_EXCEPTION_HANDLER.getKey(),
							elasticConfig.getJobExceptionHandler().getCanonicalName())
					.jobProperties(JobProperties.JobPropertiesEnum.EXECUTOR_SERVICE_HANDLER.getKey(),
							elasticConfig.getExecutorServiceHandler().getCanonicalName())
					.build();
			new SpringJobScheduler(registryCenter, buildJobConfig(jobConfig, elasticJob, elasticConfig), elasticJob,
					new MyElasticJobListener()).init();
			// Record the registration so rescans skip this job name.
			String address = jobRpc.getAddress();
			httpJobCache.put(beanName, jobRpc);
			log.info("注册rpc job:" + beanName + ",address:" + address );
		}
	}

	/** Exposes the Spring context statically for use by the registered jobs. */
	@Override
	public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
		HttpElasticScheduler.applicationContext = applicationContext;
	}
}
