package com.luweijie.controller;

import org.quartz.JobDataMap;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.scheduling.quartz.QuartzJobBean;


/**
 * Quartz job that marks the start of a batch run.
 *
 * <p>Reads the batch name from the job's {@link JobDataMap} under the key
 * {@code "jobName"} and logs the start of the run. Any failure is logged and
 * swallowed so a single bad execution does not trigger Quartz refire/abort
 * behavior (matching the original error-handling contract of this job).
 */
public class StartBatchJob extends QuartzJobBean {

	// SLF4J loggers are thread-safe; one static instance per class is the convention.
	private static final Logger logger = LoggerFactory.getLogger(StartBatchJob.class);

	/**
	 * Executes the job: resolves the batch name from the job detail's data map
	 * and logs that the batch has started.
	 *
	 * @param context Quartz execution context supplied by the scheduler;
	 *                its job detail is expected to carry a "jobName" entry
	 * @throws JobExecutionException declared by the Quartz contract; this
	 *                               implementation never actually throws —
	 *                               errors are logged and suppressed
	 */
	@Override
	protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
		JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
		// getString(...) is the type-safe accessor; avoids a raw cast on get(...).
		String jobName = jobDataMap.getString("jobName");
		try {
			logger.info("----------------"+jobName+"批量开始跑批-----------");
		} catch (Exception e) {
			// Log at ERROR with the full throwable so the stack trace and cause
			// chain are preserved (the previous code logged only e.getMessage()
			// at INFO, which hid the root cause). Deliberately not rethrown to
			// keep the original best-effort behavior toward the scheduler.
			logger.error("----------------"+jobName+"批量报错-----------"+e.getMessage(), e);
		}
	}


}
