package cn.cloudbae.schedule;

import java.text.SimpleDateFormat;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Date;
import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import com.dangdang.ddframe.job.api.ShardingContext;
import com.dangdang.ddframe.job.api.dataflow.DataflowJob;

import cn.cloudbae.domain.DfJob;
import cn.cloudbae.kit.PortKit;
import cn.cloudbae.kit.Randomkit;

/**
 * Dataflow job that picks up pending {@code df_job} rows for this node's
 * sharding item and simulates processing them.
 *
 * <p>Fixes over the previous revision: the log timestamp pattern used the
 * 12-hour {@code hh} field where a 24-hour clock ({@code HH}) was intended;
 * the per-call, non-thread-safe {@link SimpleDateFormat} is replaced by a
 * cached thread-safe {@link DateTimeFormatter}; the sharding total count is
 * bound as a SQL parameter instead of being concatenated into the statement;
 * and the duplicated logging code is extracted into a helper.
 */
public class FirstDataFlowJob implements DataflowJob<DfJob> {

	private static final Log logger = LogFactory.getLog(FirstDataFlowJob.class);

	/** Thread-safe, reusable 24-hour timestamp formatter for log output. */
	private static final DateTimeFormatter TIMESTAMP =
			DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

	/**
	 * Fetches the pending jobs assigned to this sharding item.
	 *
	 * @param shardingContext sharding item/parameter for this execution
	 * @return rows of {@code df_job} whose result is not yet "ok", checked at
	 *         most 5 times, and whose id maps to this sharding item
	 */
	@Override
	public List<DfJob> fetchData(ShardingContext shardingContext) {
		logInvocation("fetchData", shardingContext);
		// A record is retried at most 5 times (job_count<=5), after which it is
		// no longer selected. Both the modulo divisor and the expected remainder
		// are bound as parameters rather than concatenated into the SQL.
		String sql = "select * from df_job where "
				+ "job_name=? and job_result!=? and job_count<=5 "
				+ "and id%?=?";
		return DfJob.dao.find(sql, "firstDataFlowJob", "ok",
				shardingContext.getShardingTotalCount(), shardingContext.getShardingItem());
	}

	/**
	 * Processes the fetched jobs, recording a simulated success/failure result
	 * on each row.
	 *
	 * @param shardingContext sharding item/parameter for this execution
	 * @param data rows returned by {@link #fetchData}; may be {@code null}
	 */
	@Override
	public void processData(ShardingContext shardingContext, List<DfJob> data) {
		logInvocation("processData", shardingContext);
		if (data == null || data.isEmpty()) {
			return;
		}
		for (DfJob dfJob : data) {
			// Draw a random number in [1, 12]; if divisible by 4 the task is
			// considered successful. This simulates the job's business logic.
			int num = Randomkit.nextInt(1, 13);
			dfJob.setJobResult(num % 4 == 0 ? "ok" : "fail");
			dfJob.setModifyTime(new Date());
			dfJob.setJobCount(dfJob.getJobCount() + 1);
			dfJob.setPort(PortKit.getPort());
			dfJob.setShardingItem(shardingContext.getShardingItem());
			dfJob.setShardingParameter(shardingContext.getShardingParameter());
			dfJob.update();
		}
	}

	/**
	 * Logs the method name, current time, runtime port, and sharding
	 * item/parameter for one job invocation.
	 */
	private void logInvocation(String method, ShardingContext shardingContext) {
		String now = LocalDateTime.now().format(TIMESTAMP);
		String port = String.valueOf(PortKit.getPort()); // runtime port of this node
		logger.info(method + " " + now + " at " + port + ", "
				+ shardingContext.getShardingItem() + "=" + shardingContext.getShardingParameter());
	}

}
