package cn.tiansu.eway.datafile.manager;

import java.util.Set;

import org.apache.log4j.Logger;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;

import cn.tiansu.eway.kylinengine.service.KYLINEngineService;

/**
 * Quartz job that runs the scheduled data-file maintenance pipeline:
 * moves staged files from the temp folder into the data directory,
 * regenerates per-tenant dimension data files, prunes stale backup
 * files, and — only if the backup cleanup succeeded — clears the
 * corresponding expired data in Hive for every known tenant.
 */
public class MakeDataFileJob implements Job
{
	private static final Logger log = Logger.getLogger(MakeDataFileJob.class);

	/**
	 * Executes the maintenance pipeline once per trigger firing.
	 *
	 * @param context runtime context supplied by the Quartz scheduler (unused)
	 * @throws JobExecutionException if any pipeline step fails; the original
	 *         exception is preserved as the cause so the scheduler logs the
	 *         failure instead of an unhandled runtime exception aborting the
	 *         worker thread silently
	 */
	@Override
	public void execute(JobExecutionContext context) throws JobExecutionException
	{
		try
		{
			FileListener listener = FileListener.getInstance();

			// Move staged data from the temp folder into the data directory
			log.info("==将临时文件夹下的数据移到data目录下==");
			listener.removeTempToDataFile();

			// Create the dimension data files for each tenant
			log.info("==创建各个租户下面的维度数据文件==");
			listener.dimDataToFile();

			// Prune backup files that are no longer needed
			log.info("==删除不需要的备份数据文件==");
			boolean cleaned = listener.cleanBakFile(listener.getBakPath());

			// Only touch Hive when the local backup cleanup succeeded,
			// so Hive is never left ahead of the on-disk state
			if (cleaned)
			{
				log.info("==将被清理过的数据文件覆盖到hive中==");
				Set<String> tenantNames = KYLINEngineService.getInstance()
						.getTenantName();
				listener.cleanExpireHiveData(tenantNames);
			}
		}
		catch (Exception e)
		{
			// Wrap and rethrow so Quartz records the failure (and can apply
			// its refire/unschedule policy) instead of losing the error
			log.error("MakeDataFileJob failed", e);
			throw new JobExecutionException("MakeDataFileJob failed", e);
		}
	}

}
