package com.orange.quartz.job;

import java.io.IOException;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.fs.FileSystem;
import org.apache.log4j.Logger;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;

import com.orange.expireFile.entity.ExpireFile;
import com.orange.expireFile.format.ExpireFileFormat;
import com.orange.hdfs.commons.HDFSCommons;
import com.orange.hdfs.utils.HDFSUtils;
import com.orange.quartz.schedule.ScheduleJob;
import com.orange.utils.PathUtils;

/**
 * @author: HH
 * @Description: Concrete job execution class. All scheduled jobs share this single Job
 *               implementation; inside execute() the jobName is used to look up the
 *               corresponding ExpireFile configuration, so each job performs its own cleanup.
 * @date 2017-09-18 17:53:46
 */
public class QuartzJobFactory implements Job {

	private static final Logger logger = Logger.getLogger(QuartzJobFactory.class);

	public void execute(JobExecutionContext context) throws JobExecutionException {
		Map<String, ExpireFile> expireFileMap = ExpireFileFormat.getExpireFileMap();
		ScheduleJob scheduleJob = (ScheduleJob) context.getMergedJobDataMap().get("scheduleJob");
		String jobName = scheduleJob.getJobName();
		logger.info("JobName:" + jobName);
		logger.info("JobDesc:" + scheduleJob.getDesc());
		ExpireFile expireFile = expireFileMap.get(jobName);
		// jobName为Map<String, ExpireFile>的key，不同的jobName对应不同的ExpireFile信息
		String filePath = PathUtils.formatHPath(expireFile.getExpireFilePath());
		
		//hdfs部分
		FileSystem fs = HDFSCommons.getHDFileSystem();
		//文件存在？目录？
		if(HDFSUtils.checkFileExits(fs, filePath) && HDFSUtils.isDirectory(fs, filePath)){
			//获取所有过期文件List
			List<String> fileList = HDFSUtils.listAllExipreFilesWithNoRecursion(fs, filePath, expireFile.getExpireFileTime());
			//根据过期文件List删除文件
			for (String path : fileList) {
				try {
					HDFSUtils.delFile(fs, path);
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
		}
		logger.info("JobName:" + jobName + "--任务执行完毕!");
	}

}
