package com.znz.analysis.main;

import java.util.List;

import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.spark.api.java.JavaRDD;
import org.springframework.context.support.ClassPathXmlApplicationContext;

import com.znz.analysis.constant.properties.ExternalInterfaceProperties;
import com.znz.analysis.constant.properties.HDFSProperties;
import com.znz.analysis.hdfs.HDFSFile;
import com.znz.analysis.spark.rdd.WebVisitLogRDD;
import com.znz.analysis.spark.sparkContext.SparkContext;

/**
 * Scheduled analysis job: reads yesterday's web-visit log files from HDFS
 * for every apollo domain and processes each file as a Spark RDD.
 * 
 * @author TP
 * @version created 2017-06-26 14:19:25
 */
public class Quartz {
	private final Logger logger = Logger.getLogger(getClass().getName());

	/**
	 * Entry point invoked by the Quartz scheduler.
	 * <p>
	 * Pipeline: load beans from the singleton Spring context, create the
	 * SparkContext, fetch the apollo domain list, resolve yesterday's HDFS
	 * files, and hand each file's RDD to {@link WebVisitLogRDD}. Any exception
	 * is caught and logged at the boundary so the scheduler keeps running.
	 */
	protected void execute() {
		logger.setLevel(Level.INFO);
		logger.info("Start to run DataAnalysis");
		try {
			// 1. Obtain the singleton ClassPathXmlApplicationContext.
			ClassPathXmlApplicationContext context = ApplicationContextFactory.getInstance();
			// 2. Load beans. getBean(Class<T>) is already typed, so the
			//    redundant casts from the original version are dropped.
			HDFSFile hdfsFile = context.getBean(HDFSFile.class);
			WebVisitLogRDD webVisitLogRDD = context.getBean(WebVisitLogRDD.class);
			ExternalInterfaceProperties apolloProperties = context.getBean(ExternalInterfaceProperties.class);
			SparkContext sc = context.getBean(SparkContext.class);
			HDFSProperties hdfsProperties = context.getBean(HDFSProperties.class);
			// 3. Initialize the JavaSparkContext.
			sc.create();
			try {
				// 4. Call the external API to fetch all apollo domains.
				List<String> domains = apolloProperties.getDomainsFromApollo();
				// 5. Resolve yesterday's files on HDFS and process each one.
				List<String> hdfsPaths = hdfsFile.getYesterdayFiles(hdfsProperties.getRoot(), domains);
				for (String hdfsPath : hdfsPaths) {
					JavaRDD<String> inputRDD = sc.textFile(hdfsPath);
					webVisitLogRDD.handler(inputRDD, hdfsPath);
				}
				webVisitLogRDD.clearIpMap();
			} finally {
				// 6. Always release the SparkContext — the original only closed
				//    it on the happy path, leaking it whenever any step after
				//    create() threw.
				sc.close();
			}
			logger.info("Stop DataAnalysis success");
		} catch (Exception e) {
			// Boundary catch: log and swallow so the scheduler itself survives.
			logger.error(e.getMessage(), e);
		}
	}

}
