package com.znz.analysis.main;

import java.io.IOException;
import java.util.List;

import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.spark.api.java.JavaRDD;
import org.springframework.context.support.ClassPathXmlApplicationContext;

import com.maxmind.geoip2.exception.GeoIp2Exception;
import com.znz.analysis.constant.enumeration.HDFSFileType;
import com.znz.analysis.constant.properties.ApolloProperties;
import com.znz.analysis.constant.properties.HDFSProperties;
import com.znz.analysis.hdfs.HDFSFile;
import com.znz.analysis.spark.rdd.WebVisitLogRDD;
import com.znz.analysis.spark.sparkContext.SparkContext;

/**
 * Entry point for the data-analysis job.
 *
 * <p>Bootstraps the Spring application context, and — when the HDFS
 * configuration flags a manual run — reads web-visit log files from HDFS
 * and feeds each one through {@link WebVisitLogRDD} on a Spark context.
 */
public class App {

	// Was WebVisitLogRDD.class — copy-paste bug; use App's own class so log
	// lines are attributed to (and configured for) this entry point.
	private static final Logger logger = Logger.getLogger(App.class.getName());

	/**
	 * Runs the analysis pipeline.
	 *
	 * @param args unused command-line arguments
	 * @throws IOException     if HDFS path listing or file reads fail
	 * @throws GeoIp2Exception if GeoIP lookups inside the RDD handler fail
	 */
	public static void main(String[] args) throws IOException, GeoIp2Exception {
		logger.setLevel(Level.INFO);
		logger.info("Start to run DataAnalysis");
		// 1. Initialize the ApplicationContext container and load the beans.
		ClassPathXmlApplicationContext context = ApplicationContextFactory.getInstance();

		// 2. Per configuration, decide whether to process files on this (manual) run.
		HDFSProperties hdfsProperties = context.getBean(HDFSProperties.class);
		if (hdfsProperties.isManual()) {
			// 3. Fetch the collaborating beans (getBean(Class) is typed — no casts needed).
			HDFSFile hdfsFile = context.getBean(HDFSFile.class);
			WebVisitLogRDD webVisitLogRDD = context.getBean(WebVisitLogRDD.class);
			ApolloProperties apolloProperties = context.getBean(ApolloProperties.class);
			SparkContext sc = context.getBean(SparkContext.class);
			// 4. Initialize the JavaSparkContext.
			sc.create();
			// 5. Call the API to obtain all Apollo-related domains.
			List<String> domains = apolloProperties.getDomainsFromApollo();
			// 6. Split the comma-separated directory list from the configuration.
			String[] dirs = hdfsProperties.getManual_dir().split(",");
			// 7. Read the files from HDFS, one configured directory at a time.
			for (String dir : dirs) {
				String file_full_path = hdfsProperties.getRoot() + dir;
				List<String> hdfsPaths = hdfsFile.getPaths(file_full_path, domains, HDFSFileType.FILE);
				hdfsPaths.forEach(i -> {
					JavaRDD<String> inputRDD = sc.textFile(i);
					webVisitLogRDD.handler(inputRDD, i);
				});
			}
			// 8. Shut down the SparkContext.
			sc.close();
		}
		logger.info("Stop DataAnalysis success");
	}
}
