package demo.utils;

import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;
import org.apache.log4j.RollingFileAppender;

import java.util.HashMap;
import java.util.Map;

/**
 * @author mandy.hu
 *
 */
 
/**
 * Factory/cache for per-metric log4j Loggers used by XDCS Spark jobs.
 *
 * <p>Each metric gets its own Logger writing raw messages ({@code %m%n}) to
 * {@code <logDir>/spark-logs/<metric>.out}, rolling at 1 GiB with up to
 * 3 backup files. Loggers are created once and cached.
 *
 * <p>Thread-safe: creation and cache access are serialized on the class lock.
 */
public final class XdcsSparkLoggerUtil {

	// Cache of already-configured loggers, keyed by logger name.
	// Guarded by the class lock (getLoggerByName is synchronized); the
	// original unsynchronized HashMap allowed a check-then-act race that
	// could attach duplicate appenders for the same metric.
	private static final Map<String, Logger> xdcsLoggerMap = new HashMap<String, Logger>();

	/** Utility class — not instantiable. */
	private XdcsSparkLoggerUtil() {
	}

	/**
	 * Returns the cached Logger for the given metric, creating and
	 * configuring it on first use.
	 *
	 * @param logDir base directory; the log file is written under
	 *               {@code logDir/spark-logs/}
	 * @param metric metric name, used in both the logger name and the
	 *               output file name
	 * @return a configured Logger dedicated to this metric
	 */
	public static synchronized Logger getLoggerByName(String logDir, String metric) {
		String loggerName = "xdcs-spark-" + metric + "-logger";
		Logger cached = xdcsLoggerMap.get(loggerName);
		if (cached != null) {
			return cached;
		}
		Logger logger = Logger.getLogger(loggerName);
		logger.removeAllAppenders();
		logger.setLevel(Level.INFO);
		// Do not propagate events to the root logger's appenders.
		logger.setAdditivity(false);

		// Build a fresh appender for this metric.
		RollingFileAppender appender = new RollingFileAppender();
		// Output format: the bare message followed by a newline.
		PatternLayout layout = new PatternLayout();
		layout.setConversionPattern("%m%n");
		appender.setLayout(layout);
		// Output path for this metric's log file.
		appender.setFile(logDir + "/spark-logs/" + metric + ".out");
		// Log file character encoding.
		appender.setEncoding("UTF-8");
		// true: append to an existing log file; false: overwrite it.
		appender.setAppend(true);
		appender.setBufferedIO(false);
		// Roll at 1 GiB, keeping at most 3 backup files.
		appender.setMaxBackupIndex(3);
		appender.setMaximumFileSize(1024L * 1024 * 1024);
		appender.setName("sparkFileAppender");
		// Apply the configuration (opens the target file). Must run AFTER all
		// setters — the original activated before setting the rolling options,
		// leaving the appender only partially configured at activation time.
		appender.activateOptions();

		// Attach the new appender and cache the logger.
		logger.addAppender(appender);
		xdcsLoggerMap.put(loggerName, logger);
		return logger;
	}
}
