package demo.utils;

import demo.vo.PlayStatTypeEnum;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * @author mandy.hu
 *
 */
/**
 * File / configuration helpers for the XDCS Spark jobs: loading classpath
 * properties files and appending metric data points to per-metric log files.
 *
 * All members are static; the class is stateless apart from the logger table
 * built once in the static initializer.
 *
 * @author mandy.hu
 */
public class XdcsSparkFileUtils implements Serializable {

	private static final long serialVersionUID = -174595134246809490L;

	private static final Logger logger = LoggerFactory.getLogger(XdcsSparkFileUtils.class);

	/** Per-metric log4j loggers, keyed by metric name; populated once at class load. */
	private static final Map<String, org.apache.log4j.Logger> loggerMap = new HashMap<String, org.apache.log4j.Logger>();

	static {
		// One dedicated logger per play-stat metric, all writing to the data-point output file.
		for (PlayStatTypeEnum s : PlayStatTypeEnum.values()) {
			loggerMap.put(s.getMetric(), XdcsSparkLoggerUtil.getLoggerByName(XdcsSparkConfig.outputFileOfDataPoint(), s.getMetric()));
		}

		// Extra fixed channels outside the enum. NOTE(review): "Duation" looks like a
		// typo for "Duration", but these strings are runtime keys that callers pass to
		// append(..) — renaming them would break existing producers, so they are kept as-is.
		loggerMap.put("compassDataPoint", XdcsSparkLoggerUtil.getLoggerByName(XdcsSparkConfig.outputFileOfDataPoint(), "compassDataPoint"));
		loggerMap.put("sparkTimeDuation", XdcsSparkLoggerUtil.getLoggerByName(XdcsSparkConfig.outputFileOfDataPoint(), "sparkTimeDuation"));
		loggerMap.put("sparkTimeStoreDuation", XdcsSparkLoggerUtil.getLoggerByName(XdcsSparkConfig.outputFileOfDataPoint(), "sparkTimeStoreDuation"));
		loggerMap.put("sparkFlowTest", XdcsSparkLoggerUtil.getLoggerByName(XdcsSparkConfig.outputFileOfDataPoint(), "sparkFlowTest"));
	}

	/**
	 * Loads a properties file from the classpath.
	 *
	 * <p>Preserves the original "fail fast" semantics: if the resource is missing
	 * or unreadable the JVM exits with status -1. Previously a missing resource
	 * caused a NullPointerException (getResourceAsStream returns null); now it is
	 * logged and exits through the same path.
	 *
	 * @param file classpath-relative resource name
	 * @return the loaded properties (never null; the method exits the JVM on failure)
	 */
	public static Properties load(String file) {
		Properties properties = new Properties();
		InputStream resourceAsStream = XdcsSparkFileUtils.class.getClassLoader().getResourceAsStream(file);
		if (resourceAsStream == null) {
			logger.error("XdcsSparkFileUtils load: resource not found on classpath: {}", file);
			System.exit(-1);
		}
		try {
			properties.load(resourceAsStream);
		} catch (IOException e) {
			logger.error("XdcsSparkFileUtils load", e);
			System.exit(-1);
		} finally {
			if (resourceAsStream != null) {
				try {
					resourceAsStream.close();
				} catch (IOException e) {
					logger.error("XdcsSparkFileUtils load", e);
				}
			}
		}
		return properties;
	}

	/**
	 * Loads a classpath properties file and exposes it as a String-to-String map.
	 *
	 * <p>Uses {@link Properties#stringPropertyNames()} so no casts (and no broad
	 * catch around them) are needed — property keys and values are Strings by contract.
	 *
	 * @param confFile classpath-relative properties file name
	 * @return mutable map of the configuration entries
	 */
	public static Map<String, String> loadConfs(String confFile) {
		Properties properties = XdcsSparkFileUtils.load(confFile);
		Map<String, String> sparkConf = new HashMap<String, String>();
		for (String key : properties.stringPropertyNames()) {
			sparkConf.put(key, properties.getProperty(key));
		}
		return sparkConf;
	}

	/**
	 * Appends a line of content (plus the platform line separator) to the given file,
	 * creating it if necessary. IO failures are logged and swallowed (best-effort append).
	 *
	 * @param content line to append, without trailing newline
	 * @param file    path of the file to append to
	 */
	public static void append1(String content, String file) {
		File outputFile = null;
		FileWriter fileWriter = null;
		BufferedWriter bufferedWriter = null;
		try {
			outputFile = new File(file);
			fileWriter = new FileWriter(outputFile, true); // true = append mode
			bufferedWriter = new BufferedWriter(fileWriter);
			bufferedWriter.write(content + System.getProperty("line.separator"));
		} catch (IOException e) {
			logger.error("XdcsSparkFileUtils append", e);
		} finally {
			// Closing the BufferedWriter also closes the wrapped FileWriter; the second
			// close is a harmless no-op kept for safety when wrapping itself failed.
			if (bufferedWriter != null) {
				try {
					bufferedWriter.close();
				} catch (IOException e) {
					logger.error("XdcsSparkFileUtils append", e);
				}
			}
			if (fileWriter != null) {
				try {
					fileWriter.close();
				} catch (IOException e) {
					logger.error("XdcsSparkFileUtils append", e);
				}
			}
		}
	}

	/**
	 * Writes a data point to the dedicated logger for the given metric.
	 *
	 * <p>Previously an unregistered metric caused a NullPointerException on the
	 * {@code loggerMap.get(metric)} result; such data points are now logged as
	 * errors and dropped instead of crashing the caller.
	 *
	 * @param dataPoint the data-point line to emit
	 * @param metric    metric name; must be one of the keys registered at class load
	 */
	public static void append(String dataPoint, String metric) {
		org.apache.log4j.Logger metricLogger = loggerMap.get(metric);
		if (metricLogger == null) {
			logger.error("XdcsSparkFileUtils append: no logger registered for metric: {}", metric);
			return;
		}
		metricLogger.info(dataPoint);
	}
}
