package demo.utils;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.io.InputStream;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * @author mandy.hu
 *
 */
public class XdcsSparkConfig implements Serializable {

	private static final Logger logger = LoggerFactory.getLogger(XdcsSparkConfig.class);

	private static final long serialVersionUID = 1L;

	// Classpath resources merged in order; a later file overrides earlier
	// files on duplicate keys (Properties.load simply re-puts the key).
	private static final String[] configFiles = { "/spark.properties", "/xdcs-spark.properties", "/spark-kafka-play.properties" };

	// Merged view of all config files; populated once in the static initializer.
	private static final Properties properties = new Properties();

	// Subset of properties whose keys start with "spark." (computed once below).
	private static final Map<String, String> sparkConfig = new HashMap<String, String>();

	/**
	 * Returns the raw merged properties. The returned object is the live
	 * internal instance; callers should treat it as read-only.
	 */
	public static Properties getProperties() {

		return properties;
	}

	static {
		for (String cf : configFiles) {
			InputStream is = XdcsSparkConfig.class.getResourceAsStream(cf);
			if (is == null) {
				// Missing files are tolerated: not every deployment ships all three.
				logger.info("can't find resource {}", cf);
				continue;
			}
			try {
				properties.load(is);
			} catch (IOException ioe) {
				logger.error("can't load file {}", cf, ioe);
			} finally {
				try {
					is.close();
				} catch (IOException ioe) {
					logger.error("io exception", ioe);
				}
			}
		}

		// stringPropertyNames() is type-safe and includes chained defaults,
		// unlike iterating keySet() with unchecked casts.
		for (String key : properties.stringPropertyNames()) {
			if (key.startsWith("spark.")) {
				sparkConfig.put(key, properties.getProperty(key));
			}
		}
	}

	/**
	 * Returns the value for {@code name}, or {@code null} if the key is absent.
	 */
	public static String get(String name) {

		return properties.getProperty(name);
	}

	/**
	 * Returns the value for {@code name}, or {@code defaultValue} if the key
	 * is absent.
	 */
	public static String get(String name, String defaultValue) {

		String ret = properties.getProperty(name);
		return ret == null ? defaultValue : ret;
	}

	/**
	 * Returns {@code true} only when the trimmed value equals "true"
	 * (case-sensitive); an absent key yields {@code false}.
	 */
	public static boolean getBoolean(String name) {

		return "true".equals(get(name, "false").trim());
	}

	/**
	 * Returns the integer value of {@code item}, falling back to
	 * {@code defaultValue} when the key is absent or the value is not a
	 * parseable integer.
	 */
	public static int getInt(String item, int defaultValue) {

		String strInt = properties.getProperty(item);
		if (strInt == null) {
			return defaultValue;
		}
		try {
			return Integer.parseInt(strInt.trim());
		} catch (NumberFormatException nfe) {
			// Log the cause so the offending value is visible, then fall back.
			logger.error("can't parse config value for '{}' use default value {}", item, defaultValue, nfe);
			return defaultValue;
		}
	}

	/**
	 * Returns the live map of all "spark."-prefixed properties; callers
	 * should treat it as read-only.
	 */
	public static Map<String, String> getSparkConfig() {

		return sparkConfig;
	}

	/**
	 * Returns a fresh map of all properties whose keys start with
	 * {@code sectionName}. The caller owns the returned map.
	 */
	public static Map<String, String> getSection(String sectionName) {

		Map<String, String> section = new HashMap<String, String>();
		for (String key : properties.stringPropertyNames()) {
			if (key.startsWith(sectionName)) {
				section.put(key, properties.getProperty(key));
			}
		}
		return section;
	}

	/**
	 * Returns the configured data-point output file name, or {@code null}
	 * if "datapoint.output.file.name" is unset.
	 */
	public static String outputFileOfDataPoint() {

		return properties.getProperty("datapoint.output.file.name");
	}

	/**
	 * Whether data points should be written to a file. An absent
	 * "datapoint.output.to.file" key yields {@code false} (previously this
	 * threw NullPointerException via getProperty(...).trim()).
	 */
	public static boolean isOutputToFile() {

		return "true".equalsIgnoreCase(get("datapoint.output.to.file", "false").trim());
	}

	/**
	 * Whether data points should be published to Kafka. An absent
	 * "datapoint.output.to.kafka" key yields {@code false} (previously this
	 * threw NullPointerException via getProperty(...).trim()).
	 */
	public static boolean isDataPointToKafka() {

		return "true".equalsIgnoreCase(get("datapoint.output.to.kafka", "false").trim());
	}

	/**
	 * Returns "spark.repartition.num" as an int. The value is trimmed first
	 * (the previous Integer.valueOf rejected whitespace-padded values);
	 * a missing or non-numeric value still throws NumberFormatException.
	 */
	public static int getRepartitionNum() {

		return Integer.parseInt(get("spark.repartition.num", "").trim());
	}
}
