package com.flute.haflute.jobbox.common;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Properties;

import org.jdom.Document;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.jdom.input.SAXBuilder;

/**
 * Loads the distributed-platform configuration. Called once at platform
 * start-up; reads the settings from disk and initializes the system
 * environment through {@link CloudContext}.
 */
public class CloudInfoLoader {

	/** Configuration file path, relative to the platform root directory. */
	private final String cloudConfFile = "conf/cloudinfo.xml";

	/** Element name of the Hadoop configuration section. */
	private final String hadoopConf = "hadoop";

	/** Element name of the platform configuration section. */
	private final String cloudConfig = "cloud";

	/**
	 * Reads the platform configuration and populates {@link CloudContext}.
	 *
	 * @throws JDOMException if the configuration XML is malformed
	 * @throws IOException   if the configuration file cannot be read or a
	 *                       required section is missing
	 */
	public void load() throws JDOMException, IOException {
		loadConfiguration();
	}

	private void loadConfiguration() throws JDOMException, IOException {
		// NOTE(review): SAXBuilder resolves DTDs/external entities by default
		// (XXE). The file is local and trusted, but consider disabling them.
		SAXBuilder builder = new SAXBuilder();
		Document doc = builder.build(new File(CloudContext.getRootDir()
				+ cloudConfFile));
		Element root = doc.getRootElement();

		// The Hadoop section is optional.
		Element hadoop = root.getChild(hadoopConf);
		if (hadoop != null) {
			loadHadoopClusterConf(hadoop);
		}

		// The <cloud> section is required; fail with a clear message instead
		// of a NullPointerException when it is absent.
		Element cloud = root.getChild(cloudConfig);
		if (cloud == null) {
			throw new IOException("missing <" + cloudConfig + "> element in "
					+ cloudConfFile);
		}
		loadCloudInfo(cloud);
	}

	/**
	 * Reads the distributed-platform parameters: master address/RMI port,
	 * the set of predefined nodes, and the heartbeat settings. Each
	 * sub-section is skipped when absent, matching the lenient per-attribute
	 * handling already used for the heartbeat values.
	 */
	@SuppressWarnings("unchecked")
	private void loadCloudInfo(Element cloud) throws IOException {
		// Master node address and port configuration.
		Element mastersEle = cloud.getChild("masters");
		if (mastersEle != null) {
			List<Element> masters = mastersEle.getChildren();
			for (Element e : masters) {
				if ("master".equals(e.getName())) {
					// Prefer the explicit ip attribute, fall back to hostname.
					String master = e.getAttributeValue("ip");
					if (null == master || "".equals(master))
						master = e.getAttributeValue("hostname");
					CloudContext.setCloudMaster(master);

					String rmiport = e.getAttributeValue("rmiport");
					if (null != rmiport && !"".equalsIgnoreCase(rmiport)) {
						CloudContext.setRmiPort(Integer.parseInt(rmiport));
					}
				}
			}
		}

		// Information shared by all nodes: either inline <node> entries or an
		// external <nodesfile> listing one host per line.
		Element slavesElem = cloud.getChild("nodes");
		if (slavesElem != null) {
			List<Element> slaves = slavesElem.getChildren();
			HashMap<String, String> slavesMap = new HashMap<String, String>();
			CloudContext.setDefinedNodeSet(slavesMap);
			for (Element e : slaves) {
				if ("node".equals(e.getName())) {
					slavesMap.put(e.getAttributeValue("hostname"),
							e.getAttributeValue("ip"));
				}
				if ("nodesfile".equals(e.getName())) {
					readNodesFromFile(e.getAttributeValue("filename"));
				}
			}
		}

		// Heartbeat configuration; every attribute is optional.
		Element heartbeatEle = cloud.getChild("heartbeat");
		if (heartbeatEle != null) {
			String checkInterval = heartbeatEle
					.getAttributeValue("checkinterval");
			String failedTimes = heartbeatEle.getAttributeValue("failedtimes");
			String tryTimes = heartbeatEle.getAttributeValue("trytimes");
			String listenPort = heartbeatEle.getAttributeValue("listenport");
			if (checkInterval != null) {
				CloudContext
						.setHBCheckInterval(Integer.parseInt(checkInterval));
			}
			if (failedTimes != null) {
				CloudContext.setHBFailedTimes(Integer.parseInt(failedTimes));
			}
			if (tryTimes != null) {
				CloudContext.setHBMaxHelloTimes4Child(Integer
						.parseInt(tryTimes));
			}
			if (listenPort != null) {
				CloudContext.setHBListenPort(Integer.parseInt(listenPort));
			}
		}
	}

	/**
	 * Reads predefined nodes from a plain-text file, one host name per line,
	 * and registers each with {@link CloudContext}. The file is looked up
	 * relative to the platform root directory first, then relative to the
	 * current working directory.
	 *
	 * @param fileName path of the node list file
	 * @throws IOException if the file cannot be found or read
	 */
	private void readNodesFromFile(String fileName) throws IOException {
		File nodesFile = new File(CloudContext.getRootDir() + fileName);
		if (!nodesFile.exists()) {
			System.err.println("no such file: " + nodesFile);
			nodesFile = new File(fileName);
		}
		if (!nodesFile.exists())
			throw new FileNotFoundException("no such file: " + nodesFile);

		// NOTE(review): FileReader uses the platform default charset; kept
		// for backward compatibility with existing node files.
		BufferedReader reader = new BufferedReader(new FileReader(nodesFile));
		try {
			String node;
			while ((node = reader.readLine()) != null) {
				node = node.trim();
				// Skip blank lines so they do not register an empty node name.
				if (node.length() == 0)
					continue;
				CloudContext.addDefinedNode(node, node);
			}
		} finally {
			// Close in finally: the original code leaked the reader when
			// readLine threw.
			reader.close();
		}
	}

	/**
	 * Reads the Hadoop settings. Properties are collected only when the
	 * section carries integrate="true"; entries missing a name or value
	 * attribute are ignored.
	 */
	@SuppressWarnings("unchecked")
	private void loadHadoopClusterConf(Element hadoop) {
		String isNeedIntegration = hadoop.getAttributeValue("integrate");
		if ("true".equalsIgnoreCase(isNeedIntegration)) {
			Properties props = new Properties();
			List<Element> hadoop_prop = hadoop.getChildren();
			for (Element e : hadoop_prop) {
				String name = e.getAttributeValue("name");
				String value = e.getAttributeValue("value");
				if (name == null || value == null)
					continue;
				props.setProperty(name, value);
			}
			CloudContext.setHadoopProperties(props);
		}
	}

	/** Manual smoke test: loads the configuration and dumps the Hadoop props. */
	public static void main(String[] args) {
		try {
			new CloudInfoLoader().load();
			System.out.println(CloudContext.getHadoopProperties());
		} catch (IOException e) {
			e.printStackTrace();
		} catch (JDOMException e) {
			e.printStackTrace();
		}
	}
}
