package edu.ruc.cluster.hadoop;

import javax.xml.parsers.DocumentBuilderFactory;

import org.w3c.dom.Element;
import java.io.File;
import java.io.IOException;
import java.util.Map;
import java.util.HashMap;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.xml.sax.SAXException;

import edu.ruc.cluster.util.Const;
import edu.ruc.cluster.util.DefaultLogHandler;

/**
 * Edits a Hadoop site configuration file (core-site.xml, hdfs-site.xml or
 * mapred-site.xml) in place: the file is parsed into a DOM tree, the
 * properties appropriate for that file are appended under the
 * {@code <configuration>} root, and {@link #close()} writes the tree back.
 *
 * <p>Typical use: {@code new HadoopConfig(path, props).update();}
 * Not thread-safe.
 */
public class HadoopConfig {

	// Shared logger for the cluster-setup tooling.
	private DefaultLogHandler logHandler = null;
	// Parsed DOM of the config file being edited.
	private Document doc = null;
	// Path of the file we read from and write back to in close().
	private String fileName = null;
	// The <configuration> root element new <property> nodes are appended to.
	private Node root = null;
	
	// Values extracted from 'properties' by loadProperties().
	private String masterHost = null;
	private String hadoopDir = null;
	
	// Caller-supplied settings (master host, hadoop install dir, ...).
	private Map<String, String> properties;
	
	/**
	 * Parses the given Hadoop site file so {@link #update()} can append
	 * properties to it.
	 *
	 * @param filepath path of the XML config file to modify in place
	 * @param info     property map; copied defensively, later caller-side
	 *                 changes are not seen by this object
	 * @throws IllegalStateException if the file cannot be parsed or has no
	 *                               {@code <configuration>} root element
	 */
	public HadoopConfig(String filepath, Map<String, String> info){
		
		// Defensive copy so external mutation of 'info' cannot affect us.
		properties = new HashMap<String, String>(info);
		logHandler = DefaultLogHandler.getInstance();
		fileName = filepath;
		
		try{
			
			DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance();
			// Harden against XXE: the site files we edit never need a DOCTYPE,
			// so reject external entity definitions outright.
			docFactory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
			doc = docFactory.newDocumentBuilder().parse(filepath);
		
		}catch(ParserConfigurationException e){
			logHandler.error(this.getClass().getSimpleName() + " " + e.getMessage(), e);
			// Previously the error was swallowed and 'doc' stayed null, which
			// caused an opaque NPE below; fail fast with the real cause instead.
			throw new IllegalStateException("Cannot parse " + filepath, e);
		}catch(IOException e){
			logHandler.error(this.getClass().getSimpleName() + " " + e.getMessage(), e);
			throw new IllegalStateException("Cannot read " + filepath, e);
		}catch(SAXException e){
			logHandler.error(this.getClass().getSimpleName() + " " + e.getMessage(), e);
			throw new IllegalStateException("Malformed XML in " + filepath, e);
		}
		
		root = doc.getElementsByTagName("configuration").item(0);
		if(root == null){
			throw new IllegalStateException("No <configuration> element in " + filepath);
		}
	}

	/**
	 * Serializes the (possibly modified) DOM back to the original file,
	 * pretty-printed with 4-space indentation. Transformer failures are
	 * logged but not rethrown, matching the tool's best-effort style.
	 */
	public void close(){
		try{
			Transformer transformer = TransformerFactory.newInstance().newTransformer();
			
			// Indent the output XML for readability.
			transformer.setOutputProperty(OutputKeys.INDENT, "yes");
			transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "4");
			transformer.transform(new DOMSource(doc), new StreamResult(new File(fileName)));
		}catch(TransformerException e){
			logHandler.error(this.getClass().getSimpleName() + " " + e.getMessage(), e);
		}
	}

	/**
	 * Builds a detached {@code <property><name>..</name><value>..</value></property>}
	 * element; the caller is responsible for appending it to the tree.
	 */
	private Element getProperty(String name, String value){
		Element property = doc.createElement("property");
		Element nameElem = doc.createElement("name");
		Element valueElem = doc.createElement("value");
		nameElem.appendChild(doc.createTextNode(name));
		valueElem.appendChild(doc.createTextNode(value));
		
		property.appendChild(nameElem);
		property.appendChild(valueElem);
		return property;
	}
	
	/** Builds the fs.default.name property pointing at the master's HDFS port. */
	private Element getFSDefaultName(String masterhost){
		return getProperty(HadoopConst.FS_DEFAULT_NAME, "hdfs://" + masterhost + ":" + HadoopConst.MASTER_PORT_VALUE);
	}
	
	/**
	 * Builds a directory property whose value is the Hadoop install dir plus
	 * a per-property suffix; an unrecognized name maps to the install dir
	 * itself (empty suffix).
	 */
	private Element getDirConfig(String name){
		String append = "";
		if(name.equals(HadoopConst.HADOOP_TMP_DIR))
			append = "/hadoopdata";
		else if(name.equals(HadoopConst.FS_CHECKPOINT_DIR))
			append = "/dfs/namesecondary";
		else if(name.equals(HadoopConst.DFS_NAME_DIR))
			append = "/hadoopdata/dfs/name";
		else if(name.equals(HadoopConst.DFS_DATA_DIR))
			append = "/hadoopdata/dfs/data";
		else if(name.equals(HadoopConst.MAPRED_LOCAL_DIR))
			append = "/hadoopdata/dfs/mapred/local";
		
		return getProperty(name, hadoopDir + append);
	}
	
	/** Builds the dfs.replication property. */
	private Element getDFSReplication(int replic){
		return getProperty(HadoopConst.DFS_REPLICATION, String.valueOf(replic));
	}
	
	/** Builds the mapred.job.tracker property pointing at the master host. */
	private Element getMapredJobtracker(String masterhost){
		return getProperty(HadoopConst.MAPRED_JOB_TRACKER, masterhost + ":" + HadoopConst.JOB_TRACKER_PORT);
	}
	
	/** Builds the mapred.task.timeout property (value in milliseconds). */
	private Element getMapredTaskTimeout(long timeout){
		return getProperty(HadoopConst.MAPRED_TASK_TIMEOUT, String.valueOf(timeout));
	}
	
	/**
	 * Pulls the master host and Hadoop install dir out of the property map.
	 *
	 * @throws RuntimeException if the install dir is missing (every site file
	 *                          needs it); the master host is validated later,
	 *                          only by the files that use it
	 */
	private void loadProperties(){
		masterHost = properties.get(HadoopConst.MASTER_HOST_TAG);
		hadoopDir = properties.get(Const.HADOOP_DST_TAG);

		if(hadoopDir == null){
			logHandler.error("Missing property value for " + Const.HADOOP_DST_TAG);
			throw new RuntimeException("Missing property value for " + Const.HADOOP_DST_TAG);
		}
	}

	/** Appends the core-site.xml properties (fs.default.name, tmp/checkpoint dirs). */
	private void updateCoreSite(){
		if(properties.isEmpty()){
			logHandler.error("empty property file!");
			return;
		}

		if(masterHost == null || hadoopDir == null){
			// Message previously named HADOOP_SRC_TAG, but the value actually
			// read in loadProperties() is HADOOP_DST_TAG.
			logHandler.error("Missing property value for " + HadoopConst.MASTER_HOST_TAG + 
					" or " + Const.HADOOP_DST_TAG);
			return;
		}
		root.appendChild(getFSDefaultName(masterHost));
		root.appendChild(getDirConfig(HadoopConst.HADOOP_TMP_DIR));
		root.appendChild(getDirConfig(HadoopConst.FS_CHECKPOINT_DIR));
	}

	/** Appends the hdfs-site.xml properties (replication, name/data dirs). */
	private void updateHdfsSite(){
		root.appendChild(getDFSReplication(2));
		root.appendChild(getDirConfig(HadoopConst.DFS_NAME_DIR));
		root.appendChild(getDirConfig(HadoopConst.DFS_DATA_DIR));
	}

	/** Appends the mapred-site.xml properties (job tracker, local dir, timeout). */
	private void updateMapredSite(){
		if(masterHost == null || hadoopDir == null){
			// Same fix as updateCoreSite(): report the tag actually consulted.
			logHandler.error("Missing property value for " + HadoopConst.MASTER_HOST_TAG + 
					" or " + Const.HADOOP_DST_TAG);
			return;
		}
		root.appendChild(getMapredJobtracker(masterHost));
		root.appendChild(getDirConfig(HadoopConst.MAPRED_LOCAL_DIR));
		// 6,000,000 ms = 100 minutes; presumably generous for long-running
		// tasks on this cluster — TODO confirm with operators.
		root.appendChild(getMapredTaskTimeout(6000000));
	}

	/**
	 * Dispatches on the file name to append the properties appropriate for
	 * that site file, then writes the result back to disk. A file name that
	 * matches none of the known site files is written back unchanged.
	 */
	public void update(){
		loadProperties();
		if(fileName.contains(HadoopConst.CORE_SITE_FILE))
			updateCoreSite();
		else if(fileName.contains(HadoopConst.HDFS_SITE_FILE))
			updateHdfsSite();
		else if(fileName.contains(HadoopConst.MAPRED_SITE_FILE))
			updateMapredSite();
		close();
		logHandler.info("Done");
	}
	
}
