#!/usr/bin/python2
# -*- coding: UTF-8 -*-

import sys
import os
import os.path
import types
from xml.dom import minidom

class htf_xml_config:
	"""Construct XML configurations for hadoop XML config files.

	target may be either a writable file-like object or a filesystem path;
	properties is a dict of {name: value} strings.
	"""
	def __init__(self, target, properties):
		self.conf_file  = target
		self.properties = properties

		impl = minidom.getDOMImplementation()
		self.xmldoc = impl.createDocument(None, "configuration", None)

	def construct(self):
		"""Build the DOM: stylesheet PI plus one <property> per item."""
		confnode = self.xmldoc.documentElement

		# Fix: the PI target must be "xml-stylesheet" (was "xml-stylesheed"),
		# otherwise browsers ignore configuration.xsl.
		pi = self.xmldoc.createProcessingInstruction(
				"xml-stylesheet", "type=\"text/xsl\" href=\"configuration.xsl\"")
		self.xmldoc.insertBefore(pi, confnode)

		def insert_prop(k, v):
			"insert a single property node"
			prop = self.xmldoc.createElement("property")
			labelkey = self.xmldoc.createElement("name")
			labelval = self.xmldoc.createElement("value")
			key = self.xmldoc.createTextNode(k)
			val = self.xmldoc.createTextNode(v)

			# newline text node keeps the serialized file readable
			interrupt = self.xmldoc.createTextNode(u"\n")
			confnode.appendChild(interrupt)

			confnode.appendChild(prop)
			prop.appendChild(labelkey)
			prop.appendChild(labelval)
			labelkey.appendChild(key)
			labelval.appendChild(val)

			return prop

		self.propnodes = [insert_prop(k, v) for (k, v) in self.properties.items()]

	def write(self):
		"""Write the XML document to the target.

		Accepts either an open file-like object or a path string; callers
		(e.g. write_config) pass plain paths, which writexml cannot take
		directly, so open/close the file here when needed.
		"""
		if hasattr(self.conf_file, "write"):
			self.xmldoc.writexml(self.conf_file)
		else:
			f = open(self.conf_file, "w")
			try:
				self.xmldoc.writexml(f)
			finally:
				f.close()

	def clear(self):
		"""Release the DOM tree (minidom needs explicit unlink to break cycles)."""
		# Original mapped a lambda that never *called* unlink (a no-op) and
		# crashed if construct() had not run; both fixed here.
		if getattr(self, "propnodes", None):
			for node in list(self.xmldoc.childNodes):
				node.unlink()
			self.propnodes = []

class htf_parse_config:
	"""Parse "key=value" configuration items from a list of strings.

	Only keys already present in property_dict are accepted; subclasses
	override property_dict with their full default table.
	"""

	def __init__(self, properties):
		# properties: iterable of "key=value" strings; malformed items are
		# silently ignored by parse().
		self.properties = properties
		self.property_dict = {"key1":"value1","key2":"value2"}

	def parse(self):
		"""Apply every well-formed "key=value" item to a known key."""
		for elem in self.properties:
			# split on the first "=" only, so values may themselves
			# contain "=" (the original split("=") dropped such items)
			parts = elem.split("=", 1)
			if len(parts) == 2 and parts[0] in self.property_dict:
				self.property_dict[parts[0]] = parts[1]

	def get(self, property):
		"""Return the value for a known key, else None."""
		return self.property_dict.get(property)

	def set(self, property, value):
		"""Update a key only if it is already known."""
		if property in self.property_dict:
			self.property_dict[property] = value

	def write(self, file):
		"""Serialize the current configuration to *file* as hadoop XML."""
		conf = htf_xml_config(file, self.property_dict)
		conf.construct()
		conf.write()
		conf.clear()

class htf_parse_config_core(htf_parse_config):
	"""core-site.xml defaults (hadoop 0.20 era key set)."""

	# Port assumed when fs.default.name names a host without a port.
	hdfs_default_port = "9000"

	def __init__(self,properties):
		htf_parse_config.__init__(self,properties)
		# All values are strings, as they are serialized to XML text.
		self.property_dict = {
			"hadoop.tmp.dir":"/tmp/hadoop-${user.name}",
			"hadoop.native.lib":"true",
			"hadoop.http.filter.initializers":"",
			"hadoop.security.authorization":"false",
			"hadoop.logfile.size":"10000000",
			"hadoop.logfile.count":"10",
			"io.file.buffer.size":"4096",
			"io.bytes.per.checksum":"512",
			"io.skip.checksum.errors":"false",
			"io.compression.codecs":"org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec",
			"io.serializations":"org.apache.hadoop.io.serializer.WritableSerialization",
			"fs.default.name":"file:///",
			"fs.trash.interval":"0",
			"fs.file.impl":"org.apache.hadoop.fs.LocalFileSystem",
			"fs.hdfs.impl":"org.apache.hadoop.hdfs.DistributedFileSystem",
			"fs.s3.impl":"org.apache.hadoop.fs.s3.S3FileSystem",
			"fs.s3n.impl":"org.apache.hadoop.fs.s3native.NativeS3FileSystem",
			"fs.kfs.impl":"org.apache.hadoop.fs.kfs.KosmosFileSystem",
			"fs.hftp.impl":"org.apache.hadoop.hdfs.HftpFileSystem",
			"fs.hsftp.impl":"org.apache.hadoop.hdfs.HsftpFileSystem",
			"fs.ftp.impl":"org.apache.hadoop.fs.ftp.FTPFileSystem",
			"fs.ramfs.impl":"org.apache.hadoop.fs.InMemoryFileSystem",
			"fs.har.impl":"org.apache.hadoop.fs.HarFileSystem",
			"fs.checkpoint.dir":"${hadoop.tmp.dir}/dfs/namesecondary",
			"fs.checkpoint.edits.dir":"${fs.checkpoint.dir}",
			"fs.checkpoint.period":"3600",
			"fs.checkpoint.size":"67108864",
			"fs.s3.block.size":"67108864",
			"fs.s3.buffer.dir":"${hadoop.tmp.dir}/s3",
			"fs.s3.maxRetries":"4",
			"fs.s3.sleepTimeSeconds":"10",
			"local.cache.size":"10737418240",
			"io.seqfile.compress.blocksize":"1000000",
			"io.seqfile.lazydecompress":"true",
			"io.seqfile.sorter.recordlimit":"1000000",
			"io.mapfile.bloom.size":"1048576",
			"io.mapfile.bloom.error.rate":"0.005",
			"hadoop.util.hash.type":"murmur",
			"ipc.client.idlethreshold":"4000",
			"ipc.client.kill.max":"10",
			"ipc.client.connection.maxidletime":"10000",
			"ipc.client.connect.max.retries":"10",
			"ipc.server.listen.queue.size":"128",
			"ipc.server.tcpnodelay":"false",
			"ipc.client.tcpnodelay":"false",
			"webinterface.private.actions":"false",
			"hadoop.rpc.socket.factory.class.default":"org.apache.hadoop.net.StandardSocketFactory",
			"hadoop.rpc.socket.factory.class.ClientProtocol":"",
			"hadoop.socks.server":"",
			"topology.node.switch.mapping.impl":"org.apache.hadoop.net.ScriptBasedMapping",
			"topology.script.file.name":"",
			"topology.script.number.args":"100",
		}

	def get_namenode_addr(self):
		"""Return (hostname, port) of the configured namenode.

		Returns (None, None) when fs.default.name is not an hdfs:// URI.
		The port falls back to hdfs_default_port (a string) when the URI
		carries no explicit port.

		Fixes three NameError typos in the original: fs.name -> fsname,
		parse.hostmane -> hostname, hdfs_defalut_port -> self.hdfs_default_port.
		"""
		try:
			from urlparse import urlparse          # Python 2
		except ImportError:
			from urllib.parse import urlparse      # Python 3
		fsname = self.property_dict["fs.default.name"]
		u = urlparse(fsname)
		if u.scheme != "hdfs":
			return (None,None)
		# Older urlparse versions leave "//host:port/" in .path for unknown
		# schemes such as hdfs; re-parse as http so hostname/port split
		# correctly whether or not netloc was recognized.
		if u.netloc:
			parsed = urlparse("http://%s" % (u.netloc))
		else:
			parsed = urlparse("http:%s" % (u.path))
		return (parsed.hostname, parsed.port or self.hdfs_default_port)


class htf_parse_config_hdfs(htf_parse_config):
	"""hdfs-site.xml defaults (hadoop 0.20 era key set).

	Only keys present in this table can be overridden via the base-class
	parse()/set().
	"""

	def __init__(self,properties):
		# properties: iterable of "key=value" override strings handled by
		# the base-class parse()
		htf_parse_config.__init__(self,properties)
		# All values are strings, as they are serialized to XML text.
		self.property_dict = {
			"dfs.namenode.logging.level":"info",
			"dfs.secondary.http.address":"0.0.0.0:50090",
			"dfs.datanode.address":"0.0.0.0:50010",
			"dfs.datanode.http.address":"0.0.0.0:50075",
			"dfs.datanode.ipc.address":"0.0.0.0:50020",
			"dfs.datanode.handler.count":"3",
			"dfs.http.address":"0.0.0.0:50070",
			"dfs.https.enable":"false",
			"dfs.https.need.client.auth":"false",
			"dfs.https.server.keystore.resource":"ssl-server.xml",
			"dfs.https.client.keystore.resource":"ssl-client.xml",
			"dfs.datanode.https.address":"0.0.0.0:50475",
			"dfs.https.address":"0.0.0.0:50470",
			"dfs.datanode.dns.interface":"default",
			"dfs.datanode.dns.nameserver":"default",
			"dfs.replication.considerLoad":"true",
			"dfs.default.chunk.view.size":"32768",
			"dfs.datanode.du.reserved":"0",
			"dfs.name.dir":"${hadoop.tmp.dir}/dfs/name",
			"dfs.name.edits.dir":"${dfs.name.dir}",
			"dfs.web.ugi":"webuser,webgroup",
			"dfs.permissions":"true",
			"dfs.permissions.supergroup":"supergroup",
			"dfs.data.dir":"${hadoop.tmp.dir}/dfs/data",
			"dfs.replication":"3",
			"dfs.replication.max":"512",
			"dfs.replication.min":"1",
			"dfs.block.size":"67108864",
			"dfs.df.interval":"60000",
			"dfs.client.block.write.retries":"3",
			"dfs.blockreport.intervalMsec":"3600000",
			"dfs.blockreport.initialDelay":"0",
			"dfs.heartbeat.interval":"3",
			"dfs.namenode.handler.count":"10",
			"dfs.safemode.threshold.pct":"0.999f",
			"dfs.safemode.extension":"30000",
			"dfs.balance.bandwidthPerSec":"1048576",
			"dfs.hosts":"",
			"dfs.hosts.exclude":"",
			"dfs.max.objects":"0",
			"dfs.namenode.decommission.interval":"30",
			"dfs.namenode.decommission.nodes.per.interval":"5",
			"dfs.replication.interval":"3",
			"dfs.access.time.precision":"3600000",
			"dfs.support.append":"false",
		}

class htf_parse_config_mapred(htf_parse_config):
	"""mapred-site.xml defaults (hadoop 0.20 era key set).

	Only keys present in this table can be overridden via the base-class
	parse()/set().
	"""

	def __init__(self,properties):
		# properties: iterable of "key=value" override strings handled by
		# the base-class parse()
		htf_parse_config.__init__(self,properties)
		# All values are strings, as they are serialized to XML text.
		self.property_dict = {
			"hadoop.job.history.location":"",
			"hadoop.job.history.user.location":"",
			"io.sort.factor":"10",
			"io.sort.mb":"100",
			"io.sort.record.percent":"0.05",
			"io.sort.spill.percent":"0.80",
			"io.map.index.skip":"0",
			"mapred.job.tracker":"local",
			"mapred.job.tracker.http.address":"0.0.0.0:50030",
			"mapred.job.tracker.handler.count":"10",
			"mapred.task.tracker.report.address":"127.0.0.1:0",
			"mapred.local.dir":"${hadoop.tmp.dir}/mapred/local",
			"mapred.system.dir":"${hadoop.tmp.dir}/mapred/system",
			"mapred.temp.dir":"${hadoop.tmp.dir}/mapred/temp",
			"mapred.local.dir.minspacestart":"0",
			"mapred.local.dir.minspacekill":"0",
			"mapred.tasktracker.expiry.interval":"600000",
			"mapred.tasktracker.instrumentation":"org.apache.hadoop.mapred.TaskTrackerMetricsInst",
			"mapred.tasktracker.memory_calculator_plugin":"",
			"mapred.tasktracker.taskmemorymanager.monitoring-interval":"5000",
			"mapred.tasktracker.procfsbasedprocesstree.sleeptime-before-sigkill":"5000",
			"mapred.map.tasks":"2",
			"mapred.reduce.tasks":"1",
			"mapred.jobtracker.restart.recover":"false",
			"mapred.jobtracker.job.history.block.size":"3145728",
			"mapred.jobtracker.taskScheduler":"org.apache.hadoop.mapred.JobQueueTaskScheduler",
			"mapred.jobtracker.taskScheduler.maxRunningTasksPerJob":"",
			"mapred.map.max.attempts":"4",
			"mapred.reduce.max.attempts":"4",
			"mapred.reduce.parallel.copies":"5",
			"mapred.reduce.copy.backoff":"300",
			"mapred.task.timeout":"600000",
			"mapred.tasktracker.map.tasks.maximum":"2",
			"mapred.tasktracker.reduce.tasks.maximum":"2",
			"mapred.jobtracker.completeuserjobs.maximum":"100",
			"mapred.jobtracker.instrumentation":"org.apache.hadoop.mapred.JobTrackerMetricsInst",
			"mapred.child.java.opts":"-Xmx200m",
			"mapred.child.ulimit":"",
			"mapred.child.tmp":"./tmp",
			"mapred.inmem.merge.threshold":"1000",
			"mapred.job.shuffle.merge.percent":"0.66",
			"mapred.job.shuffle.input.buffer.percent":"0.70",
			"mapred.job.reduce.input.buffer.percent":"0.0",
			"mapred.map.tasks.speculative.execution":"true",
			"mapred.reduce.tasks.speculative.execution":"true",
			"mapred.job.reuse.jvm.num.tasks":"1",
			"mapred.min.split.size":"0",
			"mapred.jobtracker.maxtasks.per.job":"-1",
			"mapred.submit.replication":"10",
			"mapred.tasktracker.dns.interface":"default",
			"mapred.tasktracker.dns.nameserver":"default",
			"tasktracker.http.threads":"40",
			"mapred.task.tracker.http.address":"0.0.0.0:50060",
			"keep.failed.task.files":"false",
			"mapred.output.compress":"false",
			"mapred.output.compression.type":"RECORD",
			"mapred.output.compression.codec":"org.apache.hadoop.io.compress.DefaultCodec",
			"mapred.compress.map.output":"false",
			"mapred.map.output.compression.codec":"org.apache.hadoop.io.compress.DefaultCodec",
			"map.sort.class":"org.apache.hadoop.util.QuickSort",
			"mapred.userlog.limit.kb":"0",
			"mapred.userlog.retain.hours":"24",
			"mapred.hosts":"",
			"mapred.hosts.exclude":"",
			"mapred.max.tracker.blacklists":"4",
			"mapred.max.tracker.failures":"4",
			"jobclient.output.filter":"FAILED",
			"mapred.job.tracker.persist.jobstatus.active":"false",
			"mapred.job.tracker.persist.jobstatus.hours":"0",
			"mapred.job.tracker.persist.jobstatus.dir":"/jobtracker/jobsInfo",
			"mapred.task.profile":"false",
			"mapred.task.profile.maps":"0-2",
			"mapred.task.profile.reduces":"0-2",
			"mapred.line.input.format.linespermap":"1",
			"mapred.skip.attempts.to.start.skipping":"2",
			"mapred.skip.map.auto.incr.proc.count":"true",
			"mapred.skip.reduce.auto.incr.proc.count":"true",
			"mapred.skip.out.dir":"",
			"mapred.skip.map.max.skip.records":"0",
			"mapred.skip.reduce.max.skip.groups":"0",
			"job.end.retry.attempts":"0",
			"job.end.retry.interval":"30000",
			"hadoop.rpc.socket.factory.class.JobSubmissionProtocol":"",
			"mapred.task.cache.levels":"2",
			"mapred.queue.names":"default",
			"mapred.acls.enabled":"false",
			"mapred.queue.default.acl-submit-job":"*",
			"mapred.queue.default.acl-administer-jobs":"*",
			"mapred.job.queue.name":"default",
			"mapred.tasktracker.indexcache.mb":"10",
			"mapred.merge.recordsBeforeProgress":"10000",
			"mapred.reduce.slowstart.completed.maps":"0.05",
		}

class htf_hadoop_config:
	"""Unpack a hadoop tarball, let subclasses rewrite its config, repack it."""

	def __init__(self, source_pack, build_path, hadoop_dir_name, tgt_pack):
		"""Record the packaging paths.

		source_pack: path to the source .tar.gz
		build_path: scratch directory to unpack into
		hadoop_dir_name: top-level directory name inside the tarball
		tgt_pack: path of the repacked .tar.gz to produce

		Fixes the original "def __init" (missing trailing underscores, so
		the constructor never ran) and the NameError on "target_pack".
		"""
		self.source_pack = source_pack
		self.target_pack = tgt_pack
		self.build_path  = build_path
		self.hadoop_dir_name = hadoop_dir_name

	def unpack(self):
		"""Extract the source tarball into the build directory."""
		# Fixed: bare "build_path" was a NameError; os.mkdirs does not
		# exist -- os.makedirs creates intermediate directories.
		if not os.path.isdir(self.build_path):
			os.makedirs(self.build_path)

		cmd = "tar -C %s -zxvf %s" % (self.build_path, self.source_pack)
		os.system(cmd)

	def repack(self):
		"""Re-create the target tarball from the (possibly modified) tree."""
		(dst_path, dst_pack_name) = os.path.split(self.target_pack)
		# dst_path is "" when target_pack is a bare filename; makedirs("")
		# would raise, so guard on it.
		if dst_path and not os.path.isdir(dst_path):
			os.makedirs(dst_path)

		hadoop_full_path = os.path.join(self.build_path, self.hadoop_dir_name)
		if os.path.isdir(hadoop_full_path):
			cmd = "tar -C %s -czvf %s %s" % (self.build_path, self.target_pack, self.hadoop_dir_name)
			os.system(cmd)
		else:
			sys.stderr.write("no hadoop dir available for pack\n")
			sys.exit(1)

class htf_hadoop_config_0_20(htf_hadoop_config):
	"""Config hadoop version 0.20
	for a hadoop 0.20 user, he/she should mention the path
	"""
	def __init__(self, source_pack, build_path, hadoop_dir_name, tgt_pack,
			namenode_host=None, namenode_port=None, port_prefix=None,
			snn_host=None, nnc_host=None,
			tmpdir=None, namedir=None, datadir=None, mapreddir=None,
			config=None):
		"""
		Note: namenode_port is trivial if namenode_host is None!

		Fixes: a missing comma after nnc_host=None (SyntaxError), and the
		parsers not being created when config is None (AttributeError at
		the get_namenode_addr() call below).
		"""
		htf_hadoop_config.__init__(self, source_pack, build_path,
				hadoop_dir_name, tgt_pack)

		# config is a "#"-separated list of "key=value" overrides; always
		# build the parsers so the defaults are available even without it.
		config_items = config.split("#") if config is not None else []
		self.core_parser = htf_parse_config_core(config_items)
		self.hdfs_parser = htf_parse_config_hdfs(config_items)
		self.mapred_parser = htf_parse_config_mapred(config_items)
		# Apply the overrides (the original stored them but never parsed).
		self.core_parser.parse()
		self.hdfs_parser.parse()
		self.mapred_parser.parse()

		(namenode_host_default, namenode_port_default) = \
				self.core_parser.get_namenode_addr()
		if namenode_host is not None:
			self.core_parser.set("fs.default.name", "hdfs://%s:%s/" % (
					namenode_host,
					namenode_port or namenode_port_default or self.core_parser.hdfs_default_port))
		if port_prefix is not None:
			# Rewrite the well-known listen ports, keeping the last two digits.
			self.hdfs_parser.set("dfs.secondary.http.address","0.0.0.0:%s90" % (port_prefix))
			self.hdfs_parser.set("dfs.datanode.address","0.0.0.0:%s10" % (port_prefix))
			self.hdfs_parser.set("dfs.datanode.http.address","0.0.0.0:%s75" % (port_prefix))
			self.hdfs_parser.set("dfs.datanode.ipc.address","0.0.0.0:%s20" % (port_prefix))
			self.hdfs_parser.set("dfs.http.address","0.0.0.0:%s70" % (port_prefix))
			self.mapred_parser.set("mapred.job.tracker.http.address","0.0.0.0:%s30" % (port_prefix))

		# Secondary namenode host falls back to the namenode, then localhost.
		self.snn_host = snn_host or namenode_host or namenode_host_default or "localhost"
		# nnc_host is a comma-separated list; stays None when not given.
		self.nnc_host = nnc_host and nnc_host.split(",")

		if tmpdir is not None:
			self.core_parser.set("hadoop.tmp.dir", tmpdir)
		if namedir is not None:
			self.hdfs_parser.set("dfs.name.dir", namedir)
		if datadir is not None:
			self.hdfs_parser.set("dfs.data.dir", datadir)
		if mapreddir is not None:
			self.mapred_parser.set("mapred.local.dir", mapreddir)

	def write_config(self):
		"""Unpack, write the three *-site.xml files, and repack."""
		self.unpack()
		hadoop_path = os.path.join(self.build_path, self.hadoop_dir_name)
		# NOTE(review): hadoop 0.20 tarballs normally ship a "conf/"
		# directory, not "config/" -- confirm against the actual package.
		hadoop_config_dir = os.path.join(hadoop_path, "config")
		self.core_parser.write(os.path.join(hadoop_config_dir, "core-site.xml"))
		self.hdfs_parser.write(os.path.join(hadoop_config_dir, "hdfs-site.xml"))
		self.mapred_parser.write(os.path.join(hadoop_config_dir, "mapred-site.xml"))
		self.repack()
	
if __name__ == "__main__":
	# Smoke test: only well-formed "key=value" items for known keys apply
	# ("hadoop.native.lib" has no value, "key3" is unknown, "" is empty).
	properties = ["hadoop.native.lib","hadoop.tmp.dir=/tmp","key3=kk",""]
	parser = htf_parse_config_core(properties)
	parser.parse()
	#parser.write(sys.stdout)
	parser.set("hadoop.tmp.dir","/tmp/x")
	# Parenthesized print works under both Python 2 and Python 3.
	print(parser.get("hadoop.tmp.dir"))

