#!/usr/bin/env python
import glob
import os
import shlex
import subprocess
import sys
import xml.etree.cElementTree as ET
from optparse import OptionParser

# set the environment variables
def setEnv():
    """Export the Hadoop/YARN environment variables into this process.

    All *_HOME variables point at the shared install under /opt/hadoop;
    config and log directories live under the caller's $HOME.
    """
    home = os.getenv("HOME")
    conf_dir = home + "/hadoop/etc/hadoop"
    log_dir = home + "/hadoop/log"
    # every Hadoop component shares the same install prefix
    for var in ("HADOOP_HOME", "HADOOP_MAPRED_HOME", "HADOOP_COMMON_HOME",
                "HADOOP_HDFS_HOME", "YARN_HOME"):
        os.environ[var] = "/opt/hadoop"
    os.environ["HADOOP_CONF_DIR"] = conf_dir
    os.environ["YARN_CONF_DIR"] = conf_dir
    os.environ["HADOOP_LOG_DIR"] = log_dir
    os.environ["YARN_LOG_DIR"] = log_dir
    os.environ["JAVA_HOME"] = "/usr/java/jdk1.6.0_11/"

# execute the command for every host in the list
def ssh_batch_command(hosts, command):
    """Issue `command` over ssh on each host in `hosts`, one session per host."""
    for target in hosts:
        ssh_command(target, command)

# send a command to the host via ssh
def ssh_command(host, command):
    """Run `command` on `host` via ssh and wait for it to finish.

    Returns the ssh exit status.  The original fired off the Popen and
    never read the pipes or reaped the child, which leaks zombie
    processes and can deadlock once a pipe buffer fills.
    """
    proc = subprocess.Popen(["ssh", "-oStrictHostKeyChecking=no", host, command],
                            shell=False,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            bufsize=1)
    # drain both pipes and reap the child
    proc.communicate()
    return proc.returncode

# run a command in a subprocess
def command(commandline, environ=None):
    """Run `commandline` in a subprocess and mirror its output.

    commandline -- a shell-style command string, tokenized with shlex
    environ     -- environment dict for the child; defaults to a fresh
                   copy of os.environ taken at CALL time (the original
                   default was evaluated once at def time, and the
                   parameter was then never passed to Popen at all)

    Child stdout/stderr are forwarded to our stdout/stderr.
    Returns the child's exit code.
    """
    if environ is None:
        environ = os.environ.copy()
    args = shlex.split(commandline)
    proc = subprocess.Popen(args,
                            shell=False,
                            env=environ,  # bug fix: environ was silently ignored
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            universal_newlines=True,  # text mode on py2 and py3
                            bufsize=1)
    for line in iter(proc.stdout.readline, ''):
        sys.stdout.write(line)
    # NOTE(review): stderr is only drained after stdout closes, so a child
    # that fills the stderr pipe first can still stall — same as the original.
    for line in iter(proc.stderr.readline, ''):
        sys.stderr.write(line)
    proc.communicate()
    return proc.returncode

# configure hadoop for the host list and start the daemons
def init(hosts):
    """Write the Hadoop/YARN configuration for `hosts` and start the cluster.

    hosts[0] is used as the master (namenode / resourcemanager address);
    hosts[1:] become the slaves.  Relies on the module-level globals
    `mkdir` and `tmp_dir` defined in the __main__ section.
    """
    hadoop_home = os.getenv("HADOOP_HOME")
    hadoop_config_dir = os.getenv("HADOOP_CONF_DIR")

    # properties for yarn-site.xml; "localhost" is rewritten to the master below
    yarnProperties = {'yarn.resourcemanager.resource-tracker.address': 'localhost:8025',
                      'yarn.resourcemanager.scheduler.address': 'localhost:8030',
                      'yarn.resourcemanager.address': 'localhost:8040'}
    # properties for core-site.xml
    coreProperties = {'fs.default.name': 'hdfs://localhost:9000'}
    # point every address at the master host
    updateProperties(yarnProperties, hosts[0])
    updateProperties(coreProperties, hosts[0])

    # push the properties into the XML config files
    updateXML(hadoop_config_dir + "/core-site.xml", coreProperties)
    updateXML(hadoop_config_dir + "/yarn-site.xml", yarnProperties)

    # make the temp directory on every node
    ssh_batch_command(hosts, " ".join([mkdir, tmp_dir]))

    # rewrite the slaves file with the worker hosts; mode "w" already
    # truncates, so the old separate truncating open was redundant, and
    # `with` closes the handle even on error (the old finally could hit
    # an unbound `f` if open itself failed)
    try:
        with open(hadoop_config_dir + "/slaves", "w") as f:
            for h in hosts[1:]:
                f.write(h + "\n")
    except IOError:
        print("errors when modifying slaves")

    # for multi-nodes:
    # format the namenode and launch the services on master and slaves
    command(hadoop_home + "/bin/hadoop namenode -format")
    command(hadoop_home + "/sbin/hadoop-daemon.sh start namenode")
    command(hadoop_home + "/sbin/hadoop-daemons.sh start datanode")
    command(hadoop_home + "/sbin/yarn-daemon.sh start resourcemanager")
    command(hadoop_home + "/sbin/yarn-daemons.sh start nodemanager")
    
# uninit and stop hadoop
def stop():
    """Stop every Hadoop/YARN daemon and remove temp files on all nodes.

    Relies on the module-level globals `rm` and `tmp_dir` defined in the
    __main__ section.
    """
    hadoop_home = os.getenv("HADOOP_HOME")
    hadoop_config_dir = os.getenv("HADOOP_CONF_DIR")
    username = os.getenv("USER")

    # multi-nodes: stop daemons in reverse start order
    command(hadoop_home + "/sbin/yarn-daemons.sh stop nodemanager")
    command(hadoop_home + "/sbin/yarn-daemon.sh stop resourcemanager")
    command(hadoop_home + "/sbin/hadoop-daemons.sh stop datanode")
    command(hadoop_home + "/sbin/hadoop-daemon.sh stop namenode")

    # read the slave host list; `with` closes the file even on error and
    # avoids the unbound-`f` NameError the old try/finally raised when
    # open() itself failed
    with open(hadoop_config_dir + "/slaves", "r") as f:
        slaves = [line.rstrip() for line in f]  # rstrip(): delete '\n'

    # shell fragments removing the temp dir and the daemon pid files
    rm_tmp_directory = " ".join([rm, tmp_dir])
    rm_tmp_files = "rm /tmp/*-" + username + "-*.pid"

    # remove temp files locally and on every slave; command() runs with
    # shell=False, so the pid-file glob never expanded locally — expand
    # it ourselves (the remote shell on each slave still globs it)
    command(rm_tmp_directory)
    ssh_batch_command(slaves, rm_tmp_directory)
    for pidfile in glob.glob("/tmp/*-" + username + "-*.pid"):
        os.remove(pidfile)
    ssh_batch_command(slaves, rm_tmp_files)

# update the XML
def updateXML(xmlFileName, properties):
    """Rewrite the <value> of each <property> in `xmlFileName` whose
    <name> appears as a key in `properties`, then save the file in place.
    Properties not present in the file are silently ignored.
    """
    tree = ET.ElementTree(file=xmlFileName)
    for prop in tree.findall("./property"):
        name = prop.find("name").text
        if name not in properties:
            continue
        prop.find("value").text = properties[name]
    tree.write(xmlFileName)

# update properties by replacing the "localhost" placeholder with the hostname
def updateProperties(p, host):
    """In place, substitute `host` for every "localhost" in the values of dict `p`.

    Iterates over a snapshot of the items: `iteritems()` is Python-2-only
    and mutating a dict while walking a live iterator is fragile.
    """
    for key, value in list(p.items()):
        p[key] = value.replace("localhost", host)

# get the list of nodes
def getNodes():
    """Return the unique host names listed in $OAR_NODE_FILE, in
    first-seen order (the OAR node file repeats a host once per core).

    `with` replaces the old try/finally, whose `f.close()` raised a
    NameError masking the real error whenever open() itself failed.
    """
    nodefile = os.getenv("OAR_NODE_FILE")
    hosts = []
    with open(nodefile, "r") as f:
        for line in f:
            host = line.rstrip()  # remove '\n'
            if host not in hosts:
                hosts.append(host)
    return hosts

if __name__ == '__main__':
    # shell-command fragments shared (as globals) by init() and stop()
    tmp_dir = "/tmp/hadoop"
    mkdir = "mkdir -p"
    rm = "rm -r"
    hosts = getNodes()

    # command-line options
    usage = "usage: %prog [options] arg1 arg2"
    parser = OptionParser(usage=usage)
    parser.add_option("-i", "--init",
                      action='store_true',
                      dest='init',
                      help="init the environment")
    parser.add_option("-j", "--jar",
                      action="store",
                      dest="filename",
                      help="run a jar")
    parser.add_option("-c", "--clean",
                      action="store_true",
                      dest="clean",
                      help="clean up")
    parser.add_option("-r", "--run",
                      action="store",
                      dest="run",
                      help="run a command")
    (options, args) = parser.parse_args()

    setEnv()
    if options.init:
        # print(...) with a single argument behaves identically on
        # Python 2 and 3; the old print statements were 2-only
        print("initializing")
        init(hosts)
        print("started")
    if options.filename:
        # remaining positional args are forwarded to the jar
        c = ["/opt/hadoop/bin/hadoop jar", options.filename]
        c.extend(args)
        command(" ".join(c), os.environ.copy())
    if options.run:
        c = ["/opt/hadoop/bin/hadoop", options.run]
        c.extend(args)
        command(" ".join(c), os.environ.copy())
    if options.clean:
        print("stopping")
        stop()
        print("stopped")