#!/usr/bin/env python
# -*- coding: utf-8 -*-

import os
import sys
import time
from optparse import OptionParser
from xml.dom.minidom import parse

from htlib import *
from hparser import *
from cluster import ClusterConfig

class NameNodeHandler:
    """Stop, optionally clean/format, and restart the HDFS namenode of a
    test cluster.

    All actions are issued as shell commands through os.system; slave
    hosts are reached in parallel via pdsh. Progress is reported through
    log_out (from htlib).
    """

    def __init__(self, home_dir, cluster_name, format=False, record_tm=False):
        # BUG FIX: the original read the module-global `options` object here
        # instead of the constructor arguments, which made the parameters
        # dead and the class unusable without the CLI entry point.
        self.cluster = ClusterConfig(home_dir, cluster_name)

        self.hdp_home    = self.cluster.conf['cluster_home_dir']   # hadoop install dir
        self.sdir        = self.cluster.conf['cluster_share_dir']  # shared dir (restart log)
        self.is_format   = format           # format namenode or not
        self.record_time = record_tm        # record reboot timestamp or not

    def _get_dir(self, name):
        """Return property `name` from conf/hdfs-site.xml as a list of
        directories (the raw value is comma-separated)."""
        site_file = '%s/conf/hdfs-site.xml' % self.hdp_home
        parser = HadoopSiteParser(site_file)
        dirs = parser.get(name)
        return dirs.strip().split(',')

    def _get_datadir(self):
        """Datanode storage directories (dfs.data.dir)."""
        return self._get_dir('dfs.data.dir')

    def _get_namedir(self):
        """Namenode metadata directories (dfs.name.dir)."""
        return self._get_dir('dfs.name.dir')

    def _clean_data(self):
        """Delete all HDFS state: datanode dirs on every slave (via pdsh)
        and namenode dirs on the local host."""
        log_out("[NameNode] clean hadoop data begin...\n")

        # Datanode host list comes from the cluster 'slaves' configuration.
        datanode_list = ','.join(self.cluster.get_slaves())

        # Clean datanode data on all slaves in parallel.
        # (loop variable renamed from `dir`, which shadowed the builtin)
        for data_dir in self._get_datadir():
            clean_data = 'pdsh -f 256 -w %s rm -fr %s' % (datanode_list, data_dir)
            log_out('%s\n' % clean_data)
            os.system(clean_data)

        # Clean namenode metadata locally.
        for name_dir in self._get_namedir():
            clean_name = 'rm -fr %s' % name_dir
            log_out('%s\n' % clean_name)
            os.system(clean_name)
        log_out("[NameNode] clean hadoop data finished...\n")

    def _format_namenode(self):
        """Run `hadoop namenode -format` on the local host."""
        log_out('[NameNode] format hadoop namenode \n')

        command = '%s/bin/hadoop namenode -format' % self.hdp_home
        log_out('%s\n' % command)
        os.system(command)

        log_out('[NameNode] format hadoop namenode finished...\n')

    def reboot(self, start_all, sleeptime, restart=True):
        """Stop hadoop (all daemons when `start_all`, otherwise dfs only),
        optionally clean + format, then optionally start it again.

        `sleeptime` is currently unused (the original sleep call is
        commented out); it is kept in the signature for compatibility.
        """
        if start_all:
            stop  = '%s/bin/stop-all.sh' % self.hdp_home
            start = '%s/bin/start-all.sh' % self.hdp_home
        else:
            stop  = '%s/bin/stop-dfs.sh' % self.hdp_home
            start = '%s/bin/start-dfs.sh' % self.hdp_home

        log_out('[NameNode] stop hadoop now ...\n')
        os.system(stop)

        # Make sure no stray java processes survive the stop scripts.
        self.__killall_java()

        if self.is_format:
            time.sleep(10)   # let the daemons die before deleting their data
            self._clean_data()
            self._format_namenode()

        if restart:
            # time.sleep(sleeptime)
            # BUG FIX: log before starting -- the message used to be printed
            # only after the 120 s settle sleep, which was misleading.
            log_out('[NameNode] start hadoop now ...\n')
            os.system(start)
            time.sleep(120)  # give the cluster time to come up

    def __killall_java(self):
        """pkill -9 java on every slave (via the pdsh helper script) and on
        the master host."""
        kill_slave = '%s/hadoop-test/script/pdsh.py -c %s "pkill -9 java"' % (
                self.cluster.home_dir, self.cluster.cluster_name)
        kill_master = 'pkill -9 java'

        os.system(kill_slave)
        os.system(kill_master)

    def _is_started(self):
        """Probe HDFS by uploading a small file under a unique name.

        Returns True iff the `hadoop fs -put` exits with status 0; the
        uploaded boot_flag_<timestamp> file is intentionally left behind.
        """
        tm = time.time()
        command = '%(h)s/bin/hadoop fs -put %(h)s/bin/hadoop boot_flag_%(t)s' % {
                'h': self.hdp_home,
                't': tm,
                }
        return os.system(command) == 0

    def run(self):
        """Reboot the cluster per the parsed CLI flags, poll until HDFS
        accepts writes, and optionally record the restart timestamp.

        NOTE(review): this method reads the module-global `options` set by
        the CLI entry point; calling run() without parsing options first
        raises NameError -- confirm before reusing the class as a library.
        """
        self.reboot(options.all, options.sleeptime, options.restart)
        if not options.restart:
            return

        # Poll until the namenode accepts writes.
        while not self._is_started():
            time.sleep(1)

        if self.record_time:
            tm = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime())
            record_msg  = '[%s] restart hadoop success, format namenode is %s\n' % (tm, str(self.is_format))
            record_file = '%s/restart_hdp.log' % self.sdir
            # BUG FIX: close the handle deterministically. Mode 'w+' is kept
            # (each restart overwrites the previous record); switch to 'a'
            # if a history of restarts is wanted.
            with open(record_file, 'w+') as f:
                f.write(record_msg)
            
if __name__ == '__main__':
    parser = OptionParser()
    parser.add_option("-H", "--home",       dest='home_dir',      type="string", default="/home/testhdp")
    parser.add_option("-c", "--cluster",    dest='cluster_name',  type="string", default="cluster1")
    parser.add_option("-s", "--sleep",      dest='sleeptime',     type='int',    default=120)
    parser.add_option("-t", "--record",     dest='record',        action='store_true', default=False)
    parser.add_option("-a", "--all",        dest='all',           action='store_true', default=False)
    parser.add_option("-f", "--format",     dest='format',        action='store_true', default=False)
    parser.add_option("-r", "--restart",    dest='restart',       action='store_true', default=False)
    (options, args) = parser.parse_args()
    print options

    handler = NameNodeHandler(options.home_dir, options.cluster_name, options.format, options.record)
    handler.run()
