#!/usr/bin/env python
#-*-encoding:utf-8-*-
'''
Created on 2015年10月8日

@author: chenyongbing
'''
import sys,os
#current_dir = os.path.dirname(__file__)
current_dir = os.path.split(os.path.realpath(__file__))[0]

'''
    Batch-configure Hadoop across Docker containers.
    (original note: 在docker 容器中 批量配置 hadoop)
'''

import paramiko


# Cluster nodes, one entry per container, encoded as "hostname:ssh_host:ssh_port".
ips = [
       'sz110:192.168.1.208:11022',
       'sz111:192.168.1.208:11122',
       'sz112:192.168.1.208:11222',
       'sz113:192.168.1.208:11322'
       ]

# The first node acts as the HDFS NameNode; every remaining node is a DataNode.
hadoop_namenode, *hadoop_datanodes = ips

# Hive is co-located with the NameNode.
hive_node = ips[0]


def setting_masters_file(hadoop_namenode='localhost:127.0.0.1:22',configfile='/opt/hadoop/etc/hadoop/masters'):
    """Build the shell command that writes the NameNode hostname into Hadoop's masters file.

    :param hadoop_namenode: node spec in "hostname:ssh_host:ssh_port" form;
        only the hostname part (before the first ':') is written.
    :param configfile: path of the masters file the command overwrites.
    :return: the shell command string, e.g. 'echo "sz110" > /opt/hadoop/etc/hadoop/masters'.

    Fix: the original computed ``cmd`` and discarded it, so callers could never
    obtain (let alone execute) the command; it is now returned.
    """
    hadoop_namenode_host = hadoop_namenode.split(':')[0]
    cmd = 'echo "%s" > %s'%(hadoop_namenode_host,configfile)
    return cmd
    
def setting_slaves_file(hadoop_datanodes=None,configfile='/opt/hadoop/etc/hadoop/slaves'):
    """Build the shell command that writes the DataNode hostnames (one per line)
    into Hadoop's slaves file.

    :param hadoop_datanodes: iterable of node specs in "hostname:ssh_host:ssh_port"
        form; only each hostname (before the first ':') is written. ``None`` is
        treated as an empty list.
    :param configfile: path of the slaves file the command overwrites.
    :return: the shell command string.

    Fixes vs. the original:
    - ``split(':')`` appended the whole list instead of element [0], so
      ``'\\n'.join(...)`` raised TypeError for any non-empty input.
    - mutable default argument ``[]`` replaced by ``None`` sentinel.
    - ``cmd`` was computed and discarded; it is now returned (matching
      setting_masters_file).
    """
    if hadoop_datanodes is None:
        hadoop_datanodes = []
    hadoop_datanode_hosts = [node.split(':')[0] for node in hadoop_datanodes]
    cmd = 'echo "%s" > %s'%('\n'.join(hadoop_datanode_hosts),configfile)
    return cmd
        
def setting_core_site_xml(configfile='/opt/hadoop/etc/hadoop/core-site.xml'):
    """Placeholder for generating Hadoop's core-site.xml; not implemented yet (no-op)."""


