#!/bin/bash
#author : xingej@163.com
###########作用#########
#
#等容器创建好后，
#    1、 创建共享数据卷,如master-hadoop
#    2、 将hadoop、spark等包移动到/usr/local/master-hadoop目录下
#    3、 修改hadoop的配置文件slaves，输入Datanode节点的名称
#######################
# Cache this container's hostname; used below to run master-only steps.
# $(...) instead of deprecated backticks.
hostName=$(hostname)
#######################################
# Relocate the unpacked hadoop, spark and scala distributions
# from /opt into /usr/local.
# Globals:   none
# Arguments: none
# Outputs:   moves /opt/{hadoop,spark,scala} to /usr/local/
#######################################
function mvHadoopSpark(){
	local packages=(hadoop spark scala)
	local pkg

	for pkg in "${packages[@]}"
	do
		mv "/opt/${pkg}" /usr/local/
	done
}

#######################################
# Rebuild Hadoop's "slaves" file with the DataNode host names
# (slave1-hadoop, slave2-hadoop, ...).
# Globals:   NODES (read) - cluster node count; defaults to 3 when
#            unset or empty.
# Arguments: $1 (optional) - path of the slaves file to write;
#            defaults to /usr/local/hadoop/etc/hadoop/slaves.
# Outputs:   overwrites the slaves file with one hostname per line.
#######################################
function createSlaves(){
	local slaves_file=${1:-/usr/local/hadoop/etc/hadoop/slaves}
	local slave_count=${NODES:-3}
	local i

	# Start from an empty file; -f keeps the first run quiet when the
	# file does not exist yet (plain rm would print an error).
	rm -f "$slaves_file"
	touch "$slaves_file"

	# NOTE(review): loop bound is i < count (kept from the original), so
	# NODES=3 yields slave1..slave2 — presumably the master counts as one
	# of the NODES; confirm against how NODES is set by the deployment.
	for (( i = 1; i < slave_count; i++ ))
	do
		echo "slave$i-hadoop" >> "$slaves_file"
	done
}

# Stage the packages first; only rebuild the slaves file if that succeeded.
mvHadoopSpark && createSlaves

# Copy the shared scripts, but only on the master container.
# (Inside [[ ]] the legacy x-prefix guard is unnecessary.)
if [[ "$hostName" == "master-hadoop" ]]
then
	cp -r /opt/comScript /usr/local
fi

#top -b





