#!/bin/bash
set -e
# Check input arguments.
# The script only ever reads $1 (the machine name, see the echo below and the
# usage example), but the original guard demanded 3 arguments, which
# contradicted its own usage text — require at least 1 instead.
if [ $# -lt 1 ]; then
    echo "$0 错误：请传入参数，(机器名：name00/node01)！"
    echo "例如：$0 name00"
    exit 2
fi
echo '查看当前集群信息'
# Show the existing cluster roster if present, otherwise create an empty one.
# NOTE(review): chmod a+x on a data file looks unnecessary — kept for identical behavior.
if [ -f 'hadoop.info' ]; then
	chmod a+x hadoop.info
	cat hadoop.info
else
	touch hadoop.info
fi
echo ""
echo "测试机器名：$1"

# Ask the operator for confirmation before touching any node; anything other
# than y/Y aborts the run.
read -p "是否继续(Y/N)？：" isY
case "${isY}" in
	y|Y) ;;
	*) exit 1 ;;
esac

# (Re)generate the helper script.
# BUG FIX: the original cleared connectTest.sh (a file never used again) but
# appended to tmp.sh, so tmp.sh grew larger on every run — clear tmp.sh itself.
rm -f tmp.sh
touch tmp.sh

echo "循环写入文件"
# hadoop.info lines are whitespace-separated fields (field1, field2, host/IP).
# Read the fields directly instead of the original `eval $(... | awk ...)`,
# which did not use read -r and would execute anything embedded in the file.
# NOTE(review): $hadoopCfgDir and $HADOOP_PID_DIR are assumed to be set in the
# caller's environment — confirm before running.
while IFS=' ' read -r i1 i2 i3; do
	# Skip blank or malformed lines that would otherwise emit "ssh root@ ...".
	[ -n "${i3}" ] || continue
	# Emit one remote-fixup command per node into tmp.sh.
	echo "ssh root@${i3} 'chown -R hadoop.hadoop /home/hadoop/.ssh/authorized_keys;chmod 700 /home/hadoop/.ssh;chmod 700 /home/hadoop/.ssh/*'" >> tmp.sh
	echo "ssh root@${i3} 'chown -R hadoop.hadoop ${HADOOP_HOME}/etc/hadoop/slaves'" >> tmp.sh
	# BUG FIX: the original line was missing its closing quote and nested
	# unescaped single quotes, so the generated command could not run.
	# \${HADOOP_PID_DIR} must reach the remote sed as a literal placeholder,
	# while the replacement is the locally expanded ${HADOOP_PID_DIR}.
	echo "ssh root@${i3} 'mkdir -p ${HADOOP_PID_DIR};sed -i \"s@\\\${HADOOP_PID_DIR}@${HADOOP_PID_DIR}@g\" ${hadoopCfgDir}/hadoop-env.sh'" >> tmp.sh
done < hadoop.info

echo "分发文件到各个节点"
# Same roster format as hadoop.info above: whitespace-separated fields with the
# target host/IP in field 3. Read fields directly with read -r instead of the
# original `eval $(... | awk ...)`, which executed file content as shell code.
while IFS=' ' read -r i1 i2 i3; do
	# Skip blank or malformed lines rather than scp-ing to "root@:".
	[ -n "${i3}" ] || continue
	echo "${i2}"
	# Push the prepared key and config files to the node (quoted targets so an
	# odd hostname or ${HADOOP_HOME} value cannot word-split).
	scp authorized_keys "root@${i3}:/home/hadoop/.ssh/"
	scp hosts "root@${i3}:/etc/"
	scp slaves "root@${i3}:${HADOOP_HOME}/etc/hadoop/"
	#scp tmp.sh root@${i3}:~/
done < hadoop.info

echo "执行临时脚本"
# Run the per-node fixup commands generated earlier in this script.
chmod a+x tmp.sh
./tmp.sh
