#!/bin/bash
## Hadoop cluster node bootstrap script.
## Usage: <script> <service-id> <master|slave>
set -e

## Validate arguments: a service ID and a role (master/slave) are expected.
## BUG FIX: the usage example printed "1 master" twice; show both roles.
if [ $# -lt 1 ]; then
    echo "$0 错误：请传入参数，(1、服务ID; 2、服务类型:master/slave)！"
    echo "例如：./$0 1 master 或 ./$0 2 slave"
    exit 2
fi
#######################	1. Environment setup 	#######################
## 1.1 Disable the firewall and SELinux (CentOS 6 style tooling).
## The `|| true` guards are needed because these commands return non-zero
## when the service is already stopped / SELinux is already disabled,
## which would abort the whole script under `set -e`.
service iptables stop || true
chkconfig iptables off

setenforce 0 || true
## Persist the disabled state across reboots.
sed -i "s@^SELINUX=enforcing@SELINUX=disabled@g" /etc/sysconfig/selinux

## 1.2 Set the machine hostname and the /etc/hosts name-resolution table.
## NOTE(review): the cluster IPs and the node name are hard-coded even
## though the script accepts a service-ID/role argument — confirm this is
## intentional, and that 172.16.117.0 (normally a network address in a /24)
## is really the master's IP.
cat > /etc/hosts <<END
127.0.0.1   localhost localhost.localdomain localhost4 localhost4.localdomain4
::1         localhost localhost.localdomain localhost6 localhost6.localdomain6
172.16.117.0    master
172.16.117.1    node01
172.16.117.2    node02
END

## Set the hostname for the running session, then persist it in
## /etc/sysconfig/network (CentOS 6 convention).
## NOTE(review): the sed pattern assumes the file currently contains exactly
## "HOSTNAME=localhost.report" — verify on the target machines, otherwise
## the substitution silently does nothing.
hostname node02
sed -i "s@HOSTNAME=localhost.report@HOSTNAME=node02@g" /etc/sysconfig/network

## 1.3 Create the hadoop group and user.
## Guarded: on a re-run `groupadd`/`useradd` exit non-zero when the user
## already exists, which would abort the script under `set -e`. The same
## guard style is used by the later user-creation block in this script.
if ! grep -q '^hadoop:' /etc/passwd; then
    groupadd hadoop
    useradd hadoop -g hadoop
    echo '123456' | passwd --stdin hadoop
fi

## 1.4 Generate the hadoop user's SSH key pair.
## BUG FIX: the original ran a bare `su - hadoop`, which opens an
## interactive login shell and blocks the script — all following commands
## would then execute as root only after that shell exits, leaving the key
## files root-owned. Run the whole setup through `su - hadoop -c` instead.
## BUG FIX: the original ended with `chmod 700 -R ~/.ssh`, which clobbered
## the 600 mode just set on authorized_keys; chmod only the directory.
su - hadoop -c "
    mkdir -p /home/hadoop/.ssh
    cd /home/hadoop/.ssh
    ssh-keygen -t dsa -P '' -f id_dsa
    cat id_dsa.pub > authorized_keys
    chmod 600 /home/hadoop/.ssh/authorized_keys
    chmod 700 /home/hadoop/.ssh
"

## On the master: fetch each node's public key, merge them into a single
## authorized_keys file, then push it back to the nodes. Run as hadoop and
## from ~/.ssh so the relative paths resolve there.
su - hadoop -c "
    cd /home/hadoop/.ssh
    scp hadoop@node02:/home/hadoop/.ssh/id_dsa.pub ./id_dsa.pub.node02
    cat id_dsa.pub.node02 >> authorized_keys
    scp authorized_keys hadoop@node02:/home/hadoop/.ssh/
"

#######################	2. Install Hadoop 	#######################
## Load the shared configuration (defines URL_FILE, HADOOP_HOME,
## HADOOP_FILE, HADOOP_VERSION, SYS_PROFILE_DIR, ...).
## NOTE(review): the relative path assumes the script is launched from its
## own directory — confirm the invocation convention.
. ../config/my-config.sh
## Build the download URL for the Hadoop tarball.
HADOOP_URL=$URL_FILE/hadoop
mkdir -p "$HADOOP_HOME"

## Download the tarball only when it is not already present locally.
if [ ! -f "$HADOOP_FILE" ]; then
    wget "$HADOOP_URL/$HADOOP_FILE"
fi
## Always re-extract from scratch. SAFETY FIX: `${HADOOP_VERSION:?}` aborts
## when the variable is empty/unset, so `rm -rf` can never expand to
## /home/soft/ and wipe the whole directory.
rm -rf "/home/soft/${HADOOP_VERSION:?}"
## tar -zxvf: gunzip + extract with a file listing; -C picks the target dir.
tar -zxvf "$HADOOP_FILE" -C /home/soft
## Write the Hadoop environment variables into a profile.d-style script.
HADOOP_PROFILE=$SYS_PROFILE_DIR/hadoop.sh

## Here-doc expansion: $HADOOP_HOME is expanded NOW (its value is baked
## into the generated file), while the escaped \$PATH / \$HADOOP_HOME are
## left literal and expand when the profile is sourced later.
## NOTE(review): the generated file resets PATH to a fixed base list
## instead of appending to the caller's PATH — confirm this is intentional.
cat > "$HADOOP_PROFILE" <<END
HADOOP_HOME=$HADOOP_HOME
PATH=/usr/lib64/qt-3.3/bin:/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin
PATH=\$PATH:\$HADOOP_HOME/bin
export HADOOP_COMMON_LIB_NATIVE_DIR=${HADOOP_HOME}/lib/native  
export HADOOP_OPTS="-Djava.library.path=${HADOOP_HOME}/lib" 
export HADOOP_HOME PATH
END

## Make the generated profile world-readable/executable.
chmod 755 "$HADOOP_PROFILE"
## Re-source /etc/profile so the freshly written hadoop.sh profile takes
## effect in the current shell.
source /etc/profile
## Create the HDFS data/name directories and the temporary directories.
for d in data name temple temp; do
    mkdir -p "/home/soft/$d"
done

## Create the hadoop user if missing, then give it ownership of the
## install tree. `! grep -q '^hadoop:'` replaces the original
## `!(egrep ... >& /dev/null)`: no needless subshell, no deprecated egrep,
## and the trailing colon anchors the match to the exact user name
## (a bare ^hadoop would also match users like "hadoop2").
if ! grep -q '^hadoop:' /etc/passwd; then
	groupadd hadoop
	useradd hadoop -g hadoop
	echo '123456' | passwd --stdin hadoop
fi
chown -R hadoop:hadoop /home/soft

## Passwordless-SSH / Hadoop configuration checklist.
## NOTE: this here-doc only PRINTS the remaining steps to stdout as a
## manual checklist for the operator — none of the commands below (key
## generation, the config-file edits, namenode format, start-dfs.sh) are
## actually executed by this script. The content must stay verbatim since
## it is the script's output.
cat <<end
su hadoop
cd /home/hadoop/
ssh-keygen -q -t rsa -N "" -f /home/hadoop/.ssh/id_rsa
cd .ssh
cat id_rsa.pub > authorized_keys
chmod go-wx authorized_keys

cd /home/soft/hadoop-2.2.0/etc/hadoop/
vi hadoop-env.sh
vi yarn-env.sh
core-site.xml
hdfs-site.xml
mapred-site.xml
yarn-site.xml
capacity-scheduler.xml

hadoop namenode -format
cd /home/soft/hadoop-2.2.0/sbin
./start-dfs.sh
end