#!/bin/bash
# Automated installation script for Hadoop (pseudo-distributed mode).
#
# Steps performed:
#   1. Detect the current user and read the Hadoop archive file name
#   2. Update apt-get and install ssh
#   3. Extract the Hadoop archive into ~/hadoop
#   4. Apply the pseudo-distributed configuration
#
# NOTE(review): the original used a `((0)) && { ... }` group as a block
# comment. That text is still parsed by the shell, so any shell
# metacharacter inside it would break the script; real comments are safe.

# Script banner
echo "This automation shell was developed to simplify the Hadoop installation process"
echo "CopyRight@BIT 05 by WDC"
echo "Last compiled May 7, 2020"
echo ""

# Read a single keypress from the terminal without echoing it.
# Saves the current tty state, switches to silent cbreak mode, reads one
# byte from /dev/tty, then restores the saved terminal settings.
get_char()
{
    local saved_tty
    saved_tty=$(stty -g)
    stty -echo
    stty cbreak
    dd if=/dev/tty bs=1 count=1 2> /dev/null
    stty -raw
    stty echo
    stty "$saved_tty"
}

# Pre-flight notice: list the script's assumptions, then wait for a
# keypress (or Ctrl+C to abort) before doing anything.
printf '%s\n' \
    "在执行 Hadoop-install 脚本前需要注意：" \
    "1、本脚本仅适用于 Ubuntu 系统，目前尚未对其他系统做适配" \
    "2、当前用户必须有足够的权限，例如管理员权限" \
    "3、系统已经安装了JDK，并配置了环境变量" \
    "4、Hadoop安装文件与脚本必须都在home目录下" \
    "5、保证系统处于联网状态" \
    "" \
    "请按任意键继续!" \
    "或者按 Ctrl+C 退出"
char=$(get_char)
printf '%s\n' "操作完成" ""

# Detect the current user and warn if it is not root.
# Fix: the original used `env | grep USER | cut -d= -f2`, which also
# matches other variables containing "USER" (e.g. USERNAME) and can
# return a wrong or multi-line value; `id -un` is unambiguous.
echo "正在检测当前用户"
userName=$(id -un)
if [ "$userName" != "root" ]
then
	echo "当前用户是${userName}，非root用户，请一定要保证拥有执行权限"
else
	echo "当前用户为root用户，权限正常"
fi
echo "请按任意键继续!"
char2=$(get_char)
echo "操作完成"
echo ""


# Refresh the apt package lists before installing anything.
echo "更新apt-get"
sudo apt-get update

echo ""
echo "开始配置SSH"
# Interactive notes (also echoed to the user below):
#   - answer "yes" to the first SSH host-key prompt
#   - enter the account password when prompted to log in
#   - press Enter at every ssh-keygen prompt
#   - remember to `exit` after each ssh login (two logins, two exits)
echo "脚本配置SSH时会有两次登录SSH，第一次是配置密码登录，第二次是免密登录"
echo "看到 welcome to ubuntu的字样即登录成功"
echo "ssh登录后记得按 exit 退出，两次登录就需要两次退出"
echo ""
echo "请按任意键继续!"
char=$(get_char)
echo "操作完成"
echo ""

# Install the SSH server, then set up passwordless login to localhost.
sudo apt-get install openssh-server
ssh localhost
# Fix: abort if ~/.ssh does not exist (e.g. the first ssh login was
# skipped) instead of generating the key in the current directory.
cd ~/.ssh/ || { echo "~/.ssh 目录不存在"; exit 1; }
ssh-keygen -t rsa
cat ./id_rsa.pub >> ./authorized_keys
# Verify that passwordless login now works
ssh localhost

echo ""
echo "接下来开始安装Hadoop"
# 获取当前目录
cd ~
BASE_PATH=$(cd `dirname $0`; pwd)
echo $BASE_PATH

# 输入Hadoop文件名
echo ""
echo "请输入Hadoop安装文件名，例如 hadoop-2.7.7.tar.gz"
read HadoopName

# 异常处理，输入文件名为空
if [ "$HadoopName" == "" ]
then
    echo "输入文件名为空"
    echo "脚本停止执行"
    exit 0
fi

# 异常处理，名字可能多一个字符
if [ "${HadoopName:0:1}" != "h" ]
then
    HadoopName="${HBaseName:1}"
fi
echo $HadoopName

echo ""
if [ ! -e "${BASE_PATH}/${HadoopName}" ]
then
	echo "文件不存在或无法读取"
	echo "脚本停止执行"
	exit 0
fi

# 解压安装
sudo tar -zxvf "${BASE_PATH}/${HadoopName}"
sudo mv ./"${HadoopName:0:12}" ./hadoop
sudo chown -R $userName:$userName ./hadoop
HADOOP_PATH="${BASE_PATH}/hadoop"
echo $HADOOP_PATH

# Point Hadoop at the local JDK.
echo "Hadoop配置JDK"
echo "$JAVA_HOME"
# Insert `export JAVA_HOME=...` at line 26 of hadoop-env.sh, then delete
# the old placeholder at line 25. Quoted so a JAVA_HOME containing
# spaces does not split the sed expression.
sed -i "26iexport JAVA_HOME=${JAVA_HOME}" "$HADOOP_PATH/etc/hadoop/hadoop-env.sh"
sed -i "25d" "$HADOOP_PATH/etc/hadoop/hadoop-env.sh"
source "$HADOOP_PATH/etc/hadoop/hadoop-env.sh"
echo ""

# Append HADOOP_HOME and the Hadoop bin/sbin directories to the
# system-wide PATH.
echo "环境变量配置"
sudo sed -i '$aexport HADOOP_HOME='"${HADOOP_PATH}" /etc/profile
sudo sed -i '$aexport PATH=$PATH:$HADOOP_HOME/sbin:$HADOOP_HOME/bin' /etc/profile
source /etc/profile
# Smoke-test the installation.
# Fix: the original ran `hdaoop version` (typo), so this check always failed.
hadoop version

# Pseudo-distributed configuration.
echo ""
echo "开始伪分布式配置"
# Rewrite core-site.xml: delete everything from line 18 onward, then
# append a fresh <configuration> section in one here-document
# (${HADOOP_PATH} is expanded, same bytes as the original echo chain).
sed -i '18,$d' $HADOOP_PATH/etc/hadoop/core-site.xml
cat >> $HADOOP_PATH/etc/hadoop/core-site.xml <<EOF
<configuration>
  <property>
      <name>hadoop.tmp.dir</name>
      <value>${HADOOP_PATH}/tmp</value>
  </property>
  <property>
      <name>fs.defaultFS</name>
      <value>hdfs://localhost:9000</value>
  </property>
</configuration>
EOF
echo "core-site.xml 配置完成!"

# Rewrite hdfs-site.xml the same way: truncate from line 18 onward and
# append the pseudo-distributed settings (single replica, local
# NameNode/DataNode storage under ${HADOOP_PATH}/tmp).
sed -i '18,$d' $HADOOP_PATH/etc/hadoop/hdfs-site.xml
cat >> $HADOOP_PATH/etc/hadoop/hdfs-site.xml <<EOF
<configuration>
  <property>
      <name>dfs.replication</name>
      <value>1</value>
  </property>
  <property>
      <name>dfs.namenode.name.dir</name>
      <value>${HADOOP_PATH}/tmp/dfs/name</value>
  </property>
  <property>
      <name>dfs.datanode.data.dir</name>
      <value>${HADOOP_PATH}/tmp/dfs/data</value>
  </property>
</configuration>
EOF
echo "hdfs-site.xml 配置完成!"


# Installation finished. HDFS still has to be formatted manually by the
# user before Hadoop can be used (this script does not do it).
echo "Hadoop自动安装完成，伪分布式配置完成~"
echo "自己手动初始化HDFS后即可使用Hadoop"
echo "脚本执行完成，see you~"
echo ""
