#!/usr/bin/env bash

# Master hostname comes from the environment; workers are listed one per
# line in the file named by $FILENAME — TODO confirm format with callers.
master=${MASTER}
# Read worker entries line-by-line. mapfile avoids the unquoted
# array-from-command-substitution (slaves=($(awk ...))) which word-splits
# and glob-expands each line; the awk '{print $0}' pass-through added nothing.
mapfile -t slaves < "${FILENAME}"

# Download and unpack the Spark tarball
# Fetch the Spark 2.3.0 tarball, unpack it under /usr/local, and install it
# at /usr/local/spark (replacing any previous install).
download_soft() {
echo "master:下载Spark安装包..."
# Abort early if the download fails so we don't untar a missing/partial file.
wget "${SOFT_URL}/spark-2.3.0-bin-hadoop2.7.tgz" || return 1
rm -rf /usr/local/spark-2.3.0-bin-hadoop2.7
tar -zxvf spark-2.3.0-bin-hadoop2.7.tgz -C /usr/local/
rm -f spark-2.3.0-bin-hadoop2.7.tgz
# Bug fix: the original removed the *relative* path "spark" (in the CWD),
# not the install target; if /usr/local/spark already existed, the mv below
# would nest the new directory inside it instead of replacing it.
rm -rf /usr/local/spark
mv /usr/local/spark-2.3.0-bin-hadoop2.7 /usr/local/spark
}
# Set Spark environment variables (original comment said "hadoop" by mistake)
# Append SPARK_HOME and PATH entries to /etc/profile (system-wide env vars).
set_env() {
# Fix: message said "HBase环境变量" — this function configures Spark, not HBase.
echo "master:设置Spark环境变量..."
# Idempotency guard: blind appends duplicate the entries every re-run.
if ! grep -q 'SPARK_HOME=/usr/local/spark' /etc/profile; then
    echo "export SPARK_HOME=/usr/local/spark" >> /etc/profile
    echo "export PATH=\$PATH:\$SPARK_HOME/bin:\$SPARK_HOME/sbin" >> /etc/profile
fi
}
# Configure conf/spark-env.sh
# Create spark-env.sh from the shipped template and append cluster settings
# (Java/Hadoop locations, master host/port, worker sizing, web UI port).
set_spark_env() {
echo "master:修改spark-env.sh文件"
# Start from the distribution's template, then append our overrides below.
cp /usr/local/spark/conf/spark-env.sh.template /usr/local/spark/conf/spark-env.sh
# Unquoted EOF delimiter: shell expansions run at write time. The one
# backslash-escaped \$(...) below is deliberately NOT expanded here, so the
# generated file contains a literal $(hadoop classpath) evaluated by Spark.
cat >> /usr/local/spark/conf/spark-env.sh << EOF
export JAVA_HOME=/usr/local/java
export HADOOP_CONF_DIR=/usr/local/hadoop/etc/hadoop
export HADOOP_HDFS_HOME=/usr/local/hadoop
export SPARK_HOME=/usr/local/spark
export SPARK_MASTER_IP=master
export SPARK_MASTER_PORT=7077
export SPARK_MASTER_HOST=master
export SPARK_WORKER_CORES=2
export SPARK_WORKER_PORT=8901
export SPARK_WORKER_INSTANCES=1
export SPARK_WORKER_MEMORY=2g
export SPARK_DIST_CLASSPATH=\$(/usr/local/hadoop/bin/hadoop classpath)
export SPARK_MASTER_WEBUI_PORT=8079
EOF
}
# Populate the conf/slaves worker list
# Append one "slaveN" line per worker to Spark's slaves file.
# NOTE(review): this writes array *indices* (slave0, slave1, ...), not the
# hostnames read from $FILENAME — assumes workers are literally named
# slave0..slaveN-1; confirm against the cluster's /etc/hosts.
set_slaves(){
echo "master:修改slaves文件"
local idx
for idx in "${!slaves[@]}"; do
    printf 'slave%s\n' "${idx}" >> /usr/local/spark/conf/slaves
done
}
# Driver sequence — order matters: install the tarball first, then the
# profile env vars, then the two Spark config files.
download_soft
set_env
set_spark_env
set_slaves


