#!/usr/bin/env bash

# Cluster topology: master address from $MASTER, slave addresses word-split
# from the file named by $FILENAME (any whitespace separates entries).
master=${MASTER:-}
# $(<file) reads the file in-process; the deliberately unquoted expansion
# word-splits it into one array element per token, matching the original
# $(awk '{print $0}' file) behavior without spawning awk. Defaulting to
# /dev/null avoids awk's stdin fallback (which would hang) when FILENAME
# is unset.
slaves=($(<"${FILENAME:-/dev/null}"))

# Download the Hadoop 2.7.6 tarball and install it as /usr/local/hadoop.
# Globals: SOFT_URL (read) - base URL hosting the tarball.
# Returns: non-zero if the download or extraction fails.
download_soft() {
    echo "下载hadoop安装包..."
    # Abort early on a failed download; the original kept going, deleted any
    # existing install, and then tar failed on the missing archive.
    wget "${SOFT_URL}/hadoop-2.7.6.tar.gz" || return 1
    rm -rf /usr/local/hadoop-2.7.6
    tar -zxvf hadoop-2.7.6.tar.gz -C /usr/local/ || return 1
    # Plain files need no recursive delete.
    rm -f hadoop-2.7.6.tar.gz
    rm -rf /usr/local/hadoop
    mv /usr/local/hadoop-2.7.6 /usr/local/hadoop
}
# Append Hadoop environment variables to the shell profile.
# Arguments: $1 - profile file to modify (default: /etc/profile).
# Idempotent: lines already present are not appended again, so re-running
# the script no longer accumulates duplicate entries.
set_env() {
    local profile=${1:-/etc/profile}
    echo "设置hadoop环境变量..."
    grep -qF 'export HADOOP_HOME=/usr/local/hadoop' "$profile" 2>/dev/null ||
        echo 'export HADOOP_HOME=/usr/local/hadoop' >> "$profile"
    # Single quotes write a literal $PATH, same bytes as the original \$PATH.
    grep -qF 'export PATH=$PATH:/usr/local/hadoop/bin:/usr/local/hadoop/sbin' "$profile" 2>/dev/null ||
        echo 'export PATH=$PATH:/usr/local/hadoop/bin:/usr/local/hadoop/sbin' >> "$profile"
}
# Append master/slave name mappings to the hosts file.
# Globals: master (read), slaves (read) - set at the top of this script.
# Arguments: $1 - hosts file to modify (default: /etc/hosts).
# Slaves are named slave0, slave1, ... by array index, matching set_slaves.
set_hosts() {
    local hosts_file=${1:-/etc/hosts}
    echo "修改hosts文件..."
    printf '%s master\n' "${master}" >> "$hosts_file"
    local i
    for i in "${!slaves[@]}"; do
        printf '%s slave%s\n' "${slaves[$i]}" "$i" >> "$hosts_file"
    done
}
# Configure core-site.xml: comment out the stock <configuration> element,
# then append one defining the Hadoop tmp dir and the default filesystem.
# Arguments: $1 - file to modify (default: the installed core-site.xml).
set_core_site() {
    local conf=${1:-/usr/local/hadoop/etc/hadoop/core-site.xml}
    echo "修改core-site.xml文件"
    # '?' as the sed delimiter avoids escaping the '/' in the closing tag.
    sed -i 's?<configuration>?<!--<configuration>-->?g' "$conf"
    sed -i 's?</configuration>?<!--</configuration>-->?g' "$conf"
    # Quoted delimiter: the XML is appended literally, no shell expansion.
    cat >> "$conf" << 'EOF'
<configuration>
<property>
    <name>hadoop.tmp.dir</name>
    <value>file:/usr/local/hadoop/tmp</value>
    <description>Abase for other temporary directories.</description>
</property>
<property>
    <name>fs.defaultFS</name>
    <value>hdfs://master:9000</value>
</property>
</configuration>
EOF
}
# Configure hdfs-site.xml: comment out the stock <configuration> element,
# then append one with secondary namenode address, replication factor,
# and namenode/datanode storage directories.
# Arguments: $1 - file to modify (default: the installed hdfs-site.xml).
set_hdfs_site() {
    local conf=${1:-/usr/local/hadoop/etc/hadoop/hdfs-site.xml}
    echo "修改hdfs-site.xml文件"
    sed -i 's?<configuration>?<!--<configuration>-->?g' "$conf"
    sed -i 's?</configuration>?<!--</configuration>-->?g' "$conf"
    # Quoted delimiter: the XML is appended literally, no shell expansion.
    cat >> "$conf" << 'EOF'
<configuration>
    <property>
            <name>dfs.namenode.secondary.http-address</name>
            <value>master:50090</value>
    </property>
    <property>
            <name>dfs.replication</name>
            <value>3</value>
    </property>
    <property>
            <name>dfs.namenode.name.dir</name>
            <value>file:/usr/local/hadoop/tmp/dfs/name</value>
    </property>
    <property>
            <name>dfs.datanode.data.dir</name>
            <value>file:/usr/local/hadoop/tmp/dfs/data</value>
    </property>
</configuration>
EOF
}
# Configure mapred-site.xml: copy it from the shipped template, comment out
# the stock <configuration> element, then append one selecting the YARN
# framework and the job history addresses.
# Arguments: $1 - hadoop conf directory (default: /usr/local/hadoop/etc/hadoop).
set_mapred_site() {
    local conf_dir=${1:-/usr/local/hadoop/etc/hadoop}
    local conf="$conf_dir/mapred-site.xml"
    echo "修改mapred-site.xml文件"
    cp "$conf_dir/mapred-site.xml.template" "$conf"
    sed -i 's?<configuration>?<!--<configuration>-->?g' "$conf"
    sed -i 's?</configuration>?<!--</configuration>-->?g' "$conf"
    # Quoted delimiter: the XML is appended literally, no shell expansion.
    cat >> "$conf" << 'EOF'
<configuration>
    <property>
            <name>mapreduce.framework.name</name>
            <value>yarn</value>
    </property>
    <property>
            <name>mapreduce.jobhistory.address</name>
            <value>master:10020</value>
    </property>
    <property>
            <name>mapreduce.jobhistory.webapp.address</name>
            <value>master:19888</value>
    </property>
</configuration>
EOF
}
# Configure yarn-site.xml: comment out the stock <configuration> element,
# then append one with the resourcemanager host, nodemanager memory limit,
# and the mapreduce shuffle aux-service.
# Arguments: $1 - file to modify (default: the installed yarn-site.xml).
set_yarn_site() {
    local conf=${1:-/usr/local/hadoop/etc/hadoop/yarn-site.xml}
    echo "修改yarn-site.xml文件"
    sed -i 's?<configuration>?<!--<configuration>-->?g' "$conf"
    sed -i 's?</configuration>?<!--</configuration>-->?g' "$conf"
    # Quoted delimiter: the XML is appended literally, no shell expansion.
    cat >> "$conf" << 'EOF'
<configuration>
    <property>
        <name>yarn.resourcemanager.hostname</name>
        <value>master</value>
    </property>
    <property>
        <name>yarn.nodemanager.resource.memory-mb</name>
        <value>10240</value>
    </property>
    <property>
        <name>yarn.nodemanager.aux-services</name>
        <value>mapreduce_shuffle</value>
    </property>
</configuration>
EOF
}
# Append JAVA_HOME/HADOOP_HOME/PATH exports to hadoop-env.sh.
# Arguments: $1 - file to modify (default: the installed hadoop-env.sh).
set_hadoop_env() {
    local env_file=${1:-/usr/local/hadoop/etc/hadoop/hadoop-env.sh}
    echo "修改hadoop-env.sh文件"
    # Quoted delimiter writes $PATH literally — same bytes as the original
    # unquoted heredoc's escaped \$PATH, with no escaping needed.
    cat >> "$env_file" << 'EOF'
export JAVA_HOME=/usr/local/java
export HADOOP_HOME=/usr/local/hadoop
export PATH=$PATH:/usr/local/hadoop/bin
EOF
}
# Rewrite the slaves file with one slaveN hostname per array index,
# matching the names set_hosts registers in /etc/hosts.
# Globals: slaves (read) - set at the top of this script.
# Arguments: $1 - slaves file to write (default: the installed slaves file).
set_slaves() {
    local slaves_file=${1:-/usr/local/hadoop/etc/hadoop/slaves}
    echo "修改slaves文件"
    # -f: the original bare rm printed an error when the file was absent.
    rm -f "$slaves_file"
    local i
    for i in "${!slaves[@]}"; do
        echo "slave${i}" >> "$slaves_file"
    done
}
# Run every setup step in order: install first, then environment and hosts,
# then each Hadoop config file, finishing with the slaves list.
for step in \
    download_soft \
    set_env \
    set_hosts \
    set_core_site \
    set_hdfs_site \
    set_mapred_site \
    set_yarn_site \
    set_hadoop_env \
    set_slaves
do
    "$step"
done


