#!/bin/bash
# Bootstrap a Hadoop 3.3.1 HDFS cluster: fetch the tarball, render the
# config templates from the host list in ./config, push everything to each
# node via sshpass/scp, then format the NameNode and start HDFS.

set -e          # abort on the first failing command
set -x          # trace commands for easier debugging
set -o pipefail # a pipeline fails if any stage fails, not just the last

# Fetch the Hadoop distribution from Baidu cloud storage via bypy.
bypy download hadoop-3.3.1.tar.gz

# Host list: one "password,address,hostname,flag" line per node.
# Lines containing '#' are skipped; a line containing "end" stops parsing.
iplist_path=config

# Start with an empty workers file so reruns do not accumulate duplicates.
: > ./workers

# Pass 1: build the workers file and replace the *_localhost placeholders
# in the Hadoop site configs with the real hostnames from the host list.
while IFS= read -r line; do
    case "$line" in
        '')      continue ;;   # blank line — would otherwise reuse stale $parts
        *'#'*)   continue ;;   # comment line
        *'end'*) break ;;      # explicit end-of-list marker
        *)       IFS=',' read -ra parts <<< "$line" ;;
    esac

    # Columns: 0=password 1=address 2=hostname 3=flag (only 2 and 3 used here).
    hostname="${parts[2]}"
    flag="${parts[3]:-}"       # flag column is optional

    if [[ -n "$hostname" ]]; then
        printf '%s\n' "$hostname" >> ./workers
    fi

    # Substitute the placeholder hostnames in the config templates.
    # [[ ]] avoids the "unary operator expected" error the old unquoted
    # [ $flag = ... ] test produced whenever $flag was empty.
    if [[ "$flag" == 'namenode_localhost' ]]; then
        sed -i "s/namenode_localhost/$hostname/g" ./core-site.xml
        sed -i "s/namenode_localhost/$hostname/g" ./hdfs-site.xml
    elif [[ "$flag" == 'secondary_namenode_localhost' ]]; then
        sed -i "s/secondary_namenode_localhost/$hostname/g" ./hdfs-site.xml
    fi
done < "$iplist_path"

# Pass 2: push the Hadoop distribution and the rendered configuration to
# every node in the host list, using sshpass for password authentication.
# Columns: 0=password 1=address 2=hostname 3=flag (only 0 and 1 used here).
while IFS= read -r line; do
    case "$line" in
        '')      continue ;;   # blank line — would otherwise reuse stale $parts
        *'#'*)   continue ;;   # comment line
        *'end'*) break ;;      # explicit end-of-list marker
        *)       IFS=',' read -ra parts <<< "$line" ;;
    esac

    pass="${parts[0]}"
    address="${parts[1]}"

    # Skip malformed rows that carry no address.
    if [[ -n "$address" ]]; then
        # Ship the tarball and unpack it under /opt on the remote host.
        sshpass -p "$pass" scp hadoop-3.3.1.tar.gz "$address":/opt/
        sshpass -p "$pass" ssh -n "$address" "tar xf /opt/hadoop-3.3.1.tar.gz -C /opt/"
        # Environment setup picked up by login shells on the node.
        sshpass -p "$pass" scp hadoop_env.sh "$address":/etc/profile.d/hadoop_env.sh
        # Rendered Hadoop configuration produced by pass 1.
        sshpass -p "$pass" scp core-site.xml "$address":/opt/hadoop-3.3.1/etc/hadoop/core-site.xml
        sshpass -p "$pass" scp hdfs-site.xml "$address":/opt/hadoop-3.3.1/etc/hadoop/hdfs-site.xml
        sshpass -p "$pass" scp workers "$address":/opt/hadoop-3.3.1/etc/hadoop/
        # Append our extra hadoop-env.sh settings on the remote side.
        sshpass -p "$pass" scp hadoop-env.sh.add "$address":/opt/hadoop-3.3.1/etc/hadoop/hadoop-env.sh.add
        sshpass -p "$pass" ssh -n "$address" "cat /opt/hadoop-3.3.1/etc/hadoop/hadoop-env.sh.add >> /opt/hadoop-3.3.1/etc/hadoop/hadoop-env.sh"
    fi
done < "$iplist_path"

# Pick up the environment (HADOOP_HOME, PATH, ...) that the freshly
# installed /etc/profile.d/hadoop_env.sh exports on this node.
source /etc/profile

# Initialize the NameNode metadata directory.
# NOTE(review): formatting on a rerun wipes existing HDFS metadata —
# confirm this script is only meant for a fresh cluster.
hdfs namenode -format

# Start NameNode, SecondaryNameNode and all DataNodes listed in workers.
/opt/hadoop-3.3.1/sbin/start-dfs.sh
#jps
#http://localhost:9870
#/opt/hadoop-3.3.1/sbin/stop-dfs.sh

# Smoke test: run the wordcount example over a small file and print the
# result. '-p' / '-f' / the pre-clean keep these steps idempotent so a
# rerun does not abort under 'set -e' when the targets already exist.
hdfs dfs -mkdir -p /home
hdfs dfs -put -f /root/anaconda-ks.cfg /home/
hdfs dfs -rm -r -f /home/result
hadoop jar /opt/hadoop-3.3.1/share/hadoop/mapreduce/hadoop-mapreduce-examples-3.3.1.jar wordcount /home/anaconda-ks.cfg /home/result
hdfs dfs -cat /home/result/*

# Restore default shell behavior: stop command tracing and stop
# exiting on command failure.
set +ex