#!/bin/bash
# Configure yarn-site.xml from a host list, push it to every node, and run a
# YARN smoke test (wordcount).  Fails fast and traces every command.

set -e
set -x

iplist_path=config

# Config file format: one host per line, comma-separated fields:
#   password,ip_address,hostname,flag
# Any line containing '#' is treated as a comment; a line containing 'end'
# terminates parsing.
#
# First pass: find the ResourceManager host and substitute the template
# placeholders in ./yarn-site.xml.
while IFS= read -r line
do
    case $line in
        *'#'*)
            continue
        ;;
        *'end'*)
            break
        ;;
    esac

    IFS=',' read -ra parts <<< "$line"
    hostname="${parts[2]}"
    flag="${parts[3]}"

    # Quoted "$flag": an empty/missing 4th field on a malformed line must not
    # break the test expression.
    if [ "$flag" = 'resourcemanager_localhost' ]; then
        sed -i "s/resourcemanager_localhost/$hostname/g" ./yarn-site.xml
        # '#' as the sed delimiter because the classpath is full of '/'.
        hadoop_classpath=$(hadoop classpath)
        sed -i "s#hadoop_classpath#$hadoop_classpath#g" ./yarn-site.xml
    fi
done < "$iplist_path"

# Second pass over the same host list: copy the generated configuration files
# to every node that has an address field.
while IFS= read -r line
do
    case $line in
        *'#'*)
            continue
        ;;
        *'end'*)
            break
        ;;
    esac

    IFS=',' read -ra parts <<< "$line"
    pass="${parts[0]}"
    address="${parts[1]}"

    # -n with quotes: skip malformed lines with an empty address without the
    # test misparsing whatever the field happens to contain.
    if [ -n "$address" ]; then
        # NOTE(review): the password is visible in `ps` output and in the
        # set -x trace; consider sshpass -f <file> or SSH keys instead.
        sshpass -p"$pass" scp yarn-site.xml "$address":/opt/hadoop-3.3.1/etc/hadoop/yarn-site.xml
        sshpass -p"$pass" scp mapred-site.xml "$address":/opt/hadoop-3.3.1/etc/hadoop/mapred-site.xml
    fi
done < "$iplist_path"

# Start the YARN daemons (ResourceManager + NodeManagers).
/opt/hadoop-3.3.1/sbin/start-yarn.sh
# Handy follow-ups, kept for reference:
#jps
#http://localhost:8088
#/opt/hadoop-3.3.1/sbin/stop-yarn.sh

# Smoke test: upload a sample file and run the bundled wordcount example on
# YARN, then print the result.
hdfs dfs -mkdir -p /home
hdfs dfs -put -f /root/anaconda-ks.cfg /home/
hadoop jar /opt/hadoop-3.3.1/share/hadoop/mapreduce/hadoop-mapreduce-examples-3.3.1.jar wordcount /home/anaconda-ks.cfg /home/yarn_result
hdfs dfs -cat /home/yarn_result/*

# Restore default shell behavior for anything sourced/run afterwards.
set +x
set +e