#!/bin/sh
# Reset and re-initialize the Hadoop HA cluster.
# Pushes config from 'node' to the workers, wipes state, restarts
# journalnodes/namenodes, and brings the datanodes back up.
#
# -u: treat unset variables as an error. set -e is deliberately NOT
# used: stop-all.sh may return non-zero on an already-stopped cluster,
# and critical steps below carry their own explicit error checks.
set -u

# Synchronize all config files from 'node' to every worker.
# NOTE(review): scp runs ON 'node', so 'node' must itself be able to
# ssh to node1..node4 (its own keys or agent forwarding) — confirm.
for target in node1 node2 node3 node4; do
  ssh node "scp /home/ls/server/hadoop/etc/hadoop/* ${target}:/home/ls/server/hadoop/etc/hadoop"
done

# Stop every running Hadoop daemon before wiping on-disk state.
# (stop-all.sh may print warnings for daemons that are not running.)
hadoop_sbin=/home/ls/server/hadoop/sbin
ssh node "${hadoop_sbin}/stop-all.sh"

# Remove Hadoop runtime state (tmp and logs) on every host.
# One ssh connection per host instead of two; removing both
# directories in a single remote 'rm -rf' is equivalent to the
# original pair of commands.
for host in node node1 node2 node3 node4; do
  ssh "$host" 'rm -rf /home/ls/server/hadoop/tmp /home/ls/server/hadoop/logs'
done

# Start the journalnode quorum (node, node1, node2) — the namenode
# format below needs the journalnodes up to write shared edits.
for host in node node1 node2; do
  ssh "$host" '/home/ls/server/hadoop/sbin/hadoop-daemon.sh start journalnode'
done

# Format the primary namenode and start it.
# Abort the whole reset if formatting fails: continuing would
# bootstrap the standby from a broken namenode below.
ssh node '/home/ls/server/hadoop/bin/hdfs namenode -format -clusterId mycluster' || {
  echo 'ERROR: namenode format failed on node' >&2
  exit 1
}
ssh node '/home/ls/server/hadoop/sbin/hadoop-daemon.sh start namenode'

# Initialize the standby namenode (node1) by copying the freshly
# formatted namespace from the active namenode, then start it.
ssh node1 '/home/ls/server/hadoop/bin/hdfs namenode -bootstrapStandby' || {
  echo 'ERROR: bootstrapStandby failed on node1' >&2
  exit 1
}
# give the active namenode time to settle before starting the standby
sleep 10
ssh node1 '/home/ls/server/hadoop/sbin/hadoop-daemon.sh start namenode'
# let the standby finish coming up before triggering the failover
sleep 10

# Make 'node' the active namenode: failover FROM node1 TO node
# (haadmin -failover takes <from-serviceId> <to-serviceId>).
# --forceactive: force the transition even if fencing checks fail.
ssh node '/home/ls/server/hadoop/bin/hdfs haadmin -failover --forceactive node1 node' || {
  echo 'ERROR: failover to node failed' >&2
  exit 1
}

# Bring up the datanodes. hadoop-daemons.sh (plural) fans the start
# command out from 'node' to every configured worker — presumably the
# hosts listed in the workers/slaves file; verify against the config.
hadoop_sbin=/home/ls/server/hadoop/sbin
ssh node "${hadoop_sbin}/hadoop-daemons.sh start datanode"
