#!/bin/bash
#########################################################################################################################
# Royans K Tharakan
# rkt@pobox.com [ 2010 July 30 ]
# Purpose: To get latest tarballs of hadoop+hive and get them ready for use. Over time it might setup other tools as well
#########################################################################################################################

#--------------------------------------------------------------------------------------------------------------
# User defined settings
#--------------------------------------------------------------------------------------------------------------
# NOTE(review): assigning HOME clobbers the login home directory for every
# child process (ssh/wget dotfile lookups included) — confirm intentional.
HOME="/usr/local/ingenuity/rawhadoop"
BIN="$HOME/bin"                                  # helper scripts
DOWNLOAD="$HOME/download"                        # tarball cache
INSTALLDIR="/usr/local/ingenuity/hcn"            # install target (wiped when CLEANUP=1)
REPOSITORYPATH="http://archive.cloudera.com/cdh/3/"
JAVA_HOME="/usr/local/ingenuity/java1_6-live/"
CLEANUP=1                                        # 1 = wipe $INSTALLDIR before installing
NAMENODE="usipatest5"                            # hadoop master host
SLAVENODES="usipatest5 usipatest6"               # space-separated worker hosts
CACHEDATAPATH="$INSTALLDIR/../hadoopcache"       # hadoop.tmp.dir on every node
#--------------------------------------------------------------------------------------------------------------

# Put the chosen JDK first on PATH for this script and its children.
PATH="$JAVA_HOME/bin/:$PATH"

# Component versions.  The values below are pinned fallbacks; when the
# repository index is reachable they are overridden with the newest
# entry listed there.
HADOOPV=0.20.2+320
HIVEV=0.5.0+20
HBASEV=0.89.20100621+17
ZOOV=3.3.1+7
SQOOPV=1.0.0+3
PIGV=0.7.0+9

export REPOSITORYPATH

# latest_version PREFIX
# Reads an Apache-style directory index on stdin and prints the version of
# the last listed "<PREFIX>…-<version>/" directory (e.g. "0.20.2+320").
function latest_version
{
	grep DIR | cut -d'"' -f6 | grep -- - | sed -e 's/\///g' | grep "^$1" | cut -d'-' -f2 | tail -1
}

# BUG FIX: the index used to be fetched SIX times, and a fetch failure
# silently overwrote every pinned version with an empty string.  Fetch it
# once (bounded retries/timeout) and only override when the scrape succeeds.
INDEX=`wget -qO- -T 30 -t 2 "$REPOSITORYPATH" 2> /dev/null`
if [ -n "$INDEX" ]
then
	v=`echo "$INDEX" | latest_version hadoop`;	[ -n "$v" ] && HADOOPV=$v
	v=`echo "$INDEX" | latest_version hive`;	[ -n "$v" ] && HIVEV=$v
	v=`echo "$INDEX" | latest_version hbase`;	[ -n "$v" ] && HBASEV=$v
	v=`echo "$INDEX" | latest_version zoo`;		[ -n "$v" ] && ZOOV=$v
	v=`echo "$INDEX" | latest_version sqoop`;	[ -n "$v" ] && SQOOPV=$v
	v=`echo "$INDEX" | latest_version pig`;		[ -n "$v" ] && PIGV=$v
fi

#--------------------------------------------------------------------------------------------------------------
export NAMENODE CACHEDATAPATH

STATUS=0

# print MESSAGE
# Write MESSAGE to stdout with the script's " : msg " log prefix.
function print
{
	printf '%s\n' " : $1 "
}

## --- Assuming that this server is the namenode itself
# Stop any running cluster (remotely on the namenode, then locally) before
# the install tree is replaced.  Output is discarded on purpose: on a fresh
# host there is simply nothing to stop.
if [ -n "$NAMENODE" ]
then
    print " - Shutting down services"
    ssh "$NAMENODE" "$INSTALLDIR/hadoop-live/bin/stop-all.sh 2> /dev/null > /dev/null"
    "$INSTALLDIR/hadoop-live/bin/stop-all.sh" 2> /dev/null > /dev/null
fi


# printdie MESSAGE
# Print MESSAGE and abort the whole script with exit status 1.
# BUG FIX: this was defined as "printdir" but every caller in this script
# invokes "printdie", so fatal errors never actually stopped the run.
function printdie
{
	print $1
	exit 1
}

# Backward-compatible wrapper for the historical misspelling.
function printdir
{
	printdie $1
}

# getfile FILENAME
# Download FILENAME from $REPOSITORYPATH into the current directory unless
# it already exists.  On download failure, set the global STATUS=1 (checked
# by the caller) instead of aborting immediately.
# BUG FIX: the wget line referenced an undefined $FILE instead of $1, so it
# fetched the bare repository index and never the requested tarball.
function getfile
{
	if [ -e "$1" ]
	then
		print "$1 exists"
	else
		print "Downloading $1"
		wget "$REPOSITORYPATH/$1"
		if [ $? -ne 0 ]; then STATUS=1; fi
	fi
}

# Fetch every tarball that is unpacked later in this script.
cd "$DOWNLOAD" || exit 1

getfile hadoop-$HADOOPV.tar.gz
getfile hive-$HIVEV.tar.gz
getfile hbase-$HBASEV.tar.gz	# BUG FIX: hbase is extracted below but was never downloaded
getfile zookeeper-$ZOOV.tar.gz
getfile sqoop-$SQOOPV.tar.gz
getfile pig-$PIGV.tar.gz

if [ $STATUS -eq 1 ]; then printdie "error downloading "; fi

if [ $CLEANUP -eq 1 ]
then
	# ${INSTALLDIR:?} aborts instead of running rm -rf against an empty path.
	print "Cleaning up $INSTALLDIR"; rm -rf "${INSTALLDIR:?}"
fi

mkdir -p "$INSTALLDIR"


#--------------------------------------------------------------------------------
# Unpack hadoop, pin JAVA_HOME in its env file, and generate core-site.xml.
print "Exploding hadoop.."
cd "$INSTALLDIR" || exit 1
tar -xzf "$DOWNLOAD/hadoop-$HADOOPV.tar.gz" > /dev/null
ln -s "$INSTALLDIR/hadoop-$HADOOPV" hadoop-live
echo "JAVA_HOME=$JAVA_HOME; export JAVA_HOME" >> "$INSTALLDIR/hadoop-$HADOOPV/conf/hadoop-env.sh"
print "Setting up configuration files.."
print " - Name node set to $NAMENODE "
echo "$NAMENODE" > "$INSTALLDIR/hadoop-$HADOOPV/conf/masters"
# The heredoc delimiter is unquoted, so $NAMENODE and $CACHEDATAPATH expand
# right here — this replaces the original write-placeholders-then-sed-twice
# round trip through a temporary file.
cat > "$INSTALLDIR/hadoop-$HADOOPV/conf/core-site.xml" <<EOF
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>

<configuration>
  <property>
    <name>fs.default.name</name>
    <value>hdfs://$NAMENODE:8020</value>
  </property>

  <property>
     <name>hadoop.tmp.dir</name>
     <value>$CACHEDATAPATH</value>
  </property>

  <property>
  <name>dfs.replication</name>
  <value>1</value>
  <description>Default block replication.
  The actual number of replications can be specified when the file is created.
  The default is used if replication is not specified in create time.
  </description>
  </property>

</configuration>
EOF

mkdir -p "$CACHEDATAPATH"


# Write the slaves file and sanity-check ssh reachability of each worker.
if [ -n "$SLAVENODES" ]
then
	print " - Slave nodes added - $SLAVENODES "
	rm -f "$INSTALLDIR/hadoop-$HADOOPV/conf/slaves"	# -f: first run has no file yet
	for slave in $SLAVENODES			# word-splitting is intentional
	do
		echo "$slave" >> "$INSTALLDIR/hadoop-$HADOOPV/conf/slaves"
		# Warn (but continue) when a slave cannot be reached over ssh.
		if ! ssh "$slave" id > /dev/null
		then
			print "Couldn't get to $slave..."
		fi
	done
fi

export JAVA_HOME
cd "$INSTALLDIR/hadoop-$HADOOPV"

print " - Formating namenode"
# Format HDFS only on first install (when the name directory is absent).
if [ -d "$CACHEDATAPATH/dfs/name" ]
then
	print " - - exists"
else
	cd "$INSTALLDIR/hadoop-$HADOOPV"
	./bin/hadoop namenode -format
	# BUG FIX: the status check used to sit AFTER the whole if-statement, so
	# on the "exists" path it inspected print's exit code, not the format's.
	if [ $? -ne 0 ]; then STATUS=1; fi
	# NOTE(review): STATUS is never re-checked after this point, so a failed
	# format does not stop the script — confirm whether it should.
fi
print " - - done"

#--------------------------------------------------------------------------------
# Unpack HBase and point it at the hadoop install and the chosen JDK.
print "Exploding hbase.."
cd "$INSTALLDIR"
tar -xvzf "$DOWNLOAD/hbase-$HBASEV.tar.gz" > /dev/null
ln -s "$INSTALLDIR/hbase-$HBASEV" hbase-live
print "Setting up configuration files.."
{
	echo "HADOOP=$INSTALLDIR/hadoop-live"
	echo "HADOOP_HOME=$INSTALLDIR/hadoop-live"
} >> "$INSTALLDIR/hbase-$HBASEV/bin/hbase-config.sh"
echo "JAVA_HOME=$JAVA_HOME; export JAVA_HOME" >> "$INSTALLDIR/hbase-$HBASEV/bin/hbase-env.sh"

#--------------------------------------------------------------------------------
# Unpack Hive and point it at the hadoop install via hive-config.sh.
print "Exploding hive.."
cd "$INSTALLDIR"
tar -xvzf "$DOWNLOAD/hive-$HIVEV.tar.gz" > /dev/null
ln -s "$INSTALLDIR/hive-$HIVEV" hive-live
print "Setting up configuration files.."
{
	echo "HADOOP=$INSTALLDIR/hadoop-live"
	echo "HADOOP_HOME=$INSTALLDIR/hadoop-live"
} >> "$INSTALLDIR/hive-$HIVEV/bin/hive-config.sh"

#--------------------------------------------------------------------------------
# Unpack Pig and point it at the hadoop install via conf/pig-env.sh.
print "Exploding pig.."
cd "$INSTALLDIR"
tar -xvzf "$DOWNLOAD/pig-$PIGV.tar.gz" > /dev/null
ln -s "$INSTALLDIR/pig-$PIGV" pig-live
print "Setting up configuration files.."
{
	echo "HADOOP=$INSTALLDIR/hadoop-live"
	echo "HADOOP_HOME=$INSTALLDIR/hadoop-live"
} >> "$INSTALLDIR/pig-$PIGV/conf/pig-env.sh"


#--------------------------------------------------------------------------------
# Write a sourceable environment file ($INSTALLDIR/env.sh) for interactive use.
# NOTE: the heredoc delimiter is unquoted, so $JAVA_HOME, $INSTALLDIR, $PATH
# and the version variables expand NOW, at install time — the generated file
# contains the resolved absolute paths (including this script's build PATH).
cat > $INSTALLDIR/env.sh << EOF
#!/bin/bash

JAVA_HOME=$JAVA_HOME; export JAVA_HOME
HADOOP_HOME=$INSTALLDIR/hadoop-live
HADOOP=$INSTALLDIR/hadoop-live

PATH=$PATH:$INSTALLDIR/hadoop-$HADOOPV/bin:$INSTALLDIR/hive-$HIVEV/bin:$INSTALLDIR/pig-$PIGV/bin

export JAVA_HOME HADOOP_HOME HADOOP PATH

EOF


#--------------------------------------------------------------------------------

# Push the finished install tree to every slave and make sure each slave's
# HDFS cache directory is initialised.
if [ -n "$SLAVENODES" ]
then
	print " - Setting up slave nodes - $SLAVENODES "
	for slave in $SLAVENODES
	do
		if [ "$slave" == "$NAMENODE" ]
		then
			# The namenode already holds the freshly built tree.
			print " - - skipping $slave"
		else
			print " - - syncing $slave"
			ssh "$slave" "rm -rf $INSTALLDIR; mkdir -p $INSTALLDIR"
			# -e ssh spelled out (was glued into the bundle as "...le ssh");
			# also drops the stray extra "/" in the remote path.
			rsync -rvgouptl -e ssh "$INSTALLDIR"/* "$slave:$INSTALLDIR/" > /dev/null
			if [ $? -ne 0 ]
			then
				printdie "Couldn't setup the $slave..."
			fi
		fi
		# NOTE(review): this cache check also runs for the skipped namenode —
		# presumably harmless since the namenode was formatted above; confirm.
		print " - Checking if cache is ready.."
		ssh "$slave" "if [ -d $CACHEDATAPATH/dfs/name ]; then  echo -n '' ;else  cd $INSTALLDIR/hadoop-$HADOOPV;mkdir -p $CACHEDATAPATH;./bin/hadoop namenode -format ; fi"
	done
fi


#-----------------------------------------------------------------------
# Bring the cluster back up, driven from the namenode.
if [ -n "$NAMENODE" ]
then
    print " - Starting up services"
    ssh "$NAMENODE" "$INSTALLDIR/hadoop-$HADOOPV/bin/start-all.sh 2> /dev/null > /dev/null"
fi


