#!/bin/bash

# Install Hadoop in standalone (single-machine) mode.
# In this mode HDFS cannot be enabled, so there is not much to play with;
# the script only extracts Hadoop and runs the wordcount example as a smoke test.

# Check for a Java runtime; Hadoop cannot run without it.
# The original printed the message but kept going — abort instead.
if ! java -version > /dev/null 2>&1; then
	echo 'no java env' >&2
	exit 1
fi

# Validate the required argument: path to the Hadoop release tarball.
if [[ -z "$1" || ! -f "$1" ]]; then
	# Fixed garbled message ("plase proess") and routed it to stderr.
	echo "please pass a hadoop install file" >&2
	exit 1
fi

# Record and show the local hostname (informational only).
hn=$(hostname)
echo "${hn}"

# Disable the iptables firewall so Hadoop ports are reachable.
# Failure is deliberately ignored: chkconfig may be absent on non-RHEL systems.
chkconfig iptables off > /dev/null 2>&1 || true

# Installation base directory: second argument if given, else a default.
# ${2:-...} substitutes the default when $2 is unset or empty,
# matching the original [ -n "$2" ] check.
baseDir="${2:-/opt/modules/hadoopstandalone}"

# Create the base directory if it does not already exist.
if [[ -d "${baseDir}" ]]; then
	echo "${baseDir}"
else
	mkdir -p -- "${baseDir}"
fi

# Extract the Hadoop tarball into the base directory.
# The original silently discarded the result into an unused variable and
# continued on failure; abort instead, since nothing below can work without it.
if ! tar -zxf "$1" -C "${baseDir}"; then
	echo "failed to extract $1" >&2
	exit 1
fi

echo "tar is over.now run testing..."

# Write the wordcount test data. A single truncating redirection ('>')
# replaces the original four '>>' appends, which duplicated the data
# every time the script was re-run.
printf '%s\n' \
	"hadoop mapreduce hive" \
	"hbase spark storm" \
	"sqoop hadoop hive" \
	"spark hadoop" > ~/wc.input

# Derive the Hadoop version and home directory from the tarball name,
# e.g. /path/hadoop-2.7.3.tar.gz -> version=hadoop-2.7.3, simple_version=2.7.3.
version="${1##*/}"
version="${version%%.tar.gz}"
Hadoop_home="${baseDir}/${version}"
simple_version="${version#hadoop-}"
echo " ${Hadoop_home}"
# Abort if the extracted tree is not where we expect it; the original
# carried on and ran the example from the wrong working directory.
cd "${Hadoop_home}" || { echo "cannot cd to ${Hadoop_home}" >&2; exit 1; }
pwd

# Run the bundled wordcount example as a smoke test. Invoke it directly:
# the original wrapped this in backticks, which tries to execute the
# command's captured stdout as a command.
bin/hadoop jar "share/hadoop/mapreduce/hadoop-mapreduce-examples-${simple_version}.jar" wordcount ~/wc.input output2 > /dev/null

if [[ -f "${Hadoop_home}/output2/_SUCCESS" ]]; then
	echo "test success=========================="
	cat "${Hadoop_home}/output2/part-r-00000"
fi




