#!/bin/bash
# Spark setup script: verifies prerequisites (root dir, JDK, Scala),
# unpacks the bundled Spark distribution, and applies configuration
# from a mode-specific env file. Must run with write access to
# $root_dir and /etc/profile.
root_dir="/usr/local/edata"

echo "$(date +"%Y-%m-%d %H:%M:%S") INFO spark setup is running..."
echo "$(date +"%Y-%m-%d %H:%M:%S") INFO checking root directory..."
if [ ! -d "$root_dir" ]; then
	echo "$(date +"%Y-%m-%d %H:%M:%S") WARNING $root_dir undetected"
	# Test mkdir directly instead of inspecting $? afterwards, and quote
	# the path so it survives if root_dir ever contains spaces.
	if mkdir -p "$root_dir"; then
		echo "$(date +"%Y-%m-%d %H:%M:%S") INFO creating $root_dir success"
	else
		echo "$(date +"%Y-%m-%d %H:%M:%S") ERROR creating $root_dir fail"
		exit 1
	fi
else
	echo "$(date +"%Y-%m-%d %H:%M:%S") INFO $root_dir detected"
fi
echo "$(date +"%Y-%m-%d %H:%M:%S") INFO checking jdk..."
# Require exactly JDK 1.8.0_221. Absence is detected with `command -v`
# instead of grepping "not found" out of stderr, which is shell- and
# locale-dependent and silently misfires on non-English systems.
if ! command -v java >/dev/null 2>&1; then
	echo "$(date +"%Y-%m-%d %H:%M:%S") WARNING jdk is not found,installing..."
else
	# `java -version` prints to stderr, hence 2>&1.
	jdk_ver=$(java -version 2>&1)
	if [[ $jdk_ver == *"1.8.0_221"* ]]; then
		echo "$(date +"%Y-%m-%d %H:%M:%S") INFO jdk 1.8.0_221 detected"
	else
		echo "$(date +"%Y-%m-%d %H:%M:%S") ERROR other version of jdk detected (1.8.0_221 is required),uninstall it first"
		exit 1
	fi
fi

echo "$(date +"%Y-%m-%d %H:%M:%S") INFO checking scala..."
# Require exactly Scala 2.12.17. Same absence-detection fix as the JDK
# check: `command -v` instead of locale-dependent "not found" matching.
if ! command -v scala >/dev/null 2>&1; then
	echo "$(date +"%Y-%m-%d %H:%M:%S") WARNING scala is not found,please install scala using eata scala package..."
else
	# `scala -version` reports on stderr, hence 2>&1.
	version=$(scala -version 2>&1)
	if [[ $version == *"2.12.17"* ]]; then
		echo "$(date +"%Y-%m-%d %H:%M:%S") INFO scala 2.12.17 detected"
	else
		echo "$(date +"%Y-%m-%d %H:%M:%S") ERROR other version of scala is detected,please uninstall it first"
		exit 1
	fi
fi

echo "$(date +"%Y-%m-%d %H:%M:%S") INFO checking spark directory ..."
if [ ! -d "$root_dir/spark" ]; then
	echo "$(date +"%Y-%m-%d %H:%M:%S") INFO $root_dir/spark is not found...,unpressing spark package of edata"
	# Abort if the bundled tarball is missing or corrupt instead of
	# silently continuing with a half-configured install.
	if ! tar -zxvf spark-3.3.1-bin-hadoop3.tgz; then
		echo "$(date +"%Y-%m-%d %H:%M:%S") ERROR extracting spark-3.3.1-bin-hadoop3.tgz fail"
		exit 1
	fi
	if ! mv spark-3.3.1-bin-hadoop3 "$root_dir/spark"; then
		echo "$(date +"%Y-%m-%d %H:%M:%S") ERROR moving spark to $root_dir/spark fail"
		exit 1
	fi
	# Activate the template config files shipped with Spark; later steps
	# append to these files, so they must exist under their final names.
	mv "$root_dir/spark/conf/spark-env.sh.template" "$root_dir/spark/conf/spark-env.sh"
	mv "$root_dir/spark/conf/spark-defaults.conf.template" "$root_dir/spark/conf/spark-defaults.conf"
else
	echo "$(date +"%Y-%m-%d %H:%M:%S") $root_dir/spark detected"
fi

echo "$(date +"%Y-%m-%d %H:%M:%S") INFO checking SPARK_HOME ..."
if [[ -z "$SPARK_HOME" ]]; then
	# Persist SPARK_HOME and PATH for the current user and system-wide
	# logins. Single-quoted PATH lines are written literally on purpose
	# so $SPARK_HOME is resolved at login time, not now.
	# NOTE(review): ~/.bashrc omits $SPARK_HOME/sbin from PATH while
	# /etc/profile includes it — confirm this asymmetry is intentional.
	printf '%s\n' \
		"export SPARK_HOME=$root_dir/spark" \
		'export PATH=$SPARK_HOME/bin:$PATH' >> ~/.bashrc
	printf '%s\n' \
		"export SPARK_HOME=$root_dir/spark" \
		'export PATH=$SPARK_HOME/bin:$SPARK_HOME/sbin:$PATH' >> /etc/profile
	# Pull the new settings into this shell immediately.
	source ~/.bashrc
	source /etc/profile
else
	echo "$(date +"%Y-%m-%d %H:%M:%S") INFO SPARK_HOME is set"
fi

# First CLI argument selects the deployment mode:
#   sa — standalone (master URL is assembled from host/port entries)
#   ha — high availability (master URL comes from the env file)
mode=$1
if [[ -z "$mode" ]]; then
	echo "$(date +"%Y-%m-%d %H:%M:%S") ERROR spark mode is empty"
	exit 1
fi
# Record the chosen mode for later tooling; quote it so echo cannot
# word-split or glob the value.
echo "$mode" >> mode
env_file="spark_sa.env"
master_url=""
# case avoids the unquoted `[ $mode == ... ]` bashism of the original.
case "$mode" in
	sa)
		env_file="spark_sa.env"
		master_url="spark://"
		;;
	ha)
		env_file="spark_ha.env"
		;;
esac
conf_dir=$root_dir/spark/conf
# Apply every non-empty line of the env file. Line format:
#   PREFIX_NAME=VALUE
# where PREFIX is ENV (export in spark-env.sh), DEF (entry in
# spark-defaults.conf) or WORKER (entry in the workers file).
while IFS= read -r line; do
	if [[ -n "$line" ]]; then
		# Split off the prefix at the first "_", then NAME=VALUE at the
		# first "=". `read` leaves the remainder intact in the last var.
		IFS="_" read -r prefix suffix <<< "$line"
		IFS="=" read -r name value <<< "$suffix"
		echo "$(date +"%Y-%m-%d %H:%M:%S") setting $name=$value"
		case "$prefix" in
			ENV)
				# Env-file names use dashes; shell variables need
				# underscores. Parameter expansion replaces the
				# per-line perl subprocess of the original.
				key=${name//-/_}
				echo "export $key=$value" >> "$conf_dir/spark-env.sh"
				# Assemble the master URL from host/port entries, or
				# take it verbatim (quotes stripped) when given whole.
				if [[ $key == "SPARK_MASTER_HOST" ]]; then
					master_url="$master_url$value"
				fi
				if [[ $key == "SPARK_MASTER_PORT" ]]; then
					master_url="$master_url:$value"
				fi
				if [[ $key == "SPARK_MASTER_URL" ]]; then
					master_url=${value//\"/}
				fi
				;;
			DEF)
				echo "$name $value" >> "$conf_dir/spark-defaults.conf"
				;;
			WORKER)
				# -e kept deliberately: worker entries may encode
				# multiple hosts via escape sequences (e.g. \n).
				echo -e "$value" >> "$conf_dir/workers"
				;;
		esac
	fi
done < "$env_file"
# Record the computed master URL for later tooling, then smoke-test the
# installation by asking spark-shell for its version.
echo "$master_url" >> master
# Test the command directly instead of inspecting $? afterwards.
if spark-shell --version; then
	echo "$(date +"%Y-%m-%d %H:%M:%S") INFO spark setup success"
else
	# Original behavior preserved: report the error but do not exit 1.
	echo "$(date +"%Y-%m-%d %H:%M:%S") ERROR spark setup error"
fi