#!/usr/bin/env bash
# Set up a standalone Apache Spark cluster under ${spark_dir}.
# Fix: the original shebang '#!/bin/env bash' is non-portable — env(1)
# lives in /usr/bin on virtually all systems.

spark_dir=/data/spark                # install root for the Spark distribution
spark_conf=/data/spark/spark/conf    # conf dir reached via the 'spark' symlink
master="192.168.1.50"                # value for SPARK_MASTER_HOST
slaves="192.168.1.51"                # space-separated worker IPs

mk_spark_dir(){
    # Create ${spark_dir} if it does not already exist.
    # Fix: quote the expansion (SC2086) so paths with spaces/globs work;
    # drop trailing whitespace.
    if [ ! -d "${spark_dir}" ]; then
        echo "mk spark dir"
        mkdir -p -- "${spark_dir}"
    fi
}

# Pause until the operator confirms the Spark .tgz has been uploaded to
# ${spark_dir}; the typed response itself is discarded (lands in REPLY).
read -rp "waiting upload spark tgz file:"

tar_spark(){
    # Extract any *.tgz archive found at the top of ${spark_dir}.
    # Fix: the original parsed `ls /data/spark | grep tgz` (SC2012) and
    # hardcoded the path instead of using ${spark_dir}; use a glob.
    local tgz
    for tgz in "${spark_dir}"/*.tgz; do
        [ -e "${tgz}" ] || continue   # glob did not match — nothing uploaded
        echo "tar spark"
        tar xzf "${tgz}" -C "${spark_dir}"
        #\rm "${tgz}"
    done
}

alias_spark_dir(){
    # Symlink ${spark_dir}/spark at the extracted distribution directory,
    # so ${spark_conf} resolves regardless of the Spark version name.
    # Fix: the original parsed `ls /data/spark | grep -v ...` (SC2012) with
    # a hardcoded path; use a glob over subdirectories and skip entries
    # whose names contain 'tgz' or 'sh' (same filter as the greps).
    if [ ! -h "${spark_dir}/spark" ]; then
        echo "alias spark dir"
        local entry name
        for entry in "${spark_dir}"/*/; do
            [ -d "${entry}" ] || continue   # unmatched glob stays literal
            name=${entry%/}
            name=${name##*/}
            case "${name}" in
                *tgz*|*sh*|spark) continue ;;
            esac
            # Relative target: the link lives in the same directory.
            ln -s "${name}" "${spark_dir}/spark"
            break
        done
    fi
}

cp_conf_file(){
    # Back up the stock *.template config files before real configs are
    # generated alongside them.
    # NOTE(review): this function is defined but never invoked by
    # setup_standalone — confirm whether the backup step was meant to run.
    # Fix: quote expansions (SC2086) and deduplicate via a loop.
    local f
    for f in slaves.template spark-env.sh.template spark-defaults.conf.template; do
        cp -- "${spark_conf}/${f}" "${spark_conf}/${f}.bak"
    done
}

write_salves(){
    # Write one worker IP per line to ${spark_conf}/slaves.
    # Bug fix: the original used '>' inside the loop, truncating the file
    # on every iteration so only the LAST slave survived. Truncate once,
    # then append each entry.
    echo "write slaves"
    local ip
    : > "${spark_conf}/slaves"
    # ${slaves} is intentionally unquoted: it is a space-separated list
    # and relies on word-splitting.
    for ip in ${slaves}; do
        echo "${ip}" >> "${spark_conf}/slaves"
    done
}

write_spark_envsh(){
    # Generate ${spark_conf}/spark-env.sh, baking in the caller's current
    # JAVA_HOME and the configured master address.
    echo "spark_envsh"
    {
        echo "export JAVA_HOME=${JAVA_HOME}"
        echo "export SPARK_MASTER_HOST=${master}"
    } > "${spark_conf}/spark-env.sh"
}

chmod_spark_envsh(){
    # Make the generated spark-env.sh executable (rwxr-xr-x, i.e. 755).
    chmod u=rwx,g=rx,o=rx "${spark_conf}/spark-env.sh"
}

setup_standalone(){
    # Run the full standalone-mode setup pipeline, in order:
    # create the install dir, unpack the uploaded tarball, symlink the
    # distribution dir, then generate the worker list and the env config.
    # NOTE(review): cp_conf_file (template backup) is defined in this file
    # but never invoked here — confirm whether it was meant to be part of
    # this sequence.
    mk_spark_dir
    tar_spark
    alias_spark_dir
    write_salves
    write_spark_envsh
    chmod_spark_envsh
}


usage() {
    # Print the option summary and exit successfully.
    # Fix: corrected the "useage" typo in the output.
    echo "usage:"
    echo "-h help"
    echo "-s setup standalone"
    exit 0
}

# Parse command-line flags: '-s' runs the standalone setup, '-h' prints help.
# Fix: 's' takes no argument in the optstring, so $OPTARG is unset — the
# original passed "$OPTARG" to setup_standalone (which takes no parameters).
# Also quote the selector and use the conventional '*' default arm.
while getopts 'hs' OPT; do
    case "${OPT}" in
        s) setup_standalone ;;
        h) usage ;;
        *) usage ;;
    esac
done