#!/bin/bash
#######################################
# Install and configure the Spark Thrift Server on $HRS_SPARKTHRIFT_HOST.
# Globals (read): HRS_SPARKTHRIFT_HOST / _PORT / _HOME / _LOGS,
#                 HYRENTMP_HOME, XDIR, SETUP_LOGFILE, HIVE_HOST,
#                 HYREN_USER, HYREN_ALLHOSTS, and the HRS_SPARKTHRIFT_*
#                 tuning values substituted into spark-defaults.conf.
# Arguments:      $1 - display name of this setup step (used in messages)
# Returns:        0 on success or skip; die() aborts on any fatal error.
#######################################
function setup_spark_thrift()
{
  echo_start "$1 ... ..."
  confirm_operate "install spark thrift"
  if [ $? -eq 0 ]; then
    # Cannot install without a target host.
    if [ -z "$HRS_SPARKTHRIFT_HOST" ]; then
      echo_warn "The variable HRS_SPARKTHRIFT_HOST does not assign a value, Unable to install! "
      echo_passed "$1 SKIP"
      return 0
    fi

    set_nopwdlogin_host "$HRS_SPARKTHRIFT_HOST"

    echo_doing "deliver media ... ..."
    # Remove any package left over from a previous run, then re-deliver it.
    ssh -o StrictHostKeyChecking=no "$HRS_SPARKTHRIFT_HOST" "if [ -f $HYRENTMP_HOME/spark-1.6.1-bin-hadoop2.6.tgz ]; then rm -rf $HYRENTMP_HOME/spark-1.6.1-bin-hadoop2.6.tgz; fi" || { die "clean pkg fail"; }
    ssh -o StrictHostKeyChecking=no "$HRS_SPARKTHRIFT_HOST" "if [ -d $HYRENTMP_HOME/spark-1.6.1-bin-hadoop2.6 ]; then rm -rf $HYRENTMP_HOME/spark-1.6.1-bin-hadoop2.6; fi" || { die "clean pkg dir fail"; }
    scp "$XDIR/packages/spark-1.6.1-bin-hadoop2.6.tgz" "$HRS_SPARKTHRIFT_HOST:$HYRENTMP_HOME" >> "$SETUP_LOGFILE" || { die "deliver media fail"; }
    echo_done

    echo_doing "clean env ... ..."
    # Stop a previously installed server (if any) and wipe its directories.
    ssh -o StrictHostKeyChecking=no "$HRS_SPARKTHRIFT_HOST" "if [ -d $HRS_SPARKTHRIFT_HOME/sbin ]; then cd $HRS_SPARKTHRIFT_HOME && sbin/stop-thriftserver.sh --hiveconf hive.server2.thrift.port=$HRS_SPARKTHRIFT_PORT --hiveconf hive.server2.thrift.host=$HRS_SPARKTHRIFT_HOST --master yarn-client; fi" || { die "stop service fail"; }
    # TODO(review): leftover debug placeholder — replace with a real remote
    # process-liveness check (e.g. pgrep) and kill of any orphaned
    # thriftserver before wiping the install dir. Kept byte-identical for now.
    ssh -o StrictHostKeyChecking=no "$HRS_SPARKTHRIFT_HOST" "echo 'ps -ef|grep thriftserver|grep -v grep > /dev/null 要使用正确的方式，判断进程是否存在！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！！'" #&& { die "kill service fail"; }
    ssh -o StrictHostKeyChecking=no "$HRS_SPARKTHRIFT_HOST" "if [ -d $HRS_SPARKTHRIFT_HOME ]; then rm -rf $HRS_SPARKTHRIFT_HOME/*; fi" || { die "clean home dir fail"; }
    ssh -o StrictHostKeyChecking=no "$HRS_SPARKTHRIFT_HOST" "if [ -d $HRS_SPARKTHRIFT_LOGS ]; then rm -rf $HRS_SPARKTHRIFT_LOGS/*; fi" || { die "clean logs dir fail"; }
    echo_done

    echo_doing "install soft ... ..."
    # Unpack the tarball in the temp area, then move it into the install home.
    ssh -o StrictHostKeyChecking=no "$HRS_SPARKTHRIFT_HOST" "tar zxf $HYRENTMP_HOME/spark-1.6.1-bin-hadoop2.6.tgz -C $HYRENTMP_HOME" || { die "spark decompressed fail"; }
    ssh -o StrictHostKeyChecking=no "$HRS_SPARKTHRIFT_HOST" "if [ ! -d $HRS_SPARKTHRIFT_HOME ]; then mkdir -p $HRS_SPARKTHRIFT_HOME; fi" || { die "make home dir fail"; }
    ssh -o StrictHostKeyChecking=no "$HRS_SPARKTHRIFT_HOST" "if [ ! -d $HRS_SPARKTHRIFT_LOGS ]; then mkdir -p $HRS_SPARKTHRIFT_LOGS; fi" || { die "make logs dir fail"; }
    ssh -o StrictHostKeyChecking=no "$HRS_SPARKTHRIFT_HOST" "mv $HYRENTMP_HOME/spark-1.6.1-bin-hadoop2.6/* $HRS_SPARKTHRIFT_HOME" || { die "deliver spark soft fail"; }
    echo_done

    echo_doing "set property files ... ..."
    # Deliver the templated config files shipped with the installer:
    # log4j.properties, spark-env.sh, spark-defaults.conf.
    scp "$XDIR"/conf/sparkthrift/* "$HRS_SPARKTHRIFT_HOST:$HRS_SPARKTHRIFT_HOME/conf" >> "$SETUP_LOGFILE" || { die "deliver property files [log4j.properties, spark-env.sh, spark-defaults.conf] fail"; }
    # Substitute the tuning placeholders in spark-defaults.conf. Each sed must
    # succeed, otherwise the literal placeholder token would be left in the
    # config and the server would start with a garbage setting.
    ssh -o StrictHostKeyChecking=no "$HRS_SPARKTHRIFT_HOST" "sed -i \"s%HRS_SPARKTHRIFT_DRIVER_MEMORY%$HRS_SPARKTHRIFT_DRIVER_MEMORY%\" $HRS_SPARKTHRIFT_HOME/conf/spark-defaults.conf" || { die "set driver memory fail"; }
    ssh -o StrictHostKeyChecking=no "$HRS_SPARKTHRIFT_HOST" "sed -i \"s%HRS_SPARKTHRIFT_SHUFFLE_PARTITIONS%$HRS_SPARKTHRIFT_SHUFFLE_PARTITIONS%\" $HRS_SPARKTHRIFT_HOME/conf/spark-defaults.conf" || { die "set shuffle partitions fail"; }
    ssh -o StrictHostKeyChecking=no "$HRS_SPARKTHRIFT_HOST" "sed -i \"s%HRS_SPARKTHRIFT_DEFAULT_PARALLELISM%$HRS_SPARKTHRIFT_DEFAULT_PARALLELISM%\" $HRS_SPARKTHRIFT_HOME/conf/spark-defaults.conf" || { die "set default parallelism fail"; }
    ssh -o StrictHostKeyChecking=no "$HRS_SPARKTHRIFT_HOST" "sed -i \"s%HRS_SPARKTHRIFT_EXECUTOR_CORES%$HRS_SPARKTHRIFT_EXECUTOR_CORES%\" $HRS_SPARKTHRIFT_HOME/conf/spark-defaults.conf" || { die "set executor cores fail"; }
    ssh -o StrictHostKeyChecking=no "$HRS_SPARKTHRIFT_HOST" "sed -i \"s%HRS_SPARKTHRIFT_EXECUTOR_MEMORY%$HRS_SPARKTHRIFT_EXECUTOR_MEMORY%\" $HRS_SPARKTHRIFT_HOME/conf/spark-defaults.conf" || { die "set executor memory fail"; }
    ssh -o StrictHostKeyChecking=no "$HRS_SPARKTHRIFT_HOST" "sed -i \"s%HRS_SPARKTHRIFT_STORAGE_MEMORYFRACTION%$HRS_SPARKTHRIFT_STORAGE_MEMORYFRACTION%\" $HRS_SPARKTHRIFT_HOME/conf/spark-defaults.conf" || { die "set storage memoryfraction fail"; }
    ssh -o StrictHostKeyChecking=no "$HRS_SPARKTHRIFT_HOST" "sed -i \"s%HRS_SPARKTHRIFT_SHUFFLE_MEMORYFRACTION%$HRS_SPARKTHRIFT_SHUFFLE_MEMORYFRACTION%\" $HRS_SPARKTHRIFT_HOME/conf/spark-defaults.conf" || { die "set shuffle memoryfraction fail"; }

    ssh -o StrictHostKeyChecking=no "$HRS_SPARKTHRIFT_HOST" "sed -i \"s%HRS_SPARKTHRIFT_LOGS%$HRS_SPARKTHRIFT_LOGS%\" $HRS_SPARKTHRIFT_HOME/conf/log4j.properties" || { die "set log4j.properties fail"; }
    # Reuse the cluster's hive-site.xml so the thrift server talks to the
    # same metastore as Hive.
    scp "$HIVE_HOST:/etc/hive/conf/hive-site.xml" "$HRS_SPARKTHRIFT_HOST:$HRS_SPARKTHRIFT_HOME/conf" >> "$SETUP_LOGFILE" || { die "deliver [$HIVE_HOST:/etc/hive/conf/hive-site.xml] fail"; }
    ssh -o StrictHostKeyChecking=no "$HRS_SPARKTHRIFT_HOST" "chown -R $HYREN_USER:$HYREN_USER $HRS_SPARKTHRIFT_HOME" || { die "chown home fail"; }
    ssh -o StrictHostKeyChecking=no "$HRS_SPARKTHRIFT_HOST" "chown -R $HYREN_USER:$HYREN_USER $HRS_SPARKTHRIFT_LOGS" || { die "chown logs fail"; }
    echo_done

    echo_info "set profile on all hyren hosts ... ..."
    # Export the spark-thrift environment variables on every cluster host.
    # HYREN_ALLHOSTS is a whitespace-separated host list; word-splitting into
    # the array here is intentional.
    local value_arr=($HYREN_ALLHOSTS)
    for host in "${value_arr[@]}"; do
      echo_doing "  host : $host >>>>>> "
      ssh -o StrictHostKeyChecking=no "$host" 'bash -s' < "$XDIR/util/func_rmot_setprofile.sh" "HRS_SPARKTHRIFT_HOST" "$HRS_SPARKTHRIFT_HOST" "$HYREN_USER" || { die "set HRS_SPARKTHRIFT_HOST fail on $host"; }
      ssh -o StrictHostKeyChecking=no "$host" 'bash -s' < "$XDIR/util/func_rmot_setprofile.sh" "HRS_SPARKTHRIFT_PORT" "$HRS_SPARKTHRIFT_PORT" "$HYREN_USER" || { die "set HRS_SPARKTHRIFT_PORT fail on $host"; }
      ssh -o StrictHostKeyChecking=no "$host" 'bash -s' < "$XDIR/util/func_rmot_setprofile.sh" "HRS_SPARKTHRIFT_HOME" "$HRS_SPARKTHRIFT_HOME" "$HYREN_USER" || { die "set HRS_SPARKTHRIFT_HOME fail on $host"; }
      ssh -o StrictHostKeyChecking=no "$host" 'bash -s' < "$XDIR/util/func_rmot_setprofile.sh" "HRS_SPARKTHRIFT_LOGS" "$HRS_SPARKTHRIFT_LOGS" "$HYREN_USER" || { die "set HRS_SPARKTHRIFT_LOGS fail on $host"; }
      echo_done
    done
    echo_done

    echo_success "$1"
    echo_info "****************************************"
    echo_info "RDSRUN HOST    dir  : $HRS_SPARKTHRIFT_HOST"
    echo_info "RDSRUN HOME    dir  : $HRS_SPARKTHRIFT_HOME"
    echo_info "RDSRUN LOGS   file  : $HRS_SPARKTHRIFT_LOGS"
    echo_info "RDSRUN SERVER PORT  : $HRS_SPARKTHRIFT_PORT"
    echo_info "****************************************"

    echo
    # Manual start instructions. BUGFIX: the original printed HRS_SPARK_HOST
    # (unset in this flow) for both the host and the directory, and told the
    # operator to run stop-thriftserver.sh to START the service.
    echo_warn "========================================"
    echo_warn " 请按照如下步骤启动服务："
    echo_warn " 1. 使用用户：$HYREN_USER，登录主机：$HRS_SPARKTHRIFT_HOST"
    echo_warn " 2. 进入目录：$HRS_SPARKTHRIFT_HOME"
    echo_warn " 3. 执行：sbin/start-thriftserver.sh --hiveconf hive.server2.thrift.port=$HRS_SPARKTHRIFT_PORT --hiveconf hive.server2.thrift.host=$HRS_SPARKTHRIFT_HOST --master yarn-client"
    echo_warn "========================================="

    echo_passed "$1 done."
  else
    echo_passed "$1 SKIP"
  fi
}