#!/bin/bash
#
# Install and configure Spark on YARN on every host in ${spark_servers[@]}.
#
# Required variables (defined in common.sh):
#   spark_package_path, hadoop_package_path, software_directory, module_path,
#   spark_servers, master_server, new_user, new_user_password

# Abort immediately if the shared configuration cannot be loaded; continuing
# with unset variables would later expand to "sudo rm -rf /spark*"-style paths.
source ./common.sh || { echo "failed to source ./common.sh" >&2; exit 1; }

# Spark directory name inside the tarball, e.g. "spark-3.0.0-bin-hadoop3".
# basename's suffix-strip is used instead of awk -F'.tgz': awk's separator is
# a regex, where '.' matches any character and can mis-split unusual names.
spark_version1=$(basename "${spark_package_path}" .tgz)

# Name the unpacked Spark directory is renamed to.
spark_version="spark-yarn"

# Root of the HA Hadoop deployment (holds <hadoop_version>/etc/hadoop).
ha_directory="/opt/ha"

# Hadoop directory name inside its tarball, e.g. "hadoop-3.1.3".
hadoop_version=$(basename "${hadoop_package_path}" .tar.gz)

# Fan the Spark tarball out from the master node to every Spark host.
# NOTE(review): the rsync runs *on* the master, so it relies on the master
# reaching the other hosts over ssh without a password prompt — confirm keys
# or an agent are set up between the cluster nodes.
for target in "${spark_servers[@]}"; do
  sshpass -p "${new_user_password}" ssh -o StrictHostKeyChecking=no \
    "${new_user}@${master_server}" \
    "rsync -av ${spark_package_path}  ${new_user}@${target}:${software_directory}"
done

# Wipe any previous Spark installation under ${module_path} on each host so
# the fresh extraction does not mix with stale files.
for host in "${spark_servers[@]}"; do
  sshpass -p "${new_user_password}" ssh -o StrictHostKeyChecking=no \
    "${new_user}@${host}" "sudo rm -rf ${module_path}/spark*"
done

# Unpack the Spark tarball into ${module_path} on every host.
# NOTE(review): this extracts from ${spark_package_path} on the remote side —
# it assumes that path matches where the rsync step placed the tarball.
for host in "${spark_servers[@]}"; do
  echo "开始解压 ${host}"
  sshpass -p "${new_user_password}" ssh -o StrictHostKeyChecking=no \
    "${new_user}@${host}" "tar -zxf ${spark_package_path} -C ${module_path}/"
done

# Rename the versioned directory (e.g. spark-3.0.0-bin-hadoop3) to the fixed
# name "spark-yarn" so later paths are version-independent.
for host in "${spark_servers[@]}"; do
  sshpass -p "${new_user_password}" ssh -o StrictHostKeyChecking=no \
    "${new_user}@${host}" \
    "mv ${module_path}/${spark_version1} ${module_path}/${spark_version}"
done

# Create conf/spark-env.sh from the shipped template and point YARN_CONF_DIR
# at the HA Hadoop configuration, so submitted jobs resolve cluster paths.
for host in "${spark_servers[@]}"; do
  spark_conf="${module_path}/${spark_version}/conf"

  sshpass -p "${new_user_password}" ssh -o StrictHostKeyChecking=no \
    "${new_user}@${host}" \
    "mv ${spark_conf}/spark-env.sh.template ${spark_conf}/spark-env.sh"

  sshpass -p "${new_user_password}" ssh -o StrictHostKeyChecking=no \
    "${new_user}@${host}" \
    "echo 'YARN_CONF_DIR=${ha_directory}/${hadoop_version}/etc/hadoop' | sudo tee -a ${spark_conf}/spark-env.sh"
done

# Create conf/spark-defaults.conf from its template and enable event logging
# to HDFS (consumed later by the history server).
# NOTE(review): hdfs://hadoop100:8020/directory must already exist in HDFS.
for host in "${spark_servers[@]}"; do
  spark_defaults="${module_path}/${spark_version}/conf/spark-defaults.conf"

  sshpass -p "${new_user_password}" ssh -o StrictHostKeyChecking=no \
    "${new_user}@${host}" \
    "mv ${spark_defaults}.template ${spark_defaults}"

  sshpass -p "${new_user_password}" ssh -o StrictHostKeyChecking=no \
    "${new_user}@${host}" \
    "echo 'spark.eventLog.enabled          true' | sudo tee -a ${spark_defaults}"

  sshpass -p "${new_user_password}" ssh -o StrictHostKeyChecking=no \
    "${new_user}@${host}" \
    "echo 'spark.eventLog.dir               hdfs://hadoop100:8020/directory' | sudo tee -a ${spark_defaults}"
done

# Configure the history server via SPARK_HISTORY_OPTS in spark-env.sh:
# UI port 18080, event-log directory on HDFS, keep 30 retained applications.
#
# Fix: the original appended the quoted value across four separate
# echo|tee calls, leaving an embedded-newline string in spark-env.sh that is
# fragile to hand-editing. A single-line assignment word-splits to the same
# three -D options when spark-env.sh is sourced, and needs one ssh round trip
# instead of four.
for host in "${spark_servers[@]}"; do
  sshpass -p "${new_user_password}" ssh -o StrictHostKeyChecking=no \
    "${new_user}@${host}" \
    "echo 'export SPARK_HISTORY_OPTS=\"-Dspark.history.ui.port=18080 -Dspark.history.fs.logDirectory=hdfs://hadoop100:8020/directory -Dspark.history.retainedApplications=30\"' | sudo tee -a ${module_path}/${spark_version}/conf/spark-env.sh"
done

# Point running applications at the history server UI (spark-defaults.conf).
for host in "${spark_servers[@]}"; do
  spark_defaults="${module_path}/${spark_version}/conf/spark-defaults.conf"

  sshpass -p "${new_user_password}" ssh -o StrictHostKeyChecking=no \
    "${new_user}@${host}" \
    "echo 'spark.yarn.historyServer.address=hadoop100:18080' | sudo tee -a ${spark_defaults}"

  sshpass -p "${new_user_password}" ssh -o StrictHostKeyChecking=no \
    "${new_user}@${host}" \
    "echo 'spark.history.ui.port=18080' | sudo tee -a ${spark_defaults}"
done

# (Re)start the Spark history server on the first Spark host only — the
# original looped over "${spark_servers[0]}", a single-element list, which is
# equivalent to this straight-line form.
history_host="${spark_servers[0]}"

sshpass -p "${new_user_password}" ssh -o StrictHostKeyChecking=no \
  "${new_user}@${history_host}" \
  "${module_path}/${spark_version}/sbin/stop-history-server.sh"

sshpass -p "${new_user_password}" ssh -o StrictHostKeyChecking=no \
  "${new_user}@${history_host}" \
  "${module_path}/${spark_version}/sbin/start-history-server.sh"

echo "spark 安装成功"