#!/bin/bash

source ./common.sh

# Derive the directory name that tar will create from the package file name,
# e.g. /path/spark-3.0.0-bin-hadoop3.2.tgz -> spark-3.0.0-bin-hadoop3.2.
# Use basename's SUFFIX operand instead of awk -F'.tgz': awk -F takes a regex,
# so the '.' matched any character and a name containing e.g. "atgz" would be
# split at the wrong place; basename only strips a literal trailing ".tgz".
spark_version1=$(basename "${spark_package_path}" .tgz)

# Fixed, version-independent install directory name under ${module_path}.
spark_version="spark"

# Local snippet file whose lines get appended to hive-site.xml on hive servers.
hive_site_file=./spark_conf/hive-site.txt

# Lines appended to /etc/profile.d/my_env.sh on every spark server.
# '\$' keeps PATH and SPARK_HOME literal so they expand on the remote host
# at login time, not here.
env_variables=(
  "#SPARK_HOME"
  "export SPARK_HOME=${module_path}/${spark_version}"
  "export PATH=\$PATH:\$SPARK_HOME/bin"
)

# Fan the Spark tarball out to every spark server. The rsync runs ON the
# master, so the master pushes the package to each target host.
for target_host in "${spark_servers[@]}"; do
  sshpass -p "${new_user_password}" \
    ssh -o StrictHostKeyChecking=no "${new_user}@${master_server}" \
    "rsync -av ${spark_package_path}  ${new_user}@${target_host}:${software_directory}"
done

# Remove any previous Spark install before unpacking the new one.
# ${module_path:?} aborts the script if module_path is unset/empty — without
# the guard an empty value would turn the remote command into
# 'sudo rm -rf /spark*', deleting from the server's root directory.
for server in "${spark_servers[@]}"; do
  sshpass -p "${new_user_password}" ssh -o StrictHostKeyChecking=no "${new_user}@${server}" "sudo rm -rf ${module_path:?module_path is not set}/spark*"
done

# Unpack the tarball into ${module_path} on every spark server.
for node in "${spark_servers[@]}"; do

  echo "开始解压 ${node}"

  sshpass -p "${new_user_password}" \
    ssh -o StrictHostKeyChecking=no "${new_user}@${node}" \
    "tar -zxf ${spark_package_path} -C ${module_path}/"
done

# Rename the freshly-extracted versioned directory to the fixed name "spark",
# so every later step can use a version-independent path.
for node in "${spark_servers[@]}"; do
  sshpass -p "${new_user_password}" \
    ssh -o StrictHostKeyChecking=no "${new_user}@${node}" \
    "mv ${module_path}/${spark_version1} ${module_path}/${spark_version}"
done

# Create spark-env.sh from its template and point Spark at the Hadoop jars.
for node in "${spark_servers[@]}"; do
  conf_file="${module_path}/${spark_version}/conf/spark-env.sh"

  sshpass -p "${new_user_password}" \
    ssh -o StrictHostKeyChecking=no "${new_user}@${node}" \
    "mv ${conf_file}.template ${conf_file}"

  # '\$(hadoop classpath)' is kept literal here so it is evaluated on the
  # remote host every time spark-env.sh is sourced, not once at install time.
  sshpass -p "${new_user_password}" \
    ssh -o StrictHostKeyChecking=no "${new_user}@${node}" \
    "echo 'export SPARK_DIST_CLASSPATH=\$(hadoop classpath)' | sudo tee -a ${conf_file}"
done

# Make sure /etc/profile.d/my_env.sh exists on each spark server
# (touch is a no-op when the file is already present).
for node in "${spark_servers[@]}"; do
  echo "开始判断my_env.sh文件是否存在"
  sshpass -p "${new_user_password}" \
    ssh -o StrictHostKeyChecking=no "${new_user}@${node}" \
    "sudo touch /etc/profile.d/my_env.sh"
  echo "判断my_env.sh文件是否存在完成"
done

# Drop any previously-installed SPARK_HOME lines first, so reruns of this
# script do not leave duplicate exports in my_env.sh.
for node in "${spark_servers[@]}"; do
  sshpass -p "${new_user_password}" \
    ssh -o StrictHostKeyChecking=no "${new_user}@${node}" \
    "sudo sed -i '/SPARK_HOME/d' /etc/profile.d/my_env.sh"
done

# Append the SPARK_HOME/PATH exports one line at a time. Single quotes around
# the remote echo keep $PATH and $SPARK_HOME unexpanded until login.
for node in "${spark_servers[@]}"; do
  for env_line in "${env_variables[@]}"; do
    sshpass -p "${new_user_password}" \
      ssh -o StrictHostKeyChecking=no "${new_user}@${node}" \
      "echo '${env_line}' | sudo tee -a /etc/profile.d/my_env.sh"
  done
done

# NOTE(review): 'source /etc/profile' runs inside a throwaway remote shell,
# so it only affects that one ssh session and has no lasting effect on the
# server. Later logins pick up /etc/profile.d/my_env.sh on their own; this
# loop is effectively a no-op — confirm whether it can be removed.
for server in "${spark_servers[@]}"; do
  # Refresh environment variables (in the transient ssh session only).
  sshpass -p "${new_user_password}" ssh -o StrictHostKeyChecking=no "${new_user}@${server}" "source /etc/profile"
done

# Create the Spark config inside Hive: copy spark-defaults.conf from the
# master into Hive's conf directory on every hive server.
# NOTE(review): this ssh-es into ${server} and then scp-s from the master
# back to ${server} itself — presumably relying on password-less ssh from
# each hive server to the master; verify that key setup exists, otherwise
# the scp will hang prompting for a password.
for server in "${hive_servers[@]}"; do

  sshpass -p "${new_user_password}" ssh -o StrictHostKeyChecking=no "${new_user}@${server}" "scp ${new_user}@${master_server}:${autoshell_path}/LiXianShuCang/spark_conf/spark-defaults.conf ${new_user}@${server}:${module_path}/hive/conf/"
  
done

# Create the HDFS directory that stores Spark history-server logs.
for server in "${NameNode_servers[0]}"; do

  # '-mkdir -p' succeeds when the directory already exists, making the step
  # idempotent. The previous '|| true' masked *all* failures (including a
  # down NameNode), not just the "already exists" case.
  sshpass -p "${new_user_password}" ssh -o StrictHostKeyChecking=no "${new_user}@${server}" "hadoop fs -mkdir -p /spark-history"
  
done

# Upload the Spark jars to HDFS so executors can fetch them from /spark-jars.
for server in "${NameNode_servers[0]}"; do

  # Clear any previous upload; '|| true' tolerates the first-run case where
  # /spark-jars does not exist yet.
  sshpass -p "${new_user_password}" ssh -o StrictHostKeyChecking=no "${new_user}@${server}" "hadoop fs -rm -r /spark-jars" || true

  # '-p' keeps this idempotent if the rm above was skipped or failed.
  sshpass -p "${new_user_password}" ssh -o StrictHostKeyChecking=no "${new_user}@${server}" "hadoop fs -mkdir -p /spark-jars"

  # Use ${module_path}/${spark_version} instead of the hard-coded
  # /opt/module/spark so this stays consistent with the install path used by
  # the rest of this script.
  sshpass -p "${new_user_password}" ssh -o StrictHostKeyChecking=no "${new_user}@${server}" "hadoop fs -put ${module_path}/${spark_version}/jars/* /spark-jars"
  
done

# Patch hive-site.xml on every hive server: delete the closing
# </configuration> tag, then append the Spark-related properties from
# ${hive_site_file}.
# NOTE(review): nothing here re-appends '</configuration>' afterwards — the
# result is only valid XML if ${hive_site_file} itself ends with that tag;
# verify the snippet file's contents.
for server in "${hive_servers[@]}"; do

  # Line number of the first </configuration> in the remote hive-site.xml.
  server_id_line_number=$(sshpass -p "${root_password}" ssh -o StrictHostKeyChecking=no "root@${server}" "awk '/<\/configuration>/{print NR; exit}' /opt/module/hive/conf/hive-site.xml")
  
  if [[ -n "${server_id_line_number}" ]]; then
	  # Delete that line so the new properties land inside the document.
	  
	  echo "开始删除<\/configuration> ${server}  第 ${server_id_line_number} 行"
	  
	  sshpass -p "${root_password}" ssh -o StrictHostKeyChecking=no "root@${server}" "sed -i \"${server_id_line_number}d\" /opt/module/hive/conf/hive-site.xml"
  fi

  # Append each line of the local snippet file to the remote hive-site.xml.
  while read -r entry; do
  
    # Strip carriage returns in case the snippet file is CRLF-encoded.
    entry=$(echo "${entry}" | tr -d '\r')
  
    echo "开始配置${server} 修改hive-site.xml文件,进入while循环 ${entry}"

    # Append the line. 'ssh -n' stops ssh from consuming the while-loop's
    # stdin (the snippet file), which would otherwise end the loop early.
    sshpass -p "${new_user_password}" ssh -n -o StrictHostKeyChecking=no "${new_user}@${server}" "echo '${entry}' | sudo tee -a /opt/module/hive/conf/hive-site.xml > /dev/null"
	
  done < "${hive_site_file}"
  
  # Flush filesystem buffers so the appended config is durable on disk.
  sshpass -p "${new_user_password}" ssh -n -o StrictHostKeyChecking=no "${new_user}@${server}" "sudo sync"
  
done

echo "spark 安装成功"