#!/usr/bin/env bash
# Cluster-deployment configuration preamble: fail fast on any error.
# NOTE(review): 'set -x' traces every command (including the plaintext
# password assignments below) to stderr — disable it outside of debugging.
set -x
set -e
set -o pipefail

# Require root privileges. Comparing the effective UID is more reliable than
# matching the output of whoami (works even if uid 0 has a non-"root" name).
if [[ "$(id -u)" != "0" ]]; then
  echo "请使用root用户运行此脚本。"
  exit 1
fi

# Credentials for cluster hosts and services.
# SECURITY(review): these defaults are committed in plaintext. Each value can
# now be overridden via the environment variable named inside ${VAR:-...},
# so real secrets never need to be edited into this file.

# OS root password used when distributing software across nodes.
root_password="${ROOT_PASSWORD:-Sgg1111@}"
# Non-privileged user created on every node, and its password.
new_user="${NEW_USER:-atguigu}"
new_user_password="${NEW_USER_PASSWORD:-000000}"

# MySQL credentials — replace the default with your actual MySQL root password.
mysql_user="${MYSQL_USER:-root}"
mysql_password="${MYSQL_PASSWORD:-000000}"

# Doris credentials and the FE priority network (IP/CIDR of the FE host).
doris_user="${DORIS_USER:-root}"
doris_password="${DORIS_PASSWORD:-000000}"
doris_priority_networks="${DORIS_PRIORITY_NETWORKS:-172.23.255.9}"

# Local directory holding all downloaded installation archives; packages are
# also copied to this same path on every node. Every *_package_path below is
# derived from it, so the location only needs to change in one place.
software_directory="/opt/software/"

# Installation archives (all expected to exist under ${software_directory}).
jdk_package_path="${software_directory}jdk-8u212-linux-x64.tar.gz"
zookeeper_package_path="${software_directory}apache-zookeeper-3.7.1-bin.tar.gz"
hadoop_package_path="${software_directory}hadoop-3.3.4.tar.gz"
kafka_package_path="${software_directory}kafka_2.12-3.3.1.tgz"
flume_package_path="${software_directory}apache-flume-1.10.1-bin.tar.gz"
hive_package_path="${software_directory}hive-3.1.3.tar.gz"
spark_package_path="${software_directory}spark-3.3.1-bin-without-hadoop.tgz"
maxwell_package_path="${software_directory}maxwell-1.29.2.tar.gz"
datax_package_path="${software_directory}datax.tar.gz"

# Doris FE / BE / dependencies archives.
doris_fe_package_path="${software_directory}doris/apache-doris-fe-1.2.4.1-bin-x86_64.tar.xz"
doris_be_package_path="${software_directory}doris/apache-doris-be-1.2.4.1-bin-x86_64.tar.xz"
doris_dependencies_package_path="${software_directory}doris/apache-doris-dependencies-1.2.4.1-bin-x86_64.tar.xz"

# HBase archive.
hbase_package_path="${software_directory}hbase-2.4.11-bin.tar.gz"

# Directory the archives are extracted into.
module_path="/opt/module"

# Directory where the helper scripts are stored.
autoshell_path="/opt/auto_script"

# Database for which MySQL binlog is enabled; adjust to your deployment.
binlog_do_db="edu"

# Full list of cluster nodes; software is distributed from the first entry.
servers=(hadoop{100..104})

# Nodes running the ZooKeeper ensemble.
zookeeper_servers=(hadoop{102..104})

# Nodes running HDFS JournalNodes.
JournalNode_servers=(hadoop{102..104})

# Nodes running the HDFS NameNode HA pair.
NameNode_servers=(hadoop100 hadoop101)

# Nodes running HDFS DataNodes.
DataNode_servers=(hadoop{102..104})

# Nodes running Kafka brokers.
kafka_servers=(hadoop{102..104})

# Nodes running Flume agents.
flume_servers=(hadoop100 hadoop101)

# Nodes with a Hive installation.
hive_servers=(hadoop{100..104})

# Node running HiveServer2.
hiveserver2_servers=(hadoop100)

# Node running the Hive Metastore.
metastore_servers=(hadoop101)

# Node running MySQL.
MySQL_servers=(hadoop100)

# Nodes running Spark.
spark_servers=(hadoop100 hadoop{102..104})

# Node running Maxwell.
maxwell_servers=(hadoop100)

# Node running DataX.
datax_servers=(hadoop100)

# Node running Doris.
doris_servers=(hadoop100)

# Node running HBase.
hbase_servers=(hadoop102)

# All packages are copied from this host (the first node in the list).
master_server="${servers[0]}"