#!/bin/bash
#
# Deploy Apache Spark 3.5.1 from source (riscv64 build).
# Downloads the source tarball into $WORK_DIR, builds it, installs it
# under $HOME, and logs progress to $LOG_FILE.

# Log file for all deployment output
LOG_FILE="$HOME/deploy_spark.log"

# Working directory where sources are downloaded and built
WORK_DIR="$HOME/spark_deployment"

# Remember the caller's directory so it can be restored at the end
pushd . >/dev/null || exit 1

# Create and enter the working directory
mkdir -p "$WORK_DIR" && cd "$WORK_DIR" || exit 1

# 0. Download and unpack the Spark source tarball.
# Check both steps explicitly: a failed download must not cascade silently.
if ! wget https://github.com/apache/spark/archive/refs/tags/v3.5.1.tar.gz; then
    echo "Failed to download Spark source." | tee -a "$LOG_FILE"
    exit 1
fi
if ! tar -zxf v3.5.1.tar.gz; then
    echo "Failed to extract Spark source." | tee -a "$LOG_FILE"
    exit 1
fi
cd spark-3.5.1 || exit 1

#######################################
# Run one deployment helper script from the source tree and abort the
# whole deployment if it fails. Replaces four copies of the same
# `$? -ne 0` boilerplate (ShellCheck SC2181).
# Globals:   LOG_FILE (read)
# Arguments: $1 - path to the helper script
# Outputs:   failure message to stdout and $LOG_FILE on error
# Returns:   0 on success; exits 1 on failure
#######################################
run_deploy_step() {
    local script=$1
    if ! "$script"; then
        # basename keeps the log message identical to "<name> failed."
        echo "$(basename "$script") failed." | tee -a "$LOG_FILE"
        exit 1
    fi
}

# 1. Deploy R
run_deploy_step ./deploy_R.sh

# 2. Deploy protoc-jar
run_deploy_step ./deploy_protoc_jar.sh

# 3. Deploy protobuf
run_deploy_step ./deploy_protobuf.sh

# 4. Deploy grpc
run_deploy_step ./deploy_grpc.sh

# 5. Apply the local patch on top of the Spark source tree.
# The patch file is expected one directory above the source checkout.
if patch -p1 <../spark-patch.patch; then
    echo "Patch applied successfully." | tee -a "$LOG_FILE"
else
    echo "Failed to apply patch." | tee -a "$LOG_FILE"
    exit 1
fi

# Build the Spark distribution. The build runs under nohup in the
# background so it survives a hangup, but we MUST wait on it: checking
# `$?` right after `&` only tests whether the job was launched (always
# success), not whether the compilation succeeded, and the script would
# otherwise race ahead to install a tarball that does not exist yet.
nohup dev/make-distribution.sh --name riscv64 --tgz \
    -Pyarn,hive,hive-thriftserver,sparkr -DskipTests \
    -Dmaven.compiler.optimize=false -Dmaven.test.skip=true \
    -Dmaven.javadoc.skip=true -Dmaven.scaladoc.skip=true -T 32 \
    >"$LOG_FILE" 2>&1 &
BUILD_PID=$!

# Block until the build finishes; `wait` propagates its real exit status.
if wait "$BUILD_PID"; then
    echo "Spark compiled successfully." | tee -a "$LOG_FILE"
else
    echo "Spark compilation failed." | tee -a "$LOG_FILE"
    exit 1
fi

# 6. Install Spark: unpack the built distribution into the install directory.
INSTALL_DIR="$HOME"
if tar zxf ./spark-3.5.1-bin-riscv64.tgz -C "$INSTALL_DIR"; then
    echo "Spark installed successfully." | tee -a "$LOG_FILE"
else
    echo "Failed to install Spark." | tee -a "$LOG_FILE"
    exit 1
fi

# 7. Configure environment variables.
# Directory the distribution tarball unpacks to; must match the tgz name
# produced by make-distribution (spark-3.5.1-bin-riscv64.tgz). This was
# previously referenced but never defined, so SPARK_HOME ended up as
# "$INSTALL_DIR/" with an empty suffix.
SPARK_BIN_DIR="spark-3.5.1-bin-riscv64"

# Append SPARK_HOME and PATH entries only if not already in .bashrc,
# so repeated runs do not duplicate them.
grep -q 'export SPARK_HOME=' "$HOME/.bashrc" || echo "export SPARK_HOME=$INSTALL_DIR/$SPARK_BIN_DIR" >>"$HOME/.bashrc"
grep -q 'export PATH=$PATH:$SPARK_HOME/bin' "$HOME/.bashrc" || echo 'export PATH=$PATH:$SPARK_HOME/bin' >>"$HOME/.bashrc"
# Load the new variables into this shell (affects this script only,
# not the caller's interactive shell).
# shellcheck disable=SC1090,SC1091
source "$HOME/.bashrc"

# Clean up the source tree (left disabled on purpose for debugging)
# rm -rf "$WORK_DIR"

# Final status message
echo "Spark deployment script finished." | tee -a "$LOG_FILE"

# Restore the directory the script was started from
popd >/dev/null || exit
