#!/usr/bin/env bash
# build_and_run.sh
# This script automates the entire build and run process for the Spark JNI project.
# It compiles the native code, builds the Java application, and submits it to Spark.

# Strict mode:
#   -e          exit immediately on an unhandled non-zero status
#   -u          error on use of an unset variable
#   -o pipefail a pipeline fails if any stage fails
set -euo pipefail

# JAVA_HOME is needed by the native compile step (JNI headers live under it).
# ${JAVA_HOME:-} avoids tripping 'set -u' when the variable is unset.
if [[ -z "${JAVA_HOME:-}" ]]; then
  echo "Error: JAVA_HOME environment variable is not set. It's required for native compilation." >&2
  exit 1
fi

echo "### Step 1: Making compilation script executable ###"
chmod +x native_compile_riscv.sh
echo "OK"
echo

echo "### Step 2: Compiling Native C Code ###"
# NOTE: 'if [ $? -ne 0 ]' after the command is dead code under 'set -e'
# (the script would already have exited). Use an explicit 'if !' so the
# diagnostic message is actually printed on failure.
if ! ./native_compile_riscv.sh; then
    echo "Error: Native code compilation failed." >&2
    exit 1
fi
echo "OK"
echo

echo "### Step 3: Building Java Application with Maven ###"
echo "Running 'mvn clean package'..."
# Guard the command directly: a bare failure under 'set -e' would exit
# before a following '$?' check could run, so the error message below
# would never be shown otherwise.
if ! mvn clean package; then
    echo "Error: Maven build failed." >&2
    exit 1
fi
echo "OK"
echo

# --- Spark Submission ---
# Locate the application JAR. 'find' can match several files (stale builds,
# sources/javadoc classifier jars); exclude classifiers and stop at the first
# match ('-print -quit') so $JAR_FILE is always a single path.
JAR_FILE=$(find target -name "tpch-rvv-optimization-*.jar" \
               ! -name "*-sources.jar" ! -name "*-javadoc.jar" \
               -print -quit)
if [ -z "$JAR_FILE" ]; then
    echo "Error: Could not find the application JAR file in the target directory." >&2
    exit 1
fi

# Path to the JNI shared library produced by the native compile step.
NATIVE_LIB="native/libtpchrvvagg.so"

# Fail early with a clear message if the native library is missing,
# rather than letting executors die with an UnsatisfiedLinkError.
if [ ! -f "$NATIVE_LIB" ]; then
    echo "Error: Native library '$NATIVE_LIB' not found. Did the native compile step run?" >&2
    exit 1
fi

echo "### Step 4: Submitting Application to Spark ###"
echo "Using JAR: $JAR_FILE"
echo "Native Library: $NATIVE_LIB"
echo

# Explanation of spark-submit options:
# --class: The main entry point of the application.
# --master: Set to 'local[*]' to run locally using all available cores.
# --files: Distributes the native shared library to all Spark executors.
# --conf spark.executor.extraJavaOptions: Sets the JVM library path on executors.
#   Executors load the .so from '.' because --files copies it into each
#   executor's working directory.
# --conf spark.driver.extraJavaOptions: Sets the JVM library path for the driver
#   (useful for local/client mode). The driver reads it from './native' because
#   --files does not stage files for the driver process.
# Guard the command with 'if !' — a bare '$?' check after it would be
# unreachable under 'set -e'.
if ! spark-submit \
    --class com.abcd1234.TpchQueryOptimized \
    --master "local[*]" \
    --files "$NATIVE_LIB" \
    --conf "spark.executor.extraJavaOptions=-Djava.library.path=." \
    --conf "spark.driver.extraJavaOptions=-Djava.library.path=./native" \
    "$JAR_FILE"; then
    echo "Error: Spark job failed." >&2
    exit 1
fi

echo "### Spark Job Completed Successfully ###"