#!/bin/bash
# Auto-compile and run all MapReduce tasks.
#
# Prerequisites: the Hadoop cluster containers must be up (docker-compose up -d)
# and the task sources must exist under src/main/java/.

# -u: error on unset variables; pipefail: a pipeline fails if any stage fails.
# -e is deliberately omitted: grep filters on job output below may return
# non-zero without that being fatal.
set -uo pipefail

echo "========================================="
echo "  MapReduce实验自动运行脚本"
echo "========================================="

# Verify the cluster is reachable by running a trivial command in the namenode.
echo -e "\n[1/6] 检查Hadoop集群状态..."
if ! docker exec namenode hadoop version > /dev/null 2>&1; then
    # Diagnostics go to stderr so stdout stays clean for piping/logging.
    echo "错误: Hadoop集群未运行，请先执行 docker-compose up -d" >&2
    exit 1
fi
echo "✓ Hadoop集群运行正常"

# Copy the MapReduce sources into the namenode container.
# A failed copy aborts immediately instead of surfacing later as a
# confusing compile error inside the container.
echo -e "\n[2/6] 复制源代码到容器..."
for src in Task1MerchantCouponStats Task2MerchantDistance \
           Task3CouponUsageTime Task4DiscountRateAnalysis; do
    if ! docker cp "src/main/java/${src}.java" namenode:/workspace/; then
        echo "错误: 复制 ${src}.java 失败" >&2
        exit 1
    fi
done
echo "✓ 源代码复制完成"

# Compile the tasks inside the container against the Hadoop jars and
# package them into tasks.jar.
echo -e "\n[3/6] 编译MapReduce程序..."
if ! docker exec namenode bash -c '
set -e
cd /workspace
HADOOP_SHARE=/opt/hadoop-3.2.1/share/hadoop
# Build the compile classpath from the common, common/lib and mapreduce jars.
CLASSPATH=""
for jar in "$HADOOP_SHARE"/common/*.jar \
           "$HADOOP_SHARE"/common/lib/*.jar \
           "$HADOOP_SHARE"/mapreduce/*.jar; do
    CLASSPATH=$CLASSPATH:$jar
done
javac -encoding UTF-8 -classpath "$CLASSPATH" Task*.java
jar -cvf tasks.jar Task*.class > /dev/null
rm -f *.class
echo "编译完成: tasks.jar ($(ls -lh tasks.jar | awk "{print \$5}"))"
'; then
    # set -e inside the container script makes javac/jar failures propagate
    # here; previously a compile error still printed "编译完成".
    echo "错误: 编译失败" >&2
    exit 1
fi

# Remove previous job output in HDFS. The glob is quoted so it reaches
# HDFS literally and is expanded there — unquoted, the HOST shell would
# attempt the expansion first and could pass wrong arguments if the host
# happened to have paths matching /output/*.
echo -e "\n[4/6] 清理旧输出..."
docker exec namenode hdfs dfs -rm -r '/output/*' 2>/dev/null || true

# 运行任务
echo -e "\n[5/6] 运行MapReduce任务..."

# Run one MapReduce job and abort the script on failure.
# Arguments: $1 = main class in tasks.jar; the rest are passed through
#            (HDFS input path(s) followed by the output path).
# Outputs:   the job's final "completed successfully"/"FAILED" line.
run_task() {
    local main_class=$1
    shift
    local status
    # Keep only the job's success/failure summary line; `|| true` because
    # grep returning non-zero on no match is handled explicitly below.
    status=$(docker exec namenode hadoop jar /workspace/tasks.jar \
        "$main_class" "$@" 2>&1 | grep -E "(completed successfully|FAILED)" || true)
    [[ -n "$status" ]] && printf '%s\n' "$status"
    # Previously a FAILED job was printed but the script carried on and
    # tried to download output that does not exist. An empty status (job
    # never reached completion) is treated as a failure too.
    if [[ -z "$status" || "$status" == *FAILED* ]]; then
        echo "错误: ${main_class} 运行失败" >&2
        exit 1
    fi
}

echo "  → 任务1: 商家优惠券使用统计..."
run_task Task1MerchantCouponStats \
  /input/ccf_offline_stage1_train.csv \
  /input/ccf_online_stage1_train.csv \
  /output/task1

echo "  → 任务2: 商家距离分布统计..."
run_task Task2MerchantDistance \
  /input/ccf_offline_stage1_train.csv \
  /output/task2

echo "  → 任务3: 优惠券使用时间统计..."
run_task Task3CouponUsageTime \
  /input/ccf_offline_stage1_train.csv \
  /output/task3

echo "  → 任务4: 折扣率分析..."
run_task Task4DiscountRateAnalysis \
  /input/ccf_offline_stage1_train.csv \
  /output/task4

# Fetch each job's result from HDFS into the container's /tmp,
# then copy it from the container to the host.
echo -e "\n[6/6] 下载结果到本地..."
mkdir -p results/output
for task in task1 task2 task3 task4; do
    # A missing part file means the job produced no output — fail loudly
    # here instead of at the final wc -l summary.
    if ! docker exec namenode hdfs dfs -get -f \
        "/output/${task}/part-r-00000" "/tmp/${task}.txt"; then
        echo "错误: 下载 ${task} 结果失败" >&2
        exit 1
    fi
    docker cp "namenode:/tmp/${task}.txt" results/output/
done

# Final summary: per-file record counts plus the cluster Web UI endpoints.
echo -e "\n========================================="
echo "  ✓ 所有任务执行完成！"
echo "========================================="
echo "结果文件位置: results/output/"
# Same four lines as before, generated in a loop.
for task in task1 task2 task3 task4; do
    echo "  - ${task}.txt: $(wc -l < "results/output/${task}.txt") 条记录"
done
echo ""
echo "Web UI访问:"
echo "  - HDFS: http://localhost:9870"
echo "  - YARN: http://localhost:8088"
echo ""