#!/bin/bash
#
# ETL pipeline: truncate staging tables, clean student data in Hive,
# build gender summary tables, and export the results to MySQL via Sqoop.
#
# Fail fast: abort on command failure, unset variables, and pipeline errors.
set -euo pipefail

# --- connection settings ---
readonly HIVE_DB="stuinfo"
readonly MYSQL_USER="root"
# NOTE(review): hardcoded credential — prefer ~/.my.cnf or an env var.
readonly MYSQL_PASS="root"
readonly MYSQL_PORT="3306"
readonly MYSQL_DATABASE="stuinfo"
# 0.0.0.0 is a bind (listen) address, not a connect address; use loopback.
readonly MYSQL_HOST="127.0.0.1"
# Build the JDBC URL from the same host/port/db so the two cannot drift apart.
readonly MYSQL_CONN="jdbc:mysql://${MYSQL_HOST}:${MYSQL_PORT}/${MYSQL_DATABASE}"
readonly NUM_MAPPERS=1

# MySQL tables to empty before reloading.
TABLES=("class_gender_summary" "gender_summary" "semester_class_gender_summary" "semester_gender_summary" "status_gender_summary" "students_info")

for TABLE in "${TABLES[@]}"; do
    echo "Truncating MySQL table $TABLE..."
    # TRUNCATE clears the data but keeps the table structure; faster than DELETE.
    # All expansions are quoted so empty/whitespace values cannot word-split.
    mysql -h "$MYSQL_HOST" -P "$MYSQL_PORT" -u "$MYSQL_USER" -p"$MYSQL_PASS" "$MYSQL_DATABASE" \
        -e "TRUNCATE TABLE $TABLE;"
done
#TRUNCATE TABLE：快速清空表数据，保留表结构，比DELETE更高效。
# Hive tables to empty. Their data lives in HDFS (e.g.
# /user/hive/warehouse/stuinfo.db/class_gender_summary/*); TRUNCATE removes
# the files under each table's warehouse path.
truncate_tables=(
    "class_gender_summary"
    "gender_summary"
    "semester_class_gender_summary"
    "semester_gender_summary"
    "status_gender_summary"
    "stu_unclean_detail"
    "stu_detail"
    "stu_etl"
)

# Batch all TRUNCATEs into a single hive invocation: every `hive -e` pays a
# full JVM/session startup, so one session for all eight tables is far
# cheaper than eight separate ones.
truncate_sql=""
for table in "${truncate_tables[@]}"; do
    echo "Truncating Hive table $table..."
    truncate_sql+="TRUNCATE TABLE $HIVE_DB.$table;"$'\n'
done
hive -e "$truncate_sql"

# Step 1: clean stutable into stu_etl.
# - keep only valid gender codes: per the pipeline's documented rule only
#   0 and 1 are legal, so filter with IN (0, 1); the previous `gender <= 1`
#   also let negative values through
# - de-duplicate by student name, keeping the row with the smallest studentid
echo "Inserting data into stu_etl..."
hive -e "
USE $HIVE_DB;
INSERT INTO stu_etl
SELECT courseid, name, gender, dateofbirth, studentid, semester, gpa, enrollmentstatus
FROM (
    SELECT *, ROW_NUMBER() OVER (PARTITION BY name ORDER BY studentid) AS rn
    FROM stutable
    WHERE gender IN (0, 1)
) t
WHERE t.rn = 1;"
echo "Data inserted into stu_etl."
#WHERE gender <= 1：过滤无效性别数据，确保只保留合法值（如0或1）。
 #ROW_NUMBER() OVER (...)：窗口函数，按以下规则编号：
 #PARTITION BY name：按学生姓名分组（处理重名情况）。
 #ORDER BY studentid：在每个分组内按studentid升序排序。
 #AS rn：为每行生成序号（rn），最小的studentid对应rn=1。
 #WHERE t.rn = 1：仅保留每个姓名分组中的第一条记录（studentid最小的），实现去重。
   #字段映射：将筛选后的字段（courseid, name, ...）插入到stu_etl表中。

# Step 2: copy stu_etl verbatim into stu_unclean_detail — a staging snapshot
# taken before the final de-duplication pass in Step 3. The explicit column
# list keeps the insert stable even if the source table later grows columns.
echo "Inserting data into stu_unclean_detail..."
hive -e "
USE $HIVE_DB;
INSERT INTO stu_unclean_detail
SELECT
    courseid,
    name,
    gender,
    dateofbirth,
    studentid,
    semester,
    gpa,
    enrollmentstatus
FROM stu_etl;"
echo "Data inserted into stu_unclean_detail."

# Step 3: de-duplicate stu_unclean_detail by name into stu_detail.
# ROW_NUMBER() with PARTITION BY but no ORDER BY keeps an *arbitrary* row per
# name on each run; ordering by studentid makes the kept row deterministic
# and consistent with the Step 1 de-dup rule (smallest studentid wins).
echo "Cleaning data into stu_detail..."
hive -e "
USE $HIVE_DB;
INSERT INTO TABLE stu_detail
SELECT courseid, name, gender, dateofbirth, studentid, semester, gpa, enrollmentstatus
FROM (
    SELECT *,
           ROW_NUMBER() OVER (PARTITION BY name ORDER BY studentid) as row_num
    FROM stu_unclean_detail
) t
WHERE t.row_num = 1;"
echo "Data inserted into stu_detail."
#去重逻辑：
 #按name（学生姓名）分组，使用ROW_NUMBER()窗口函数为每组生成行号。
 #仅保留每组的第一行（row_num = 1），去除重复姓名的记录。
#数据去向：将去重后的完整学生信息存入目标表stu_detail。
#PARTITION BY name：按学生姓名分组，将同名学生视为一组。
 #ROW_NUMBER() ...：为每组内的记录分配行号，默认按 Hive 内部排序（可能是随机或按数据插入顺序）。
 #示例：若有两个张三，则各自的row_num分别为 1、2。
 #WHERE t.row_num = 1：仅保留每组的第一条记录，确保stu_detail中姓名唯一。
  #字段映射：将去重后的字段完整插入到stu_detail表。

# Step 4: rebuild the five summary tables.
# The INSERTs are independent of each other, so they run in a single hive
# session: each separate `hive -e` call pays a full JVM/session startup,
# making one batched invocation substantially faster than five.
echo "Inserting data into gender_summary, class_gender_summary, semester_gender_summary, status_gender_summary and semester_class_gender_summary..."
hive -e "
USE $HIVE_DB;

INSERT INTO gender_summary
SELECT
    COUNT(CASE WHEN gender = 1 THEN 1 END),
    COUNT(CASE WHEN gender = 0 THEN 1 END),
    CURRENT_TIMESTAMP
FROM stu_detail;

INSERT INTO class_gender_summary
SELECT
    courseid,
    COUNT(CASE WHEN gender = 1 THEN 1 END),
    COUNT(CASE WHEN gender = 0 THEN 1 END),
    CURRENT_TIMESTAMP
FROM stu_detail
GROUP BY courseid;

INSERT INTO semester_gender_summary
SELECT
    semester,
    COUNT(CASE WHEN gender = 1 THEN 1 END),
    COUNT(CASE WHEN gender = 0 THEN 1 END),
    CURRENT_TIMESTAMP
FROM stu_detail
GROUP BY semester;

INSERT INTO status_gender_summary
SELECT
    enrollmentstatus,
    COUNT(CASE WHEN gender = 1 THEN 1 END),
    COUNT(CASE WHEN gender = 0 THEN 1 END),
    CURRENT_TIMESTAMP
FROM stu_detail
GROUP BY enrollmentstatus;

INSERT INTO semester_class_gender_summary
SELECT
    semester,
    courseid,
    COUNT(CASE WHEN gender = 1 THEN 1 END),
    COUNT(CASE WHEN gender = 0 THEN 1 END),
    CURRENT_TIMESTAMP
FROM stu_detail
GROUP BY semester, courseid;"
echo "Summary tables populated."

# Step 5: export the summary tables from Hive's warehouse files to MySQL.
export_tables=(
    "gender_summary"
    "class_gender_summary"
    "semester_gender_summary"
    "status_gender_summary"
    "semester_class_gender_summary"
)

for table in "${export_tables[@]}"; do
    echo "Exporting Hive table $table to MySQL..."
    # NOTE(review): this uses /training/hive/warehouse while the stu_detail
    # export below uses /user/hive/warehouse — confirm which path matches
    # hive.metastore.warehouse.dir on this cluster.
    sqoop export \
        --connect "$MYSQL_CONN" \
        --username "$MYSQL_USER" \
        --password "$MYSQL_PASS" \
        --table "$table" \
        --export-dir "/training/hive/warehouse/$HIVE_DB.db/$table" \
        --input-fields-terminated-by '\t' \
        --input-lines-terminated-by '\n' \
        --input-null-string '\\N' \
        --input-null-non-string '\\N' \
        --num-mappers "$NUM_MAPPERS" \
        || { echo "Export of $table failed." >&2; exit 1; }
    echo "Exported $table."
done

# Export the cleaned student detail rows into MySQL's students_info table.
# (Messages previously said "student_info", which does not match the actual
# target table "students_info".)
echo "Exporting stu_detail to MySQL table students_info..."
sqoop export \
    --connect "$MYSQL_CONN" \
    --username "$MYSQL_USER" \
    --password "$MYSQL_PASS" \
    --table students_info \
    --export-dir "/user/hive/warehouse/$HIVE_DB.db/stu_detail" \
    --input-fields-terminated-by '\t' \
    --input-lines-terminated-by '\n' \
    --input-null-string '\\N' \
    --input-null-non-string '\\N' \
    --num-mappers "$NUM_MAPPERS" \
    || { echo "Export of stu_detail failed." >&2; exit 1; }
echo "Exported stu_detail to students_info."


