package org.spark

import org.apache.log4j.{Level, Logger}
import org.apache.spark.ml.classification.RandomForestClassifier
import org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
import org.apache.spark.ml.feature.{StandardScaler, VectorAssembler, StringIndexer}
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions._

object HumanActivityClassification {

  // Sensor feature columns shared by assembly and feature-importance reporting.
  private val FeatureColumns = Array(
    "back_x", "back_y", "back_z",
    "thigh_x", "thigh_y", "thigh_z"
  )

  /**
   * Entry point. Trains a random-forest activity classifier from accelerometer
   * CSV data and reports accuracy, F1-score, a confusion matrix and per-feature
   * importances on a held-out test set.
   *
   * @param args optional: args(0) = training CSV path, args(1) = test CSV path.
   *             Defaults preserve the original hard-coded paths, so existing
   *             no-argument invocations behave exactly as before.
   */
  def main(args: Array[String]): Unit = {
    // Silence Spark's verbose INFO logging.
    Logger.getLogger("org").setLevel(Level.ERROR)

    // Fix: paths were hard-coded Windows paths; now overridable via CLI args
    // (backward-compatible — the defaults are the original paths).
    val trainPath = if (args.length > 0) args(0) else "E:/localCodeStore/test2/train.csv"
    val testPath  = if (args.length > 1) args(1) else "E:/localCodeStore/test2/test.csv"

    val spark = SparkSession.builder()
      .appName("Human Activity Classification Test")
      .master("local[*]")
      .getOrCreate()

    println("Spark Session successfully created.")

    // Fix: the session was never stopped — the program slept forever
    // (Thread.sleep(Long.MaxValue)) after printing a message that falsely
    // claimed the session had been closed. Guarantee cleanup here instead.
    try {
      runPipeline(spark, trainPath, testPath)
    } finally {
      spark.stop()
      println("Spark Session successfully stopped.")
    }
  }

  /** Full load -> clean -> assemble -> scale -> train -> evaluate pipeline. */
  private def runPipeline(spark: SparkSession, trainPath: String, testPath: String): Unit = {
    import org.apache.spark.sql.types._

    // Explicit schema: avoids a schema-inference pass and guarantees numeric
    // columns; unparseable numeric cells become null under this schema.
    val schema = StructType(Array(
      StructField("timestamp", StringType, true),
      StructField("back_x", DoubleType, true),
      StructField("back_y", DoubleType, true),
      StructField("back_z", DoubleType, true),
      StructField("thigh_x", DoubleType, true),
      StructField("thigh_y", DoubleType, true),
      StructField("thigh_z", DoubleType, true),
      StructField("label", DoubleType, true)
    ))

    println("Loading training data...")
    val trainDF = spark.read
      .option("header", "true")
      .schema(schema)
      .csv(trainPath)

    println("Loading test data...")
    val testDF = spark.read
      .option("header", "true")
      .schema(schema)
      .csv(testPath)

    println("\nTraining Data Schema:")
    trainDF.printSchema()

    println("\nSample of Training Data:")
    trainDF.show(5)

    // Drop rows whose label is missing or failed to parse (null after schema).
    val cleanedTrainDF = trainDF.filter(col("label").isNotNull)
    val cleanedTestDF  = testDF.filter(col("label").isNotNull)

    // Assemble the six sensor readings into a single feature vector;
    // "skip" drops rows containing null, NaN or infinite feature values.
    val assembler = new VectorAssembler()
      .setInputCols(FeatureColumns)
      .setOutputCol("features")
      .setHandleInvalid("skip")

    println("\nAssembling features...")
    val trainAssembled = assembler.transform(cleanedTrainDF)
    val testAssembled  = assembler.transform(cleanedTestDF)

    // Standardize to zero mean / unit variance. The scaler is fit on the
    // training set only and then applied to both sets (no test-set leakage).
    val scaler = new StandardScaler()
      .setInputCol("features")
      .setOutputCol("scaledFeatures")
      .setWithStd(true)
      .setWithMean(true)

    println("\nScaling features...")
    val scalerModel = scaler.fit(trainAssembled)
    val trainScaled = scalerModel.transform(trainAssembled)
    val testScaled  = scalerModel.transform(testAssembled)

    println("\nTraining Random Forest model...")
    val rf = new RandomForestClassifier()
      .setLabelCol("label")
      .setFeaturesCol("scaledFeatures")
      .setNumTrees(10)
      .setMaxDepth(5)
      .setSeed(42) // fixed seed for reproducible runs

    val model = rf.fit(trainScaled)

    println("\nMaking predictions...")
    val predictions = model.transform(testScaled)

    println("Evaluating model...")
    val evaluatorAccuracy = new MulticlassClassificationEvaluator()
      .setLabelCol("label")
      .setPredictionCol("prediction")
      .setMetricName("accuracy")

    val evaluatorF1 = new MulticlassClassificationEvaluator()
      .setLabelCol("label")
      .setPredictionCol("prediction")
      .setMetricName("f1")

    val accuracy = evaluatorAccuracy.evaluate(predictions)
    val f1Score  = evaluatorF1.evaluate(predictions)
    println(s"\nTest set accuracy = $accuracy")
    println(s"Test set F1-Score = $f1Score")

    // Confusion matrix as (label, prediction, count) rows; ordered so the
    // output is stable and readable across runs.
    println("\nConfusion Matrix:")
    predictions.groupBy("label", "prediction").count()
      .orderBy("label", "prediction")
      .show()

    println("\nSample predictions:")
    predictions.select("label", "prediction", "features").show(5)

    // Per-feature contribution to the forest's impurity reduction; index order
    // matches FeatureColumns because VectorAssembler preserves input order.
    println("\nFeature Importances:")
    FeatureColumns.zip(model.featureImportances.toArray).foreach {
      case (feature, importance) => println(s"$feature: $importance")
    }
  }
}
