package SYY.Service

import org.apache.spark.sql.{SparkSession, functions => F}

import scala.util.control.NonFatal

object AgeAnalysisService {

  /** Reads the résumé CSV, groups rows by age ("年龄") and screening result
    * ("筛选结果"), counts each combination, prints the aggregation, and
    * persists it to the `age_analysis_result` JDBC table.
    *
    * Creates (and always stops) its own local SparkSession.
    *
    * @param csvFilePath optional path to the input CSV; when None, falls back
    *                    to "input/Chinese_resume_data.csv"
    */
  def analyzeAge(csvFilePath: Option[String] = None): Unit = {
    val spark = SparkSession.builder()
      .appName("AgeAnalysis")
      .master("local[*]")
      .getOrCreate()

    val filePath = csvFilePath.getOrElse("input/Chinese_resume_data.csv")

    try {
      val df = spark.read
        .option("header", "true")
        .option("inferSchema", "true")
        .csv(filePath)

      // Correlate age with screening outcome: one row per (age, result)
      // pair with its occurrence count ("数量").
      val resultDF = df.groupBy("年龄", "筛选结果")
        .agg(F.count("*").as("数量"))

      // Print a sample of the aggregation to stdout.
      resultDF.show()

      // Persist the aggregation for downstream reporting.
      saveToDatabase(resultDF, "age_analysis_result")
    } catch {
      // NonFatal only: let fatal errors (OOM, interrupts) propagate
      // instead of being swallowed by a blanket Exception catch.
      case NonFatal(e) =>
        println(s"处理文件时出错: ${e.getMessage}")
    } finally {
      // Always release the local Spark context, even on failure.
      spark.stop()
    }
  }

  /** Writes `df` to a JDBC table, overwriting any existing contents.
    *
    * Connection settings default to the previous hard-coded values so
    * existing callers are unaffected, but can now be supplied explicitly.
    *
    * NOTE(review): the default URL/credentials are plaintext placeholders —
    * real deployments should pass them in from configuration, not source.
    *
    * @param df        the DataFrame to persist
    * @param tableName destination table name
    * @param jdbcUrl   JDBC connection URL (defaults to the local MySQL placeholder)
    * @param user      database user name
    * @param password  database password
    */
  def saveToDatabase(
      df: org.apache.spark.sql.DataFrame,
      tableName: String,
      jdbcUrl: String = "jdbc:mysql://localhost:3306/your_database",
      user: String = "your_username",
      password: String = "your_password"): Unit = {
    val connectionProperties = new java.util.Properties()
    connectionProperties.put("user", user)
    connectionProperties.put("password", password)

    try {
      df.write
        .mode(org.apache.spark.sql.SaveMode.Overwrite)
        .jdbc(jdbcUrl, tableName, connectionProperties)
      println(s"数据已成功保存到 $tableName 表")
    } catch {
      // NonFatal only — see analyzeAge for rationale.
      case NonFatal(e) =>
        println(s"保存数据到数据库时出错: ${e.getMessage}")
    }
  }
}