package app

import controller.SchoolTypeAnalysisController
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import util.SparkUtil
import org.apache.spark.streaming.Seconds

/**
 * Entry point for the school-type analysis job.
 *
 * Boots a local Spark runtime via [[util.SparkUtil]], then hands the resume CSV
 * to [[controller.SchoolTypeAnalysisController]] for processing.
 *
 * Usage: the first command-line argument, if given, overrides the input CSV
 * path; otherwise the default relative path `input/Chinese_resume_data.csv`
 * is used (resolved against the working directory — ensure it exists there).
 */
object SchoolTypeAnalysisEntryApp {

  /** Default input file, relative to the process working directory. */
  private val DefaultCsvPath = "input/Chinese_resume_data.csv"

  def main(args: Array[String]): Unit = {
    // Configure a local-mode Spark application using all available cores.
    val sparkConf = new SparkConf()
      .setAppName("SchoolTypeAnalysis")
      .setMaster("local[*]")

    // CreateSpark initializes both the SparkContext and the SparkSession
    // (batch interval of 5s is used for the streaming context it sets up).
    SparkUtil.CreateSpark(sparkConf, Seconds(5))

    // Retrieve the SparkSession that CreateSpark just initialized.
    val spark = SparkUtil.takeSpark()

    try {
      // Allow the CSV path to be supplied as the first CLI argument;
      // fall back to the project-default location otherwise.
      val csvPath = args.headOption.getOrElse(DefaultCsvPath)

      val controller = new SchoolTypeAnalysisController(spark)
      controller.dispatch(csvPath)
    } finally {
      // Always release Spark resources, even if dispatch throws.
      // Option(...) treats a null session as a no-op instead of NPE-ing.
      Option(spark).foreach(_.stop())
    }
  }
}