package org.niit.service

import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.functions._
import org.niit.dao.OfflineAnalysisDao
import org.niit.util.ConfigUtil

/**
 * 离线分析服务，使用Spark SQL进行多维度数据分析
 */
/**
 * Offline analysis service: multi-dimensional analysis of order data with Spark SQL.
 *
 * Each `analyzeXxx` method computes a per-key order count (plus percentage or
 * average price), prints the result, and persists each row through
 * [[OfflineAnalysisDao]]. The shared count → percentage → show → save pipeline
 * is factored into private helpers so each analysis is a one-screen method.
 */
object OfflineAnalysisService {

  /**
   * Loads order data from CSV (first line is the header, schema inferred).
   *
   * The returned DataFrame is cached because `runAllAnalysis` runs seven
   * analyses over it, several of which trigger multiple Spark actions each;
   * without caching the CSV would be re-read and re-parsed on every action.
   *
   * @param spark    active SparkSession
   * @param dataPath path of the CSV data file
   * @return cached order DataFrame
   */
  def loadOrderData(spark: SparkSession, dataPath: String): DataFrame = {
    val orderDF = spark.read
      .option("header", "true")
      .option("inferSchema", "true")
      .csv(dataPath)
      .cache()

    // Print schema and a small sample so misconfigured input is visible early.
    orderDF.printSchema()
    orderDF.show(5)

    orderDF
  }

  /**
   * Null-safe total of the `order_count` column.
   *
   * `sum` over zero rows yields SQL NULL, so the original
   * `first().getLong(0)` would throw on an empty DataFrame.
   */
  private def totalOrderCount(countDF: DataFrame): Long = {
    val row = countDF.agg(sum("order_count")).first()
    if (row.isNullAt(0)) 0L else row.getLong(0)
  }

  /**
   * Appends a `percentage` column (order_count / total * 100).
   * Guards against division by zero when the DataFrame is empty.
   */
  private def withPercentage(countDF: DataFrame): DataFrame = {
    import countDF.sparkSession.implicits._
    val total = totalOrderCount(countDF)
    val denominator = if (total > 0L) total else 1L // avoid div-by-zero on empty input
    countDF.withColumn("percentage", ($"order_count" / denominator) * 100)
  }

  /**
   * Displays `resultDF` and persists every row via `save`.
   *
   * Expects exactly three columns typed (String, Long, Double) — the shape
   * produced by every analysis in this service. `collect()` is safe here:
   * each result is a small aggregate (one row per bucket/category).
   */
  private def showAndSave(resultDF: DataFrame)(save: (String, Int, Double) => Unit): Unit = {
    resultDF.show()
    resultDF.collect().foreach { row =>
      save(row.getString(0), row.getLong(1).toInt, row.getDouble(2))
    }
  }

  /**
   * Age-bracket distribution analysis: order count and percentage per bracket.
   *
   * @param orderDF order DataFrame (must contain a numeric `年龄` column)
   */
  def analyzeAgeDistribution(orderDF: DataFrame): Unit = {
    import orderDF.sparkSession.implicits._

    // Ordered Seq (not Map) so output/DB row order is deterministic.
    // Bounds are inclusive on both ends.
    // NOTE(review): ages above 200 would be silently dropped by the last bucket.
    val ageRanges = Seq(
      "18岁以下" -> (0, 18),
      "19-25岁" -> (19, 25),
      "26-35岁" -> (26, 35),
      "36-45岁" -> (36, 45),
      "46-55岁" -> (46, 55),
      "56岁以上" -> (56, 200)
    )

    // One count action per bucket over the cached DataFrame.
    val ageDF = ageRanges.map { case (range, (lo, hi)) =>
      (range, orderDF.filter($"年龄" >= lo && $"年龄" <= hi).count())
    }.toDF("age_range", "order_count")

    showAndSave(withPercentage(ageDF))(OfflineAnalysisDao.saveAgeDistribution)
  }

  /**
   * Gender-ratio analysis: order count and percentage per gender.
   *
   * @param orderDF order DataFrame (must contain a `性别` column)
   */
  def analyzeGenderRatio(orderDF: DataFrame): Unit = {
    import orderDF.sparkSession.implicits._

    val genderDF = orderDF.groupBy($"性别")
      .agg(count("*").alias("order_count"))

    showAndSave(withPercentage(genderDF))(OfflineAnalysisDao.saveGenderRatio)
  }

  /**
   * Regional distribution analysis: order count and percentage per city,
   * ordered by count descending.
   *
   * @param orderDF order DataFrame (must contain a `城市` column)
   */
  def analyzeRegionDistribution(orderDF: DataFrame): Unit = {
    import orderDF.sparkSession.implicits._

    val regionDF = orderDF.groupBy($"城市")
      .agg(count("*").alias("order_count"))
      .orderBy($"order_count".desc)

    showAndSave(withPercentage(regionDF))(OfflineAnalysisDao.saveRegionDistribution)
  }

  /**
   * Dish-category preference analysis: order count and percentage per
   * category, ordered by count descending.
   *
   * @param orderDF order DataFrame (must contain a `菜品类别` column)
   */
  def analyzeCategoryPreference(orderDF: DataFrame): Unit = {
    import orderDF.sparkSession.implicits._

    val categoryDF = orderDF.groupBy($"菜品类别")
      .agg(count("*").alias("order_count"))
      .orderBy($"order_count".desc)

    showAndSave(withPercentage(categoryDF))(OfflineAnalysisDao.saveCategoryPreference)
  }

  /**
   * Spend-amount analysis: order count and average price per price bracket.
   *
   * @param orderDF order DataFrame (must contain a numeric `价格` column)
   */
  def analyzePriceDistribution(orderDF: DataFrame): Unit = {
    import orderDF.sparkSession.implicits._

    // Ordered Seq (not Map) so output/DB row order is deterministic.
    // Half-open intervals [lo, hi) so adjacent brackets never double-count.
    // NOTE(review): prices >= 10000 are silently dropped by the last bucket.
    val priceRanges = Seq(
      "0-50元" -> (0, 50),
      "50-100元" -> (50, 100),
      "100-200元" -> (100, 200),
      "200-500元" -> (200, 500),
      "500元以上" -> (500, 10000)
    )

    val priceDF = priceRanges.map { case (range, (lo, hi)) =>
      val bucket = orderDF.filter($"价格" >= lo && $"价格" < hi)
      val n = bucket.count()
      // avg over zero rows is NULL, so only aggregate non-empty buckets.
      val avgPrice = if (n > 0) bucket.agg(avg("价格")).first().getDouble(0) else 0.0
      (range, n, avgPrice)
    }.toDF("price_range", "order_count", "avg_price")

    showAndSave(priceDF)(OfflineAnalysisDao.savePriceAnalysis)
  }

  /**
   * Active time-period analysis: order count and percentage per period.
   *
   * @param orderDF order DataFrame (must contain a `时间段` column)
   */
  def analyzeTimePeriod(orderDF: DataFrame): Unit = {
    import orderDF.sparkSession.implicits._

    val timePeriodDF = orderDF.groupBy($"时间段")
      .agg(count("*").alias("order_count"))

    showAndSave(withPercentage(timePeriodDF))(OfflineAnalysisDao.saveTimePeriodAnalysis)
  }

  /**
   * Platform analysis: order count and percentage per platform,
   * ordered by count descending.
   *
   * @param orderDF order DataFrame (must contain a `平台` column)
   */
  def analyzePlatform(orderDF: DataFrame): Unit = {
    import orderDF.sparkSession.implicits._

    val platformDF = orderDF.groupBy($"平台")
      .agg(count("*").alias("order_count"))
      .orderBy($"order_count".desc)

    showAndSave(withPercentage(platformDF))(OfflineAnalysisDao.savePlatformAnalysis)
  }

  /**
   * Runs all seven offline analyses over the data at `dataPath`.
   *
   * @param spark    active SparkSession
   * @param dataPath path of the CSV data file
   */
  def runAllAnalysis(spark: SparkSession, dataPath: String): Unit = {
    val orderDF = loadOrderData(spark, dataPath)

    println("开始执行离线分析...")

    println("\n1. 年龄段分布分析")
    analyzeAgeDistribution(orderDF)

    println("\n2. 性别比例分析")
    analyzeGenderRatio(orderDF)

    println("\n3. 地区分布分析")
    analyzeRegionDistribution(orderDF)

    println("\n4. 菜品类别偏好分析")
    analyzeCategoryPreference(orderDF)

    println("\n5. 消费金额分析")
    analyzePriceDistribution(orderDF)

    println("\n6. 活跃时段分析")
    analyzeTimePeriod(orderDF)

    println("\n7. 平台分析")
    analyzePlatform(orderDF)

    // Release the cache acquired in loadOrderData once every analysis is done.
    orderDF.unpersist()

    println("\n离线分析完成！")
  }
}