package org.example

import java.util.Properties

import scala.util.control.NonFatal

import org.apache.log4j.Logger
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types._
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * Batch job: reads raw stock rows from MySQL, cleans/casts them, computes
 * per-stock daily statistics and market-wide statistics, and appends both
 * result sets back to MySQL.
 */
object StockDataAnalyzer {
  private val logger = Logger.getLogger(getClass)

  // Shared JDBC settings — previously duplicated verbatim in the read and
  // save paths, which risked the two drifting apart.
  // NOTE(review): credentials are hard-coded in source; move them to
  // configuration or environment variables before this ships anywhere real.
  private val JdbcUrl =
    "jdbc:mysql://localhost:3306/stock_data?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai&useSSL=false"

  /** Builds a fresh Properties per call (java.util.Properties is mutable). */
  private def jdbcProperties(): Properties = {
    val properties = new Properties()
    properties.put("user", "root")
    properties.put("password", "litao021218")
    properties.put("driver", "com.mysql.cj.jdbc.Driver")
    properties
  }

  def main(args: Array[String]): Unit = {
    // Local-mode SparkSession using all available cores.
    val spark = SparkSession.builder()
      .appName("StockDataAnalysis")
      .master("local[*]")
      .getOrCreate()

    try {
      // 1. Read raw rows from MySQL.
      val df = readFromMySQL(spark)
      logger.info(s"已从MySQL读取 ${df.count()} 条记录")

      // 2. Clean the data. Cached: it is counted here and re-used by every
      //    downstream step, so without caching each action re-reads MySQL.
      val cleanedDF = cleanData(df).cache()
      logger.info(s"数据清洗后剩余 ${cleanedDF.count()} 条有效记录")
      if (cleanedDF.isEmpty) {
        logger.warn("警告：数据清洗后无有效数据，请检查清洗逻辑！")
      }

      // 3. Take the most recent rows (see getLatestData for the exact
      //    semantics). Cached: it feeds both statistics jobs below.
      val latestData = getLatestData(cleanedDF).cache()
      logger.info(s"获取到 ${latestData.count()} 条最新数据")
      if (latestData.isEmpty) {
        logger.warn("警告：获取最新数据后无有效数据，请检查数据源或时间排序逻辑！")
      }

      // 4. Per-stock daily statistics.
      val dailyStats = calculateDailyStats(latestData)
      logger.info(s"计算得到 ${dailyStats.count()} 条每日股票统计数据")
      if (dailyStats.isEmpty) {
        logger.warn("警告：每日股票统计数据为空，请检查计算逻辑！")
      }

      // 5. Market-wide statistics (single aggregated row).
      val marketStats = calculateMarketStats(latestData)
      logger.info("计算得到市场整体统计数据")
      if (marketStats.isEmpty) {
        logger.warn("警告：市场整体统计数据为空，请检查计算逻辑！")
      }

      // 6. Persist both result sets. saveToMySQL now rethrows on failure,
      //    so this catch block is reachable (it was dead code before) and
      //    the success message below is only logged on actual success.
      try {
        saveToMySQL(dailyStats, "stock_daily_stats1")
        saveToMySQL(marketStats, "market_daily_stats")
        logger.info("数据分析完成，结果已成功保存到MySQL")
      } catch {
        case NonFatal(e) =>
          logger.error(s"保存数据到MySQL失败: ${e.getMessage}", e)
      }

      // Debug-only console preview of the results.
      logger.debug("部分每日股票统计数据示例：")
      dailyStats.show(5)

      logger.debug("市场整体统计数据：")
      marketStats.show()

    } catch {
      // NonFatal: let OutOfMemoryError / InterruptedException propagate.
      case NonFatal(e) => logger.error(s"数据分析出错: ${e.getMessage}", e)
    } finally {
      spark.stop()
    }
  }

  /**
   * Reads the raw stock table from MySQL.
   *
   * @param spark active session used for the JDBC read
   * @return the `sina_kcb_stocks1` table as a DataFrame
   */
  def readFromMySQL(spark: SparkSession): DataFrame =
    spark.read.jdbc(JdbcUrl, "sina_kcb_stocks1", jdbcProperties())

  /**
   * Casts string columns to numeric types, derives `update_date` from
   * `update_time`, and filters out invalid rows.
   *
   * Rows are dropped when symbol/name/trade are null, or when any of
   * trade/open/high/low/settlement is negative.
   * NOTE(review): because `col >= 0` evaluates to NULL for NULL inputs,
   * rows where open/high/low/settlement fail the numeric cast are also
   * silently dropped by the last filter — confirm this is intended.
   */
  def cleanData(df: DataFrame): DataFrame = {
    df
      // Cast price/volume columns from their raw string representation.
      .withColumn("trade", col("trade").cast(DoubleType))
      .withColumn("pricechange", col("pricechange").cast(DoubleType))
      // Strip the "%" suffix before casting the percentage.
      .withColumn("changepercent",
        regexp_replace(col("changepercent"), "%", "").cast(DoubleType))
      .withColumn("buy", col("buy").cast(DoubleType))
      .withColumn("sell", col("sell").cast(DoubleType))
      .withColumn("settlement", col("settlement").cast(DoubleType))
      .withColumn("open", col("open").cast(DoubleType))
      .withColumn("high", col("high").cast(DoubleType))
      .withColumn("low", col("low").cast(DoubleType))
      .withColumn("volume", col("volume").cast(LongType))
      .withColumn("amount", col("amount").cast(LongType))
      .withColumn("update_date", to_date(col("update_time")))
      // Drop rows missing the key identifying/price fields.
      .filter(col("symbol").isNotNull && col("name").isNotNull && col("trade").isNotNull)
      // Relaxed sanity filter: allow zero but reject negative values.
      .filter(col("trade") >= 0 && col("open") >= 0 && col("high") >= 0 && col("low") >= 0 && col("settlement") >= 0)
  }

  /**
   * Returns the 1000 most recent rows by `update_time`.
   * NOTE(review): despite the name this is a fixed-size "latest N" window,
   * not "all rows for the current day" — confirm against requirements.
   */
  def getLatestData(df: DataFrame): DataFrame =
    df.orderBy(col("update_time").desc).limit(1000)

  /**
   * Aggregates per (symbol, name, update_date): latest price/change,
   * day high/low, average price, totals, and a global change-percent rank.
   */
  def calculateDailyStats(df: DataFrame): DataFrame = {
    df.groupBy("symbol", "name", "update_date")
      .agg(
        // NOTE(review): first() inside groupBy has no ordering guarantee in
        // Spark — "latest" here is nondeterministic unless the input has a
        // single row per group. Consider a row_number window over
        // update_time desc if exact latest-row semantics are required.
        first("trade").as("latest_price"),
        first("changepercent").as("latest_change_percent"),
        max("high").as("day_high"),
        min("low").as("day_low"),
        avg("trade").as("avg_price"),
        sum("volume").as("total_volume"),
        sum("amount").as("total_amount"),
        count("symbol").as("update_count")
      )
      // Global rank by change percent. An un-partitioned Window collapses
      // all rows to one partition; acceptable at this data size (<=1000
      // rows after getLatestData) but will not scale.
      .withColumn("rank", rank().over(Window.orderBy(col("latest_change_percent").desc)))
  }

  /**
   * Market-wide summary: distinct stock count, average change percent,
   * total volume/amount, and the fraction of stocks trading up.
   */
  def calculateMarketStats(df: DataFrame): DataFrame = {
    df.agg(
      countDistinct("symbol").as("total_stocks"),
      avg("changepercent").as("market_avg_change"),
      sum("volume").as("market_total_volume"),
      sum("amount").as("market_total_amount"),
      // Mean of a 0/1 indicator = share of rising stocks.
      avg(when(col("changepercent") > 0, 1).otherwise(0)).as("up_ratio")
    )
  }

  /**
   * Appends `df` to the given MySQL table.
   *
   * Logs and RETHROWS on failure so callers can react — the previous
   * version swallowed the exception, which made the caller's error
   * handling unreachable and let the job report success after a failed
   * write.
   *
   * @throws Exception whatever the JDBC write raised
   */
  def saveToMySQL(df: DataFrame, tableName: String): Unit = {
    try {
      df.write
        .mode("append")
        .jdbc(JdbcUrl, tableName, jdbcProperties())
      logger.info(s"成功保存数据到表 $tableName")
    } catch {
      case NonFatal(e) =>
        logger.error(s"保存数据到表 $tableName 失败: ${e.getMessage}", e)
        throw e
    }
  }
}
