from pyspark.sql import SparkSession
from pyspark.sql.functions import *
from pyspark.sql.functions import countDistinct
from pyspark.sql.types import *
from pyspark.sql.window import Window


def _read_jdbc(spark, url, table, user, password):
    """Load one MySQL table into a DataFrame through the JDBC connector."""
    return (
        spark.read.format("jdbc")
        .option("url", url)
        .option("dbtable", table)
        .option("user", user)
        .option("password", password)
        .load()
    )


def _write_jdbc(df, url, table, user, password):
    """Overwrite one MySQL table with *df* through the JDBC connector."""
    (
        df.write.format("jdbc")
        .option("url", url)
        .option("dbtable", table)
        .option("user", user)
        .option("password", password)
        .mode("overwrite")
        .save()
    )


def _clean(df):
    """Cast the numeric columns, normalize percents, derive the trade date.

    Rows missing the key fields (symbol/name/trade) are dropped.
    """
    # Cast every numeric column up front: without this, aggregates such as
    # max("high")/min("low") would compare the raw strings lexicographically
    # (e.g. "9.5" > "10.2") and sum("volume") would be unreliable.
    for c in ("trade", "pricechange", "high", "low", "volume", "amount"):
        df = df.withColumn(c, col(c).cast(DoubleType()))
    return (
        df.withColumn(
            "changepercent",
            regexp_replace(col("changepercent"), "%", "").cast(DoubleType()),
        )
        .withColumn("update_date", to_date(col("update_time")))
        .dropna(subset=["symbol", "name", "trade"])
    )


def analyze_stock_data(
    jdbc_url="jdbc:mysql://localhost:3306/stock_data",
    user="root",
    password="litao021218",  # NOTE(review): credentials belong in config/env, not source
    source_table="sina_kcb_stocks1",
    max_rows=1000,
):
    """Read stock rows from MySQL, compute per-stock daily stats and
    market-wide stats, and write both result tables back to MySQL.

    Parameters (all keyword, defaulting to the original hard-coded values):
        jdbc_url:     JDBC URL of the MySQL database.
        user:         MySQL user name.
        password:     MySQL password.
        source_table: table holding the raw scraped stock rows.
        max_rows:     number of most-recent rows to analyze.

    Results go to tables ``stock_daily_stats`` and ``market_daily_stats``
    (mode=overwrite). Errors are reported on stdout; the SparkSession is
    always stopped.
    """
    # 1. Initialize the SparkSession (local mode, MySQL JDBC driver on classpath).
    spark = SparkSession.builder \
        .appName("StockDataAnalysis") \
        .config("spark.master", "local[*]") \
        .config("spark.jars", "mysql-connector-java-5.1.32-bin.jar") \
        .getOrCreate()

    try:
        # 2. Read the raw rows from MySQL.
        df = _read_jdbc(spark, jdbc_url, source_table, user, password)

        # 3. Clean / type the data.
        cleaned_df = _clean(df)

        # 4. Keep only the most recent rows.
        latest_data = cleaned_df.orderBy(col("update_time").desc()).limit(max_rows)

        # Deterministic "latest value" per stock and day: first() inside a
        # plain groupBy().agg() is non-deterministic in Spark (input order is
        # not preserved through the shuffle), so take first() over a window
        # explicitly ordered by update_time descending instead.
        latest_win = Window.partitionBy("symbol", "update_date") \
            .orderBy(col("update_time").desc())
        enriched = latest_data \
            .withColumn("_latest_price", first("trade").over(latest_win)) \
            .withColumn("_latest_change", first("changepercent").over(latest_win))

        # 5. Per-stock daily statistics. The _latest_* columns are constant
        # within each (symbol, update_date) partition, so first() is safe here.
        daily_stats = enriched.groupBy("symbol", "name", "update_date").agg(
            first("_latest_price").alias("latest_price"),
            first("_latest_change").alias("latest_change_percent"),
            max("high").alias("day_high"),
            min("low").alias("day_low"),
            avg("trade").alias("avg_price"),
            sum("volume").alias("total_volume"),
            sum("amount").alias("total_amount"),
            count("symbol").alias("update_count"),
        )

        # 6. Market-wide statistics over the same window of rows.
        market_stats = latest_data.agg(
            countDistinct("symbol").alias("total_stocks"),
            avg("changepercent").alias("market_avg_change"),
            sum("volume").alias("market_total_volume"),
            sum("amount").alias("market_total_amount"),
            # Share of rows with a positive change: mean of a 0/1 indicator.
            avg(when(col("changepercent") > 0, 1).otherwise(0)).alias("up_ratio"),
        )

        # 7. Persist both result tables.
        _write_jdbc(daily_stats, jdbc_url, "stock_daily_stats", user, password)
        _write_jdbc(market_stats, jdbc_url, "market_daily_stats", user, password)

        print("数据分析完成，结果已保存到MySQL")

    except Exception as e:
        # Best-effort reporting, matching the script's original behavior.
        print(f"数据分析出错: {e}")
    finally:
        spark.stop()


# Script entry point: run the full read -> clean -> aggregate -> write pipeline
# with the default (hard-coded) MySQL connection settings.
if __name__ == "__main__":
    analyze_stock_data()