import sys

from pyspark.sql import SparkSession
from pyspark.sql.functions import *

if __name__ == "__main__":
    # Spark SQL analysis of e-commerce user-behavior logs: conversion rate,
    # regional top sellers, hourly add-to-cart trend, and high-value users.
    #
    # The CSV path may be overridden on the command line; the original
    # hard-coded location remains the default so existing invocations work.
    csv_path = sys.argv[1] if len(sys.argv) > 1 else "D://ecommerce_env/user_behavior.csv"

    spark = SparkSession.builder \
                        .appName("Ecommerce User Behavior SQL Analysis") \
                        .getOrCreate()
    try:
        df = spark.read.csv(csv_path, header=True, inferSchema=True)
        df.createOrReplaceTempView("user_behavior")  # register temp view for SQL queries

        # Metric 1: click-to-purchase conversion rate per product category.
        # LEFT JOIN (instead of the previous INNER JOIN) keeps categories that
        # received clicks but no purchases; COALESCE reports their purchase
        # count and conversion rate as 0 instead of silently dropping the row.
        conversion_rate = spark.sql("""
            SELECT 
                `t1`.`商品类别`,
                `t1`.`点击量`,
                COALESCE(`t2`.`购买量`, 0) AS `购买量`,
                ROUND(COALESCE(`t2`.`购买量`, 0)/`t1`.`点击量`, 4) AS `转化率`
            FROM (
                SELECT `商品类别`, COUNT(*) AS `点击量`
                FROM `user_behavior`
                WHERE `行为类型` = '点击'
                GROUP BY `商品类别`
            ) `t1`
            LEFT JOIN (
                SELECT `商品类别`, COUNT(*) AS `购买量`
                FROM `user_behavior`
                WHERE `行为类型` = '购买'
                GROUP BY `商品类别`
            ) `t2` ON `t1`.`商品类别` = `t2`.`商品类别`
            ORDER BY `转化率` DESC
        """)
        conversion_rate.show()

        # Metric 2: top-3 product categories by sales amount per region.
        # ROW_NUMBER() in a subquery stands in for QUALIFY, which Spark SQL
        # does not support.
        province_sales = spark.sql("""
            SELECT `地域`, `商品类别`, `总销售额`
            FROM (
                SELECT 
                    `地域`,
                    `商品类别`,
                    SUM(`商品价格`) AS `总销售额`,
                    ROW_NUMBER() OVER (PARTITION BY `地域` ORDER BY SUM(`商品价格`) DESC) AS `rn`
                FROM `user_behavior`
                WHERE `行为类型` = '购买'
                GROUP BY `地域`, `商品类别`
            ) `tmp`
            WHERE `rn` <= 3
            ORDER BY `地域`, `总销售额` DESC
        """)
        province_sales.show()

        # Metric 3: hourly add-to-cart trend.
        # GROUP BY repeats the HOUR() expression rather than the SELECT alias
        # so the query does not depend on spark.sql.groupByAliases being on.
        add_to_cart_trend = spark.sql("""
            SELECT 
                HOUR(`时间戳`) AS `小时`,
                COUNT(*) AS `加购次数`
            FROM `user_behavior`
            WHERE `行为类型` = '加购'
            GROUP BY HOUR(`时间戳`)
            ORDER BY `小时`
        """)
        add_to_cart_trend.show()

        # Metric 4: high-value users — top 10 by purchase count, ties broken
        # by total spend.
        high_value_users = spark.sql("""
            SELECT 
                `用户ID`,
                COUNT(*) AS `购买次数`,
                SUM(`商品价格`) AS `总消费金额`
            FROM `user_behavior`
            WHERE `行为类型` = '购买'
            GROUP BY `用户ID`
            ORDER BY `购买次数` DESC, `总消费金额` DESC
            LIMIT 10
        """)
        high_value_users.show()
    finally:
        # Always release the Spark session, even when a query above fails.
        spark.stop()