from pyspark.sql import SparkSession
from pyspark.sql.functions import (
    avg,
    col,
    count,
    lag,
    lead,
    lit,
    max,
    min,
    sum,
    to_date,
    unix_timestamp,
    when,
)
from pyspark.sql.window import Window

# Initialize the shared Spark session used by the functions below.
spark = SparkSession.builder \
    .appName("ShopAverageStayTime") \
    .getOrCreate()

def calculate_average_stay_time(
        user_action_log_path: str,  # path to the user-action log CSV
        page_mapping_path: str,  # path to the page-category mapping CSV
        session_timeout: int = 1800,  # session timeout in seconds (default 30 min)
        start_date: str = None,
        end_date: str = None
) -> float:
    """
    Compute the average shop-visitor session stay time.

    A "session" is a maximal run of a user's shop-related actions in which
    consecutive actions are no more than ``session_timeout`` seconds apart.
    Stay time of a session is last action timestamp minus first.

    Parameters:
    - user_action_log_path: user-action log path (CSV with header:
      user_id, page_url, action_time)
    - page_mapping_path: page-category mapping path (CSV with header:
      page_url, page_type)
    - session_timeout: session timeout in seconds
    - start_date: inclusive start date ('YYYY-MM-DD'); None = no lower bound
    - end_date: inclusive end date ('YYYY-MM-DD'); None = no upper bound

    Returns:
    - average stay time in seconds; 0.0 when no shop actions match
    """
    # Read the user-action log and derive an epoch-seconds timestamp column.
    action_df = spark.read.csv(user_action_log_path, header=True, inferSchema=True) \
        .select("user_id", "page_url", "action_time") \
        .withColumn("action_timestamp", unix_timestamp(col("action_time")))

    # Bug fix: the date-range parameters were previously accepted but never
    # applied. Filter on the calendar date of each action (bounds inclusive).
    if start_date is not None:
        action_df = action_df.filter(to_date(col("action_time")) >= lit(start_date))
    if end_date is not None:
        action_df = action_df.filter(to_date(col("action_time")) <= lit(end_date))

    # Read the page-category mapping, keeping only shop-related page types
    # (shop pages and product-detail pages).
    page_mapping_df = spark.read.csv(page_mapping_path, header=True, inferSchema=True) \
        .filter(col("page_type").isin(["shop_page", "product_detail"]))

    # Inner join restricts the action log to shop-related actions.
    shop_actions_df = action_df.join(
        page_mapping_df,
        on="page_url",
        how="inner"
    )

    # Order each user's actions by time for sessionization.
    window_spec = Window.partitionBy("user_id").orderBy("action_timestamp")

    # Bug fix: use lag (gap SINCE the previous action), not lead (gap until
    # the next one). With lead, the "new session" flag landed on the LAST row
    # of each session, so the running sum below assigned that row to the
    # following session and min/max spanned the inter-session gap, inflating
    # stay times. With lag, a row starts a new session exactly when it is the
    # user's first action (gap is null) or the gap exceeds the timeout.
    shop_actions_with_gap_df = shop_actions_df.withColumn(
        "time_gap",
        col("action_timestamp") - lag("action_timestamp", 1).over(window_spec)
    )

    session_start_df = shop_actions_with_gap_df.withColumn(
        "is_new_session",
        when(col("time_gap").isNull() | (col("time_gap") > session_timeout), 1).otherwise(0)
    )

    # Running sum of session-start flags yields a per-user session ID.
    session_id_df = session_start_df.withColumn(
        "session_id",
        sum("is_new_session").over(window_spec)
    )

    # Per-session stay time = last action timestamp - first action timestamp.
    session_stay_time_df = session_id_df.groupBy("user_id", "session_id") \
        .agg(
        min("action_timestamp").alias("session_start_time"),
        max("action_timestamp").alias("session_end_time")
    ) \
        .withColumn("stay_time", col("session_end_time") - col("session_start_time"))

    # Average stay time in seconds. avg() over an empty DataFrame yields
    # None; normalize to 0.0 so the declared float return type holds.
    average_stay_time = session_stay_time_df.agg(avg("stay_time")).collect()[0][0]

    return float(average_stay_time) if average_stay_time is not None else 0.0

# 示例调用
# Example invocation.
if __name__ == "__main__":
    average_stay_time = calculate_average_stay_time(
        user_action_log_path="mock_data/user_action_log.csv",
        page_mapping_path="mock_data/page_mapping.csv",
        session_timeout=1800,  # 30 minutes
        start_date="2025-07-01",
        end_date="2025-07-10"
    )

    # Guard against a None result (no matching data): formatting None with
    # :.2f raises TypeError.
    if average_stay_time is None:
        print("店铺访客平均停留时间: 无数据")
    else:
        print(f"店铺访客平均停留时间: {average_stay_time:.2f} 秒")