from pyspark.sql import SparkSession
from pyspark.sql.functions import col, countDistinct

import 支付买家数2

# Initialize the Spark session once at module level; it is shared by every
# function in this file (and by importers of this module).
spark = SparkSession.builder \
    .appName("ShopCustomerCount") \
    .getOrCreate()

def Number_of_user_visitors(
        user_action_log_path: str,  # path to the user action log CSV
        page_mapping_path: str,  # path to the page-category mapping CSV
        start_date: str = None,
        end_date: str = None
) -> int:
    """
    Count distinct users who visited shop-related pages.

    Parameters:
    - user_action_log_path: user action log CSV; expects columns
      'user_id', 'page_url', 'action_time'
    - page_mapping_path: page-category mapping CSV; expects columns
      'page_url', 'page_type'
    - start_date: inclusive start date ('YYYY-MM-DD'); no lower bound if None
    - end_date: inclusive end date ('YYYY-MM-DD'); no upper bound if None

    Returns:
    - number of distinct visitors (unique user_id) on shop-related pages
    """
    # Read the user action log.
    action_df = spark.read.csv(user_action_log_path, header=True, inferSchema=True)

    # Apply each date bound only when it is provided. The original
    # unconditional between(start_date, end_date) turned the documented
    # None defaults into a null predicate that silently dropped every row.
    if start_date is not None:
        action_df = action_df.filter(col("action_time") >= start_date)
    if end_date is not None:
        action_df = action_df.filter(col("action_time") <= end_date)
    action_df = action_df.select("user_id", "page_url")

    # Read the page-category dictionary, keeping only shop-related pages
    # (shop page and product detail page).
    page_mapping_df = spark.read.csv(page_mapping_path, header=True, inferSchema=True) \
        .filter(col("page_type").isin(["shop_page", "product_detail"]))

    # Join actions with the page categories to keep shop-related actions only.
    shop_actions_df = action_df.join(
        page_mapping_df,
        on="page_url",
        how="inner"
    )

    # Bug fix: count distinct users, not distinct (user_id, page_url) rows —
    # previously a user visiting several shop pages was counted once per page.
    user_visitors_count = shop_actions_df.select("user_id").distinct().count()

    return user_visitors_count

# Example run: compute the two metrics over the same window and derive the
# shop payment conversion rate.
if __name__ == "__main__":
    user_visitors_count = Number_of_user_visitors(
        user_action_log_path="mock_data/user_action_log.csv",
        page_mapping_path="mock_data/page_mapping.csv",
        start_date="2025-07-01",
        end_date="2025-07-10"
    )

    Payment_buyer_count = 支付买家数2.Payment_buyer_count(
        orders_path="mock_data/orders.csv",
        refunds_path="mock_data/refunds.csv",
        start_date="2025-07-01",
        end_date="2025-07-10"
    )

    print(f"用户访客数: {user_visitors_count}")
    print(f"支付买家数: {Payment_buyer_count}")

    # Bug fix: the conversion rate (店铺支付转换率) is buyers / visitors —
    # the original printed visitors / buyers — and must be scaled by 100
    # to match the '%' suffix. Guard against zero visitors to avoid
    # ZeroDivisionError on an empty window.
    if user_visitors_count:
        conversion_rate = Payment_buyer_count / user_visitors_count * 100
        print(f"店铺支付转换率: {conversion_rate:.2f} %")
    else:
        print("店铺支付转换率: N/A (无访客)")