from pyspark.sql import SparkSession
from pyspark.sql.functions import col, countDistinct

# Create (or reuse) the shared Spark session for this script.
spark = (
    SparkSession.builder
    .appName("ShopCustomerCount")
    .getOrCreate()
)

def Payment_buyer_count(
        orders_path: str,
        refunds_path: str,
        start_date: str,
        end_date: str
) -> int:
    """
    Count distinct buyers with a successful payment in [start_date, end_date].

    A buyer qualifies if they have at least one order with
    payment_status == "SUCCESS" whose relevant payment timestamp falls in
    the (inclusive) date range:
      - regular orders (is_presale == "False"): payment_time
      - presale orders (is_presale == "True"): presale_final_payment_time

    Parameters:
    - orders_path: path to the orders CSV
    - refunds_path: path to the refunds CSV.
      NOTE(review): currently unused — refunded buyers are NOT excluded
      from the count. TODO: confirm whether refunds should be subtracted.
    - start_date: start date (format: 'YYYY-MM-DD')
    - end_date: end date (format: 'YYYY-MM-DD')

    Returns:
    - number of distinct paying buyers
    """
    # Read the orders file once (the original code read and parsed the same
    # CSV twice) and apply both date-window rules in a single pass.
    orders_df = spark.read.csv(orders_path, header=True, inferSchema=True)

    # is_presale is compared as the strings "True"/"False"; with
    # inferSchema=True the column could be parsed as boolean instead —
    # TODO confirm against the actual CSV contents.
    regular_in_window = (
        (col("is_presale") == "False")
        & col("payment_time").between(start_date, end_date)
    )
    presale_in_window = (
        (col("is_presale") == "True")
        & col("presale_final_payment_time").between(start_date, end_date)
    )

    paying_buyers_df = (
        orders_df
        .filter(col("payment_status") == "SUCCESS")
        .filter(regular_in_window | presale_in_window)
        .select("buyer_id")
        .distinct()
    )

    # Count of distinct buyer_id values == number of paying buyers.
    return paying_buyers_df.count()

# Example invocation.
if __name__ == "__main__":
    # Bind the result to a distinct name: the original assigned the int
    # result back to `Payment_buyer_count`, shadowing (and effectively
    # destroying) the function itself after the first call.
    buyer_count = Payment_buyer_count(
        orders_path="mock_data/orders.csv",
        refunds_path="mock_data/refunds.csv",
        start_date="2025-07-01",
        end_date="2025-07-10"
    )

    print(f"支付买家数 (2025-07-01至2025-07-10): {buyer_count}")