from pyspark.sql import SparkSession
from pyspark.sql.functions import col, countDistinct
from pyspark.sql.functions import sum as spark_sum

import 支付买家数2

# Initialize the module-level Spark session shared by all functions below.
# NOTE(review): getOrCreate() reuses an existing session if one is active,
# so importing this module in a Spark job will not start a second session.
spark = SparkSession.builder \
    .appName("ShopCustomerCount") \
    .getOrCreate()

def Average_transaction_value(
        order_info_path: str,
        start_date: str,
        end_date: str
) -> float:
    """
    Compute the average transaction value (total payment amount divided by
    the number of distinct paying users) for paid orders in a date range.

    Parameters:
    - order_info_path: path to the order info CSV (expects columns
      order_time, status, user_id, amount, quantity; header row present)
    - start_date: start date, inclusive (format: 'YYYY-MM-DD')
    - end_date: end date, inclusive (format: 'YYYY-MM-DD')

    Returns:
    - average transaction value rounded to 2 decimal places,
      or 0.0 when there are no paying users in the range
    """
    # Load orders in the date range that were actually paid.
    # NOTE(review): between() compares order_time to date strings — assumes
    # order_time is a date (or 'YYYY-MM-DD'-prefixed string); confirm schema.
    order_info_df = spark.read.csv(order_info_path, header=True, inferSchema=True) \
        .filter(col("order_time").between(start_date, end_date)) \
        .filter(col("status") == "paid") \
        .select("user_id", "amount", "quantity")

    # Total payment amount: sum of (unit price * quantity) over all orders.
    # BUG FIX: the original aggregated with countDistinct, which counts the
    # number of distinct line totals instead of summing them.
    payment_amount = order_info_df.withColumn("payment_amount", col("amount") * col("quantity")) \
        .agg(spark_sum("payment_amount").alias("payment_amount")) \
        .collect()[0]["payment_amount"]

    # Number of paying buyers: count of distinct user IDs.
    paying_users = order_info_df.agg(countDistinct("user_id").alias("user_count")) \
        .collect()[0]["user_count"]

    # Guard against division by zero when no orders matched the filters.
    # (If paying_users > 0 there is at least one row, so payment_amount
    # cannot be None here.)
    if paying_users == 0:
        return 0.0
    return round(payment_amount / paying_users, 2)

if __name__ == "__main__":
    # Demo run against local mock data for a fixed 10-day window.
    order_info_path = "mock_data/order_info.csv"
    start_date = "2025-07-01"
    end_date = "2025-07-10"

    average_transaction_value = Average_transaction_value(order_info_path, start_date, end_date)
    # BUG FIX: the original label said 支付金额 (payment amount) while the
    # value printed is the average transaction value (客单价).
    print(f"指定日期范围内的客单价: {average_transaction_value}")