from pyspark.sql import SparkSession
from pyspark.sql.functions import col, count, sum, avg, date_format, when, month, year
import pyspark.sql.functions as F
# Create the SparkSession — the entry point for DataFrame / SQL work.
# The LEGACY time-parser policy keeps pre-Spark-3.0 date parsing behavior.
spark = (
    SparkSession.builder
    .appName("RefundAnalysis")
    .config("spark.sql.legacy.timeParserPolicy", "LEGACY")
    .getOrCreate()
)

# Load the transaction data, inferring column types from the CSV content.
df = spark.read.csv("transaction_data.csv", header=True, inferSchema=True)

# Expose the DataFrame to Spark SQL as a temporary view.
df.createOrReplaceTempView("transactions")

# 1. Overall refund metrics.
# Improvement over the original: all aggregates are computed in ONE Spark
# action instead of five (two count()s plus two collect()s), and the
# divisions are guarded — on an empty dataset the original raised
# ZeroDivisionError, and sum() returning NULL (None) crashed the ":.2f"
# formatting as well.
overall = df.agg(
    count("*").alias("total_transactions"),
    sum(when(col("is_refund") == 1, 1).otherwise(0)).alias("total_refunds"),
    sum("transaction_amount").alias("total_revenue"),
    sum(when(col("is_refund") == 1, col("transaction_amount"))).alias("total_refund_amount"),
).collect()[0]

total_transactions = overall["total_transactions"]
total_refunds = overall["total_refunds"] or 0
# sum() yields NULL on empty input — normalize to 0.0 so formatting works.
total_revenue = overall["total_revenue"] or 0.0
total_refund_amount = overall["total_refund_amount"] or 0.0

# Guard the ratios: report 0% rather than dividing by zero.
refund_rate = (total_refunds / total_transactions * 100) if total_transactions else 0.0
refunded_revenue_rate = (total_refund_amount / total_revenue * 100) if total_revenue else 0.0

print(f"总交易数: {total_transactions}")
print(f"总退款数: {total_refunds}")
print(f"退款率: {refund_rate:.2f}%")
print(f"总交易额: {total_revenue:.2f}")
print(f"总退款额: {total_refund_amount:.2f}")
print(f"退款金额占比: {refunded_revenue_rate:.2f}%")

# 2. Refund volume and amount broken down by refund reason,
#    most frequent reasons first.
refunds_only = df.filter(col("is_refund") == 1)
refund_reason_counts = (
    refunds_only
    .groupBy("refund_reason")
    .agg(
        count("transaction_id").alias("refund_count"),
        sum("transaction_amount").alias("total_refund_amount"),
    )
    .orderBy(col("refund_count").desc())
)

print("\n按退款原因统计:")
refund_reason_counts.show()

# 3. Per-product-category refund statistics: transaction volume, refund
#    counts/amounts, and refund rates by count and by amount.
is_refunded = col("is_refund") == 1
category_refund_stats = (
    df.groupBy("product_category")
    .agg(
        count("transaction_id").alias("total_transactions"),
        sum(when(is_refunded, 1).otherwise(0)).alias("refund_count"),
        sum(when(is_refunded, col("transaction_amount")).otherwise(0)).alias("refund_amount"),
        sum("transaction_amount").alias("total_amount"),
    )
    # Percentage of transactions refunded, and of revenue refunded.
    .withColumn("refund_rate", col("refund_count") / col("total_transactions") * 100)
    .withColumn("refund_amount_rate", col("refund_amount") / col("total_amount") * 100)
    .orderBy(col("refund_rate").desc())
)

print("\n按产品类别统计退款:")
category_refund_stats.show()

# 4. Daily refund trend: refund count and amount per calendar day.
# NOTE: the "dayly" (sic) variable name is kept as-is because the CSV
# export step further down refers to it.
dayly_refund_trend = (
    df.where(col("is_refund") == 1)
    .withColumn("refund_day", date_format(col("refund_date"), "yyyy-MM-dd"))
    .groupBy("refund_day")
    .agg(
        count("transaction_id").alias("refund_count"),
        sum("transaction_amount").alias("refund_amount"),
    )
    .orderBy("refund_day")
)

print("\n按天退款趋势:")
dayly_refund_trend.show()

# 5. Refund processing time: days elapsed between the transaction and its
#    refund (timestamps cast to epoch seconds, then converted to days).
SECONDS_PER_DAY = 24 * 3600
elapsed_days = (
    col("refund_date").cast("long") - col("transaction_date").cast("long")
) / SECONDS_PER_DAY

refund_processing_time = (
    df.filter(col("is_refund") == 1)
    .withColumn("processing_days", elapsed_days)
    .select(
        avg("processing_days").alias("average_processing_days"),
        F.min("processing_days").alias("min_processing_days"),
        F.max("processing_days").alias("max_processing_days"),
    )
)

print("\n退款处理时间分析:")
refund_processing_time.show()

# Persist the aggregated results as CSV files for downstream visualization.
exports = {
    "refund_reason_stats.csv": refund_reason_counts,
    "category_refund_stats.csv": category_refund_stats,
    "dayly_refund_trend.csv": dayly_refund_trend,
}
for csv_path, frame in exports.items():
    frame.toPandas().to_csv(csv_path, index=False)

# Release all Spark resources.
spark.stop()