from pyspark.sql import SparkSession
from pyspark.sql.functions import col, count, sum, avg, max, min, when, date_format, to_date
import random
from pyspark.sql import functions as F
from datetime import datetime, timedelta

# Build the SparkSession used by the whole analysis script.
# 4 GB for both driver and executor is plenty for the in-memory mock data.
_builder = SparkSession.builder.appName("CustomerThemeStoreAnalysis")
_builder = _builder.config("spark.driver.memory", "4g")
_builder = _builder.config("spark.executor.memory", "4g")
spark = _builder.getOrCreate()


# Generate mock data
def generate_customer_data(num_customers=1000):
    """Fabricate a list of mock customer records.

    Args:
        num_customers: How many customers to generate; IDs run 1..num_customers.

    Returns:
        A list of 8-tuples: (customer_id, customer_name, gender, age,
        member_level, region, registration_date, customer_type).
    """

    def _one_customer(cid):
        # Attributes are drawn sequentially from the module-level `random`
        # stream, one customer at a time.
        sex = random.choice(["男", "女"])
        years_old = random.randint(18, 70)  # age 18-70 inclusive
        level = random.choice(["普通会员", "银卡会员", "金卡会员", "钻石会员"])
        area = random.choice(["华东", "华北", "华南", "西南", "西北", "东北", "中部"])
        # Registration date falls somewhere within the past year.
        signup = (datetime.now() - timedelta(days=random.randint(0, 365))).strftime("%Y-%m-%d")
        kind = random.choice(["新客户", "老客户", "流失客户", "潜在客户"])
        return (cid, f"客户{cid}", sex, years_old, level, area, signup, kind)

    return [_one_customer(cid) for cid in range(1, num_customers + 1)]


def generate_order_data(customer_ids, num_orders=5000):
    """Fabricate a list of mock order records.

    Args:
        customer_ids: Pool of customer IDs that orders are attached to.
        num_orders: How many orders to generate; IDs run 1..num_orders.

    Returns:
        A list of 8-tuples: (order_id, customer_id, order_date, order_amount,
        payment_method, order_status, product_category, quantity).
    """
    categories = ["服装", "电子产品", "食品", "家居", "美妆", "运动", "图书"]

    orders = []
    for oid in range(1, num_orders + 1):
        buyer = random.choice(customer_ids)
        # Order date falls somewhere within the past year.
        placed_on = (datetime.now() - timedelta(days=random.randint(0, 365))).strftime("%Y-%m-%d")
        amount = round(random.uniform(10, 10000), 2)  # 10-10000 yuan, 2 decimals
        pay_with = random.choice(["支付宝", "微信支付", "银行卡", "现金"])
        status = random.choice(["已完成", "已取消", "待支付", "已发货", "已收货"])
        category = random.choice(categories)
        qty = random.randint(1, 20)  # 1-20 items per order
        orders.append((oid, buyer, placed_on, amount, pay_with, status, category, qty))

    return orders


# Fabricate the mock dataset: 1000 customers and 5000 orders whose
# customer_id values reference the generated customers.
customer_data = generate_customer_data(1000)
order_data = generate_order_data([row[0] for row in customer_data], 5000)

# Column layouts for the two DataFrames.
CUSTOMER_COLUMNS = ["customer_id", "customer_name", "gender", "age",
                    "member_level", "region", "registration_date", "customer_type"]
ORDER_COLUMNS = ["order_id", "customer_id", "order_date", "order_amount",
                 "payment_method", "order_status", "product_category", "quantity"]

customer_df = spark.createDataFrame(customer_data, CUSTOMER_COLUMNS)
order_df = spark.createDataFrame(order_data, ORDER_COLUMNS)

# Expose both DataFrames to Spark SQL as temp views.
customer_df.createOrReplaceTempView("customers")
order_df.createOrReplaceTempView("orders")

# ---- Key scalar metrics ----

# 1. Total number of customers.
total_customers = customer_df.count()

# 2. Active customers: placed at least one order in the last 30 days.
_active_row = spark.sql("""
                             SELECT COUNT(DISTINCT customer_id) as active_customers
                             FROM orders
                             WHERE order_date >= DATE_SUB(CURRENT_DATE, 30)
                             """).first()
active_customers = _active_row["active_customers"]

# 3. Churned customers: customers typed '老客户' whose latest order is more
#    than 90 days old, or who have never ordered at all (NULL after the join).
_churn_row = spark.sql("""
                              SELECT COUNT(DISTINCT c.customer_id) as churned_customers
                              FROM customers c
                                       LEFT JOIN (SELECT customer_id, MAX(order_date) as last_order_date
                                                  FROM orders
                                                  GROUP BY customer_id) o ON c.customer_id = o.customer_id
                              WHERE c.customer_type = '老客户'
                                AND (o.last_order_date < DATE_SUB(CURRENT_DATE, 90) OR o.last_order_date IS NULL)
                              """).first()
churned_customers = _churn_row["churned_customers"]

# 4. Average amount across completed ('已完成') orders.
_avg_row = spark.sql("""
                             SELECT AVG(order_amount) as avg_order_amount
                             FROM orders
                             WHERE order_status = '已完成'
                             """).first()
avg_order_amount = _avg_row["avg_order_amount"]

# 5. Customer distribution by region: head count plus percentage of all customers.
region_distribution = spark.sql("""
                                SELECT region,
                                       COUNT(customer_id)                                                      as customer_count,
                                       ROUND(COUNT(customer_id) * 100.0 / (SELECT COUNT(*) FROM customers),
                                             2)                                                                as percentage
                                FROM customers
                                GROUP BY region
                                ORDER BY customer_count DESC
                                """)

# 6. Customer distribution by membership level; the CASE expression orders the
#    tiers from regular ('普通会员') up to diamond ('钻石会员') rather than alphabetically.
member_distribution = spark.sql("""
                                SELECT member_level,
                                       COUNT(customer_id)                                                      as customer_count,
                                       ROUND(COUNT(customer_id) * 100.0 / (SELECT COUNT(*) FROM customers),
                                             2)                                                                as percentage
                                FROM customers
                                GROUP BY member_level
                                ORDER BY CASE
                                             WHEN member_level = '普通会员' THEN 1
                                             WHEN member_level = '银卡会员' THEN 2
                                             WHEN member_level = '金卡会员' THEN 3
                                             WHEN member_level = '钻石会员' THEN 4
                                             ELSE 5
                                             END
                                """)

# 7. Customer distribution by customer type (new / existing / churned / potential).
customer_type_distribution = spark.sql("""
                                       SELECT customer_type,
                                              COUNT(customer_id)                                                      as customer_count,
                                              ROUND(COUNT(customer_id) * 100.0 / (SELECT COUNT(*) FROM customers),
                                                    2)                                                                as percentage
                                       FROM customers
                                       GROUP BY customer_type
                                       """)

# 8. Monthly sales trend over completed ('已完成') orders: order count,
#    total and average sales, grouped by yyyy-MM month.
monthly_sales = spark.sql("""
                          SELECT DATE_FORMAT(order_date, 'yyyy-MM') as month,
        COUNT(order_id) as order_count,
        SUM(order_amount) as total_sales,
        AVG(order_amount) as avg_sales
                          FROM orders
                          WHERE order_status = '已完成'
                          GROUP BY DATE_FORMAT(order_date, 'yyyy-MM')
                          ORDER BY month
                          """)

# 9. Most popular product categories among completed orders, ranked by revenue.
popular_categories = spark.sql("""
                               SELECT product_category,
                                      COUNT(order_id)   as order_count,
                                      SUM(quantity)     as total_quantity,
                                      SUM(order_amount) as total_sales
                               FROM orders
                               WHERE order_status = '已完成'
                               GROUP BY product_category
                               ORDER BY total_sales DESC
                               """)

# 10. High-value customers via RFM analysis over completed orders:
#     - recency:   days since the customer's latest order
#     - frequency: number of orders
#     - monetary:  total order amount
#     Each dimension is quartiled with NTILE(4) (recency ascending so recent
#     buyers score high; frequency/monetary descending), the three scores are
#     summed, and only customers with a combined score >= 10 are kept.
high_value_customers = spark.sql("""
                                 WITH customer_rfm AS (SELECT customer_id,
                                                              DATEDIFF(CURRENT_DATE, MAX(order_date)) as recency,
                                                              COUNT(order_id)                         as frequency,
                                                              SUM(order_amount)                       as monetary
                                                       FROM orders
                                                       WHERE order_status = '已完成'
                                                       GROUP BY customer_id),
                                      rfm_scores AS (SELECT customer_id,
                                                            recency,
                                                            frequency,
                                                            monetary,
                                                            NTILE(4) OVER (ORDER BY recency ASC) as r_score, NTILE(4) OVER (ORDER BY frequency DESC) as f_score, NTILE(4) OVER (ORDER BY monetary DESC) as m_score
                                                     FROM customer_rfm),
                                      rfm_combined AS (SELECT customer_id,
                                                              recency,
                                                              frequency,
                                                              monetary,
                                                              r_score,
                                                              f_score,
                                                              m_score,
                                                              CONCAT(r_score, f_score, m_score) as rfm_segment,
                                                              r_score + f_score + m_score       as rfm_score
                                                       FROM rfm_scores)
                                 SELECT c.customer_id,
                                        c.customer_name,
                                        c.member_level,
                                        r.recency,
                                        r.frequency,
                                        r.monetary,
                                        r.rfm_score,
                                        CASE
                                            WHEN r.rfm_score >= 10 THEN '高价值客户'
                                            WHEN r.rfm_score >= 6 THEN '中等价值客户'
                                            ELSE '低价值客户'
                                            END as customer_value
                                 FROM rfm_combined r
                                          JOIN customers c ON r.customer_id = c.customer_id
                                 WHERE r.rfm_score >= 10
                                 ORDER BY r.monetary DESC
                                 """)

# ---- Print the analysis report ----
print("\n===== 客户主题店铺客户工单分析报告 =====")
print(f"\n1. 客户总数: {total_customers}")
print(f"2. 活跃客户数(过去30天): {active_customers}")
print(f"3. 流失客户数(过去90天): {churned_customers}")
# AVG() yields NULL (Python None) when no order has status '已完成';
# formatting None with :.2f would raise TypeError, so guard for it.
if avg_order_amount is not None:
    print(f"4. 平均订单金额: {avg_order_amount:.2f}元")
else:
    print("4. 平均订单金额: 无已完成订单")

print("\n5. 客户地域分布:")
region_distribution.show()

print("\n6. 会员等级分布:")
member_distribution.show()

print("\n7. 客户类型分布:")
customer_type_distribution.show()

# Show only the 6 most recent months of the trend.
print("\n8. 每月销售额趋势(最近6个月):")
monthly_sales.orderBy(col("month").desc()).limit(6).show()

print("\n9. 最受欢迎的商品类别:")
popular_categories.show()

print("\n10. 高价值客户列表:")
high_value_customers.show(10)

# Optionally persist the result tables to CSV (left disabled; uncomment to enable).
# try:
#     print("\n正在将分析结果保存到CSV文件...")
#     region_distribution.write.csv("region_distribution.csv", header=True, mode="overwrite")
#     member_distribution.write.csv("member_distribution.csv", header=True, mode="overwrite")
#     monthly_sales.write.csv("monthly_sales.csv", header=True, mode="overwrite")
#     high_value_customers.write.csv("high_value_customers.csv", header=True, mode="overwrite")
#     print("保存成功！")
# except Exception as e:
#     print(f"保存失败: {e}")

# Shut down the SparkSession and release its resources.
spark.stop()