from pyspark.sql import SparkSession
from pyspark.sql import functions as F
import matplotlib.pyplot as plt
import seaborn as sns
from pyspark.sql.window import Window
import datetime
import os
import pandas as pd
from pyspark.sql import Row

"""
-------------------------------------------------
   Description :	TODO：
   SourceFile  :	GenderModel
   Author      :	itcast team
-------------------------------------------------
"""


# Point Spark at the JVM and at the conda interpreter used by both the
# driver and the executors.
os.environ['JAVA_HOME'] = '/export/server/jdk1.8.0_241/'
os.environ['SPARK_HOME'] = '/export/server/spark'
os.environ['PYSPARK_PYTHON'] = '/root/anaconda3/envs/pyspark_env/bin/python'
os.environ['PYSPARK_DRIVER_PYTHON'] = '/root/anaconda3/envs/pyspark_env/bin/python'

# Local two-core session; 4 shuffle partitions keeps small local jobs snappy.
spark = (
    SparkSession.builder
    .master("local[2]")
    .appName("SparkSQLAppName")
    .config("spark.sql.shuffle.partitions", 4)
    .getOrCreate()
)

# JDBC connection settings shared by every source table.
mysql_props = {
    "user": "root",
    "password": "123456",
    "driver": "com.mysql.jdbc.Driver"
}
mysql_url = "jdbc:mysql://up01:3306/ShopCustomer?characterEncoding=utf-8"


def _read_mysql_table(table_name):
    """Load one MySQL table into a Spark DataFrame over JDBC."""
    return spark.read.jdbc(url=mysql_url, table=table_name, properties=mysql_props)


# Source tables: visit events, behavior events, payments, user dimension.
user_visits = _read_mysql_table("user_visits")
user_actions = _read_mysql_table("user_actions")
transactions = _read_mysql_table("transactions")
user_dimension = _read_mysql_table("user_dimension")

# Analysis window: a single calendar day, inclusive at both ends.
analysis_date = "2023-07-10"
start_date = f"{analysis_date} 00:00:00"
end_date = f"{analysis_date} 23:59:59"

# Only this shop's data is analyzed.
shop_id = "s2001"

# Unique visitors (UV)
def calculate_uv(df_visits, shop_id, start_date, end_date):
    """Count the shop's distinct visitors within [start_date, end_date].

    Returns a one-row DataFrame with a single column ``uv``.
    """
    in_window = (
        (F.col("shop_id") == shop_id)
        & (F.col("visit_time") >= start_date)
        & (F.col("visit_time") <= end_date)
    )
    return df_visits.filter(in_window).agg(F.countDistinct("user_id").alias("uv"))

# Conversion rate
def calculate_conversion(df_visits, df_transactions, shop_id, date_range):
    """Fraction of the shop's visitors in date_range who also completed a payment.

    Args:
        df_visits: visit events with shop_id/user_id/visit_time columns.
        df_transactions: payments with shop_id/user_id/payment_time/status columns.
        shop_id: shop to analyze.
        date_range: (start, end) inclusive timestamp strings.

    Returns:
        float in [0, 1]; 0.0 when there were no visitors at all (the original
        raised ZeroDivisionError in that case).
    """
    visitors = df_visits.filter(
        (F.col("shop_id") == shop_id) &
        (F.col("visit_time") >= date_range[0]) &
        (F.col("visit_time") <= date_range[1])
    ).select("user_id").distinct()

    # status == 1 is treated as a completed payment — TODO confirm against schema.
    buyers = df_transactions.filter(
        (F.col("shop_id") == shop_id) &
        (F.col("payment_time") >= date_range[0]) &
        (F.col("payment_time") <= date_range[1]) &
        (F.col("status") == 1)
    ).select("user_id").distinct()

    visitor_count = visitors.count()
    if visitor_count == 0:
        # No traffic in the window: define the conversion rate as 0 rather
        # than dividing by zero.
        return 0.0

    # Only buyers who also visited in the window count as conversions.
    return buyers.join(visitors, "user_id", "inner").count() / visitor_count


# Materialize the single-day UV and conversion metrics.
uv_result = calculate_uv(user_visits, shop_id, start_date, end_date)

conversion_rate = calculate_conversion(
    user_visits, transactions, shop_id, (start_date, end_date)
)

# UV value (revenue per unique visitor)
def calculate_uv_value(df_visits, df_transactions, shop_id, date_range):
    """Total completed-payment revenue divided by distinct visitor count.

    Args:
        df_visits: visit events with shop_id/user_id/visit_time columns.
        df_transactions: payments with shop_id/payment_time/status/payment_amount.
        shop_id: shop to analyze.
        date_range: (start, end) inclusive timestamp strings.

    Returns:
        float; 0.0 when there are no visitors (the original raised
        ZeroDivisionError). A window with no payments counts as 0 revenue.
    """
    visitors = df_visits.filter(
        (F.col("shop_id") == shop_id) &
        (F.col("visit_time") >= date_range[0]) &
        (F.col("visit_time") <= date_range[1])
    ).select("user_id").distinct()

    # sum() over an empty set yields NULL, hence the `or 0.0` fallback.
    total_payment = df_transactions.filter(
        (F.col("shop_id") == shop_id) &
        (F.col("payment_time") >= date_range[0]) &
        (F.col("payment_time") <= date_range[1]) &
        (F.col("status") == 1)
    ).agg(F.sum("payment_amount").alias("total_payment")).first()["total_payment"] or 0.0

    visitor_count = visitors.count()
    if visitor_count == 0:
        # No visitors: define the UV value as 0 rather than dividing by zero.
        return 0.0

    return total_payment / visitor_count


# Per-visitor revenue for the analysis day.
uv_value = calculate_uv_value(
    user_visits, transactions, shop_id, (start_date, end_date)
)

# Average order value per buyer
def calculate_avg_order_value(df_transactions, shop_id, date_range):
    """Total completed-payment revenue divided by the distinct buyer count.

    Returns 0.0 when no one bought in the window.
    """
    start, end = date_range
    totals = df_transactions.filter(
        (F.col("shop_id") == shop_id)
        & (F.col("payment_time") >= start)
        & (F.col("payment_time") <= end)
        & (F.col("status") == 1)
    ).agg(
        F.sum("payment_amount").alias("total_payment"),
        F.countDistinct("user_id").alias("buyer_count"),
    ).first()

    if totals["buyer_count"] > 0:
        return totals["total_payment"] / totals["buyer_count"]
    return 0.0


# Average spend per paying customer on the analysis day.
avg_order_value = calculate_avg_order_value(transactions, shop_id, (start_date, end_date))

# Add-to-cart user count
def calculate_add_to_cart_users(df_actions, shop_id, date_range):
    """Count distinct users with an 'add_cart' action for the shop in date_range."""
    start, end = date_range
    return (
        df_actions
        .filter(F.col("shop_id") == shop_id)
        .filter((F.col("action_time") >= start) & (F.col("action_time") <= end))
        .filter(F.col("action_type") == "add_cart")
        .select("user_id")
        .distinct()
        .count()
    )


# Distinct add-to-cart users for the analysis day.
add_cart_users = calculate_add_to_cart_users(user_actions, shop_id, (start_date, end_date))

# New vs. returning visitor counts
def calculate_new_return_visitors(df_visits, df_user_dim, shop_id, date):
    """Split the shop's visitors on `date` into new and returning counts.

    A visitor is "new" when their first_visit_date equals `date`, "returning"
    when it is earlier. NOTE(review): visitors absent from the user dimension
    (null first_visit_date after the left join) fall into neither bucket, so
    new + returning may be less than the day's UV — confirm this is intended.
    """
    # Distinct users who visited the shop on the given date.
    daily_visitors = (
        df_visits
        .filter((F.col("shop_id") == shop_id) & (F.to_date("visit_time") == date))
        .select("user_id")
        .distinct()
    )

    # Attach each visitor's first-ever visit date from the dimension table.
    with_first_visit = daily_visitors.join(
        df_user_dim.select("user_id", "first_visit_date"), "user_id", "left"
    )

    new_count = with_first_visit.filter(F.to_date("first_visit_date") == date).count()
    returning_count = with_first_visit.filter(F.to_date("first_visit_date") < date).count()

    return new_count, returning_count


# Pass the date as a Column so Spark compares dates, not raw strings.
new_visitors, return_visitors = calculate_new_return_visitors(
    user_visits, user_dimension, shop_id, F.to_date(F.lit(analysis_date))
)

# Bounce rate
def calculate_bounce_rate(df_visits, df_actions, shop_id, date_range):
    """Share of the shop's visitors in date_range with no engagement action.

    A visitor is "engaged" if they performed at least one of: click, add_cart,
    favorite, order, comment, ask. Bounce rate = 1 - engaged / all visitors.

    Args:
        df_visits: visit events with shop_id/user_id/visit_time columns.
        df_actions: behavior events with shop_id/user_id/action_time/action_type.
        shop_id: shop to analyze.
        date_range: (start, end) inclusive timestamp strings.

    Returns:
        float in [0, 1]; 0.0 when there are no visitors at all (the original
        raised ZeroDivisionError in that case).
    """
    # All distinct visitors in the window.
    all_visitors = df_visits.filter(
        (F.col("shop_id") == shop_id) &
        (F.col("visit_time") >= date_range[0]) &
        (F.col("visit_time") <= date_range[1])
    ).select("user_id").distinct()

    # Visitors who performed any interaction beyond the plain visit.
    engaged_visitors = df_actions.filter(
        (F.col("shop_id") == shop_id) &
        (F.col("action_time") >= date_range[0]) &
        (F.col("action_time") <= date_range[1]) &
        F.col("action_type").isin(["click", "add_cart", "favorite", "order", "comment", "ask"])
    ).select("user_id").distinct()

    total_visitors = all_visitors.count()
    if total_visitors == 0:
        # No traffic in the window: define the bounce rate as 0 rather than
        # dividing by zero.
        return 0.0

    return 1 - (engaged_visitors.count() / total_visitors)


# Share of the day's visitors who never interacted with the shop.
bounce_rate = calculate_bounce_rate(
    user_visits, user_actions, shop_id, (start_date, end_date)
)

# NOTE: the duplicate `from pyspark.sql import Row` that used to sit here was
# removed — Row is already imported at the top of the file.

# Collect all computed metrics for the analysis day into one record.
metrics_result = {
    "analysis_date": analysis_date,
    "shop_id": shop_id,
    "uv": uv_result.first()["uv"],
    "conversion_rate": round(conversion_rate, 4),
    "uv_value": round(uv_value, 2),
    "avg_order_value": round(avg_order_value, 2),
    "add_to_cart_users": add_cart_users,
    "new_visitors": new_visitors,
    "return_visitors": return_visitors,
    "bounce_rate": round(bounce_rate, 4)
}

# Print a human-readable report.
print("="*50)
print(f"电商店铺核心指标分析报告 - {analysis_date}")
print(f"店铺ID: {shop_id}")
print("="*50)
print(f"1. 访客数(UV): {metrics_result['uv']}")
print(f"2. 转化率: {metrics_result['conversion_rate']*100:.2f}%")
print(f"3. UV价值: ¥{metrics_result['uv_value']:.2f}")
print(f"4. 客单价: ¥{metrics_result['avg_order_value']:.2f}")
print(f"5. 加购人数: {metrics_result['add_to_cart_users']}")
print(f"6. 新访客数: {metrics_result['new_visitors']}")
print(f"7. 老访客数: {metrics_result['return_visitors']}")
print(f"8. 跳失率: {metrics_result['bounce_rate']*100:.2f}%")
print("="*50)

# Build a one-row DataFrame via a named Row so the column names carry over.
MetricsRow = Row("analysis_date", "shop_id", "uv", "conversion_rate",
                "uv_value", "avg_order_value", "add_to_cart_users",
                "new_visitors", "return_visitors", "bounce_rate")

metrics_row = MetricsRow(
    metrics_result["analysis_date"],
    metrics_result["shop_id"],
    metrics_result["uv"],
    metrics_result["conversion_rate"],
    metrics_result["uv_value"],
    metrics_result["avg_order_value"],
    metrics_result["add_to_cart_users"],
    metrics_result["new_visitors"],
    metrics_result["return_visitors"],
    metrics_result["bounce_rate"]
)

metrics_df = spark.createDataFrame([metrics_row])

metrics_df.show()
# Persist the daily metrics to MySQL (currently disabled).
# metrics_df.write.jdbc(
#     url=mysql_url,
#     table="ecommerce_metrics_daily",
#     mode="append",
#     properties=mysql_props
# )

# Daily UV trend: distinct users per calendar day. NOTE(review): this series
# is not filtered by shop_id, unlike the single-day metrics above — confirm
# that an all-shop trend is intended.
daily_uv = (
    user_visits
    .groupBy(F.date_format("visit_time", "yyyy-MM-dd").alias("date"))
    .agg(F.countDistinct("user_id").alias("uv"))
    .orderBy("date")
)

# Pull the small aggregate to the driver for plotting.
uv_pd = daily_uv.toPandas()

# Line chart of the daily UV series.
plt.figure(figsize=(12, 6))
sns.lineplot(data=uv_pd, x="date", y="uv", marker="o")
plt.title("Daily Unique Visitors (UV) Trend")
plt.xlabel("Date")
plt.ylabel("UV Count")
plt.xticks(rotation=45)
plt.grid(True)
plt.show()

# Event counts per hour of day and action type.
hourly_behavior = (
    user_actions
    .groupBy(F.hour("action_time").alias("hour"), "action_type")
    .count()
    .orderBy("hour", "action_type")
)

# Pivot to an hour x action_type matrix for the heatmap.
behavior_pd = hourly_behavior.toPandas()
heatmap_data = behavior_pd.pivot(index="hour", columns="action_type", values="count")

# Heatmap of user activity by hour; missing cells mean zero events.
plt.figure(figsize=(12, 6))
sns.heatmap(heatmap_data.fillna(0), cmap="YlGnBu", annot=True, fmt=".0f")
plt.title("User Activity Heatmap by Hour of Day")
plt.xlabel("Action Type")
plt.ylabel("Hour of Day")
plt.show()

# Funnel stage sizes (distinct users per stage). NOTE(review): these counts
# are not filtered by shop_id or date, unlike the metrics above — confirm an
# all-time, all-shop funnel is intended.
funnel_stages = {
    "Visit": user_visits.select("user_id").distinct().count(),
    "Product View": user_actions.filter(F.col("action_type") == "click").select("user_id").distinct().count(),
    "Add to Cart": user_actions.filter(F.col("action_type") == "add_cart").select("user_id").distinct().count(),
    "Purchase": transactions.select("user_id").distinct().count()
}

# Tabulate stage counts and their share of the Visit stage.
stage_names = list(funnel_stages.keys())
stage_counts = list(funnel_stages.values())
funnel_pd = pd.DataFrame({
    "Stage": stage_names,
    "Count": stage_counts,
    "Percentage": [c / funnel_stages["Visit"] * 100 for c in stage_counts]
})

# Horizontal bar chart styled as a funnel.
plt.figure(figsize=(10, 6))
sns.barplot(data=funnel_pd, x="Percentage", y="Stage", palette="Blues_d", orient="h")
plt.title("Conversion Funnel Analysis")
plt.xlabel("Conversion Rate (%)")
plt.ylabel("Funnel Stage")
plt.xlim(0, 100)

# Annotate each bar with its percentage and absolute count.
for row_idx, (stage_count, stage_pct) in enumerate(zip(funnel_pd["Count"], funnel_pd["Percentage"])):
    plt.text(stage_pct + 2, row_idx, f"{stage_pct:.1f}% (n={stage_count})", va="center")

plt.show()

# Per-user behavior features: visit frequency joined with spend totals.
user_features = (
    user_visits
    .groupBy("user_id")
    .agg(F.count("*").alias("visit_count"))
    .join(
        transactions.groupBy("user_id").agg(
            F.sum("payment_amount").alias("total_spend"),
            F.count("*").alias("purchase_count"),
        ),
        "user_id",
        "left",
    )
    .fillna(0)  # users with no purchases get 0 spend / 0 purchases
)

# Bring the per-user features to the driver for plotting.
features_pd = user_features.toPandas()

# Visual segmentation: visits vs. total spend, bubble size = purchase count.
plt.figure(figsize=(12, 8))
sns.scatterplot(
    data=features_pd,
    x="visit_count",
    y="total_spend",
    size="purchase_count",
    sizes=(20, 200),
    alpha=0.7,
)
plt.title("Customer Value Segmentation")
plt.xlabel("Visit Count")
plt.ylabel("Total Spend (RMB)")
plt.grid(True)
plt.show()

# Classify each visit as New vs. Returning
def classify_visitor(user_id, first_visit_date, visit_date):
    """Return "New" when the visit happened on the user's first-visit date.

    Bug fix: the original compared visit_time (a full timestamp) directly to
    first_visit_date (a date), so the two values were essentially never equal
    and nearly every visit was labelled "Returning". Both sides are now
    normalized to a calendar date before comparing. Accepts date/datetime
    objects or ISO "YYYY-MM-DD[ HH:MM:SS]" strings, as delivered to the UDF.
    """
    def _as_date(value):
        # datetime check must come first: datetime is a subclass of date.
        if isinstance(value, datetime.datetime):
            return value.date()
        if isinstance(value, str):
            # Keep only the YYYY-MM-DD prefix of a timestamp string.
            return datetime.date.fromisoformat(value[:10])
        return value  # already a datetime.date (or None)

    return "New" if _as_date(visit_date) == _as_date(first_visit_date) else "Returning"

# Wrap the classifier as a Spark UDF (returns StringType by default).
classify_udf = F.udf(classify_visitor)

# Label every visit with its visitor type, then count visits per type.
visitor_type = (
    user_visits
    .join(user_dimension.select("user_id", "first_visit_date"), "user_id")
    .withColumn("visitor_type", classify_udf("user_id", "first_visit_date", "visit_time"))
    .groupBy("visitor_type")
    .count()
)

# Small aggregate — safe to collect to the driver.
visitor_pd = visitor_type.toPandas()

# Bar chart comparing new vs. returning visit counts.
plt.figure(figsize=(8, 6))
sns.barplot(data=visitor_pd, x="visitor_type", y="count", palette="Set2")
plt.title("New vs Returning Visitors")
plt.xlabel("Visitor Type")
plt.ylabel("Count")
plt.show()

import plotly.express as px

# Pull the full transactions table to the driver for interactive plotting.
transactions_pd = transactions.toPandas()

# Interactive histogram of payment amounts.
fig = px.histogram(
    transactions_pd,
    x="payment_amount",
    nbins=20,
    title="Transaction Amount Distribution",
    labels={"payment_amount": "Payment Amount (RMB)"},
    template="plotly_white",
)
fig.update_layout(bargap=0.1)
fig.show()

# Daily counts per action type, collected for an interactive time series.
actions_pd = (
    user_actions
    .withColumn("date", F.date_format("action_time", "yyyy-MM-dd"))
    .groupBy("date", "action_type")
    .count()
    .orderBy("date")
    .toPandas()
)

# One line per action type over time.
fig = px.line(
    actions_pd,
    x="date",
    y="count",
    color="action_type",
    title="Daily User Actions by Type",
    labels={"count": "Action Count", "date": "Date"},
    template="plotly_white",
)
fig.update_layout(hovermode="x unified")
fig.show()

from matplotlib.gridspec import GridSpec

# Combined dashboard: UV trend, funnel, visitor mix, value scatter, heatmap.
plt.figure(figsize=(18, 12))
gs = GridSpec(3, 3, figure=plt.gcf())

# 1. Daily UV trend across the full top row.
ax1 = plt.subplot(gs[0, :])
sns.lineplot(data=uv_pd, x="date", y="uv", marker="o", ax=ax1)
ax1.set_title("Daily Unique Visitors Trend")
# Pin tick positions before relabeling so matplotlib keeps them aligned.
ax1.set_xticks(range(len(uv_pd["date"])))
ax1.set_xticklabels(uv_pd["date"], rotation=45)
ax1.grid(True)

# 2. Conversion funnel.
ax2 = plt.subplot(gs[1, 0])
sns.barplot(data=funnel_pd, x="Percentage", y="Stage", palette="Blues_d", orient="h", ax=ax2)
ax2.set_title("Conversion Funnel")

# 3. New vs. returning visitors.
ax3 = plt.subplot(gs[1, 1])
sns.barplot(data=visitor_pd, x="visitor_type", y="count", palette="Set2", ax=ax3)
ax3.set_title("New vs Returning Visitors")

# 4. Customer value scatter.
ax4 = plt.subplot(gs[1, 2])
sns.scatterplot(data=features_pd, x="visit_count", y="total_spend",
               size="purchase_count", sizes=(20, 200), alpha=0.7, ax=ax4)
ax4.set_title("Customer Value Segmentation")

# 5. Hourly activity heatmap across the bottom row.
ax5 = plt.subplot(gs[2, :])
sns.heatmap(heatmap_data.fillna(0), cmap="YlGnBu", annot=True, fmt=".0f", ax=ax5)
ax5.set_title("User Activity by Hour")

plt.tight_layout()
plt.suptitle("Ecommerce Performance Dashboard", y=1.02, fontsize=16)
plt.show()

# Release Spark resources.
spark.stop()