package cn.hfuu.spark;

import org.apache.spark.sql.*;
import org.apache.spark.sql.functions.*;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

/**
 * E-commerce user-behavior analysis job.
 *
 * <p>Reads purchase events from a JSON file on HDFS into a typed DataFrame,
 * registers it as the temp view {@code user_behavior}, and computes five
 * aggregate reports which are printed to stdout:
 * <ol>
 *   <li>total sales per product</li>
 *   <li>top-10 products by sales</li>
 *   <li>total purchase count per user</li>
 *   <li>top-10 users by purchase count</li>
 *   <li>each user's top-3 products by amount spent</li>
 * </ol>
 */
public class SparkJson {

    public static void main(String[] args) {
        // Create the SparkSession. local[*] uses all local cores;
        // adjust the master for cluster deployment.
        SparkSession spark = SparkSession.builder()
                .appName("ECommerceUserBehaviorAnalysis")
                .master("local[*]")
                .getOrCreate();

        try {
            // Explicit schema so the JSON reader does not have to infer types.
            // NOTE(review): FloatType is lossy for monetary values — consider
            // DecimalType for price; quantity is typically integral — confirm
            // against the producing system before changing.
            // NOTE(review): the field is named "timestamp" but typed DateType
            // (day precision only) — confirm whether TimestampType is intended.
            StructType schema = new StructType(new StructField[]{
                    DataTypes.createStructField("userId", DataTypes.StringType, true),
                    DataTypes.createStructField("orderId", DataTypes.StringType, true),
                    DataTypes.createStructField("productId", DataTypes.LongType, true),
                    DataTypes.createStructField("category", DataTypes.StringType, true),
                    DataTypes.createStructField("price", DataTypes.FloatType, true),
                    DataTypes.createStructField("quantity", DataTypes.FloatType, true),
                    DataTypes.createStructField("timestamp", DataTypes.DateType, true)
            });

            // Load the raw events.
            // NOTE(review): "hdfs://home/fan/data.json" treats "home" as the
            // NameNode authority — likely should be "hdfs:///home/fan/data.json"
            // or "hdfs://<namenode>:<port>/home/fan/data.json"; verify the path.
            Dataset<Row> data = spark.read().schema(schema).json("hdfs://home/fan/data.json");

            // Register the DataFrame as a SQL temp view for the queries below.
            data.createOrReplaceTempView("user_behavior");

            // 1. Total sales per product (price * quantity summed per row).
            Dataset<Row> totalSalesPerProduct = spark.sql(
                    "SELECT productId, SUM(price * quantity) AS totalSales " +
                            "FROM user_behavior " +
                            "GROUP BY productId"
            );

            // 2. Top-10 products by total sales.
            Dataset<Row> top10Products =
                    totalSalesPerProduct.orderBy(functions.col("totalSales").desc()).limit(10);

            // 3. Total purchase count per user (one row = one purchase event).
            Dataset<Row> totalPurchasesPerUser = spark.sql(
                    "SELECT userId, COUNT(*) AS totalPurchases " +
                            "FROM user_behavior " +
                            "GROUP BY userId"
            );

            // 4. Top-10 users by purchase count.
            Dataset<Row> top10Users =
                    totalPurchasesPerUser.orderBy(functions.col("totalPurchases").desc()).limit(10);

            // 5. Each user's top-3 products by total amount spent.
            //    RANK() is computed over the per-(user, product) spend; the
            //    filter keeps ranks 1..3. Alias renamed from "rank" (a reserved
            //    keyword in ANSI-mode Spark SQL) to "spendRank".
            Dataset<Row> topSpendingProductPerUser = spark.sql(
                    "SELECT userId, productId, " +
                            "SUM(price * quantity) AS totalSpent, " +
                            "RANK() OVER (PARTITION BY userId ORDER BY SUM(price * quantity) DESC) AS spendRank " +
                            "FROM user_behavior " +
                            "GROUP BY userId, productId"
            ).filter(functions.col("spendRank").$less$eq(3));

            // Print all five reports.
            totalSalesPerProduct.show();
            top10Products.show();
            totalPurchasesPerUser.show();
            top10Users.show();
            topSpendingProductPerUser.show();
        } finally {
            // Always release the session, even if a query or the read fails.
            spark.stop();
        }
    }
}
