package cn.hfuu.spark;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.*;

/**
 * Ad-hoc retail analytics over four CSV exports (customers, orders,
 * order_items, products): prints the top-spending customer, the
 * best-selling product (id and name), and the state with the highest
 * total spend.
 *
 * <p>Usage: {@code CountCustomersBean [dataDir]} where {@code dataDir} is the
 * directory URI containing the CSV files (defaults to {@code file:///usr/local},
 * the original hard-coded location).
 */
public class CountCustomersBean {

    /** Default directory URI holding the headerless CSV exports. */
    private static final String DEFAULT_DATA_DIR = "file:///usr/local";

    public static void main(String[] args) {
        // Generalization: the data directory used to be hard-coded four times;
        // allow an override via args[0] while keeping the original default.
        final String dataDir = args.length > 0 ? args[0] : DEFAULT_DATA_DIR;

        SparkSession spark = SparkSession.builder()
                .appName("CustomerAnalysis")
                .master("local")
                .getOrCreate();
        try {
            // Load each CSV with an explicit schema and register it for Spark SQL.
            loadCsv(spark, customersSchema(), dataDir + "/customers.csv")
                    .createOrReplaceTempView("customers");
            loadCsv(spark, ordersSchema(), dataDir + "/orders.csv")
                    .createOrReplaceTempView("orders");
            loadCsv(spark, orderItemsSchema(), dataDir + "/order_items.csv")
                    .createOrReplaceTempView("order_items");
            // Kept as a local as well: the best-selling-product-name step joins
            // against this Dataset directly rather than going through SQL.
            Dataset<Row> products = loadCsv(spark, productsSchema(), dataDir + "/products.csv");
            products.createOrReplaceTempView("products");

            // 1) Customer with the highest total spend across all their orders.
            Dataset<Row> topSpendingCustomer = spark.sql(
                    "SELECT c.customer_fname, c.customer_lname, SUM(oi.order_item_subtotal) as total_spent " +
                            "FROM customers c JOIN orders o ON c.customer_id = o.order_customer_id " +
                            "JOIN order_items oi ON o.order_id = oi.order_item_order_id " +
                            "GROUP BY c.customer_id, c.customer_fname, c.customer_lname " +
                            "ORDER BY total_spent DESC LIMIT 1"
            );
            System.out.println("Top Spending Customer:");
            topSpendingCustomer.show();

            // 2) Product id with the highest total quantity sold.
            Dataset<Row> bestSellingProduct = spark.sql(
                    "SELECT oi.order_item_product_id, SUM(oi.order_item_quantity) as total_sold " +
                            "FROM order_items oi " +
                            "GROUP BY oi.order_item_product_id " +
                            "ORDER BY total_sold DESC LIMIT 1"
            );
            System.out.println("Best Selling Product ID:");
            bestSellingProduct.show();

            // 3) Resolve that product id to its human-readable name.
            Dataset<Row> bestSellingProductName = bestSellingProduct
                    .join(products, bestSellingProduct.col("order_item_product_id")
                            .equalTo(products.col("product_id")))
                    .select("product_name");
            System.out.println("Best Selling Product Name:");
            bestSellingProductName.show();

            // 4) State whose customers spent the most in total.
            Dataset<Row> topPurchasingState = spark.sql(
                    "SELECT c.customer_state, SUM(oi.order_item_subtotal) as total_spent " +
                            "FROM customers c JOIN orders o ON c.customer_id = o.order_customer_id " +
                            "JOIN order_items oi ON o.order_id = oi.order_item_order_id " +
                            "GROUP BY c.customer_state " +
                            "ORDER BY total_spent DESC LIMIT 1"
            );
            System.out.println("Top Purchasing State:");
            topPurchasingState.show();
        } finally {
            // Fix: stop() was previously unreachable if any query threw,
            // leaking the local Spark context. Always release the session.
            spark.stop();
        }
    }

    /** Reads a headerless CSV at {@code path} using the given explicit schema. */
    private static Dataset<Row> loadCsv(SparkSession spark, StructType schema, String path) {
        return spark.read().option("header", "false").schema(schema).csv(path);
    }

    /** Schema for the customers export; customer_id is the non-null key. */
    private static StructType customersSchema() {
        return new StructType(new StructField[]{
                DataTypes.createStructField("customer_id", DataTypes.IntegerType, false),
                DataTypes.createStructField("customer_fname", DataTypes.StringType, true),
                DataTypes.createStructField("customer_lname", DataTypes.StringType, true),
                DataTypes.createStructField("customer_email", DataTypes.StringType, true),
                DataTypes.createStructField("customer_password", DataTypes.StringType, true),
                DataTypes.createStructField("customer_street", DataTypes.StringType, true),
                DataTypes.createStructField("customer_city", DataTypes.StringType, true),
                DataTypes.createStructField("customer_state", DataTypes.StringType, true),
                DataTypes.createStructField("customer_zipcode", DataTypes.StringType, true)
        });
    }

    /** Schema for the orders export; order_customer_id links back to customers. */
    private static StructType ordersSchema() {
        return new StructType(new StructField[]{
                DataTypes.createStructField("order_id", DataTypes.IntegerType, false),
                DataTypes.createStructField("order_date", DataTypes.DateType, true),
                DataTypes.createStructField("order_customer_id", DataTypes.IntegerType, false),
                DataTypes.createStructField("order_status", DataTypes.StringType, true)
        });
    }

    /** Schema for the order_items export; order_item_order_id links back to orders. */
    private static StructType orderItemsSchema() {
        return new StructType(new StructField[]{
                DataTypes.createStructField("order_item_id", DataTypes.IntegerType, false),
                DataTypes.createStructField("order_item_order_id", DataTypes.IntegerType, false),
                DataTypes.createStructField("order_item_product_id", DataTypes.IntegerType, false),
                DataTypes.createStructField("order_item_quantity", DataTypes.IntegerType, false),
                DataTypes.createStructField("order_item_subtotal", DataTypes.FloatType, true),
                DataTypes.createStructField("order_item_product_price", DataTypes.FloatType, true)
        });
    }

    /**
     * Schema for the products export.
     *
     * <p>NOTE(review): common retail_db dumps also carry a product_description
     * column between product_name and product_price; this 5-column schema will
     * misalign against such a file — verify against the actual products.csv.
     */
    private static StructType productsSchema() {
        return new StructType(new StructField[]{
                DataTypes.createStructField("product_id", DataTypes.IntegerType, false),
                DataTypes.createStructField("category_id", DataTypes.IntegerType, true),
                DataTypes.createStructField("product_name", DataTypes.StringType, true),
                DataTypes.createStructField("product_price", DataTypes.FloatType, true),
                DataTypes.createStructField("product_image_url", DataTypes.StringType, true)
        });
    }
}