package cn.mavor.day1112;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

/**
 * Batch sales-analysis job over the retail CSV tables (customers, orders,
 * order_items). Registers the three tables as temp views and answers three
 * questions with Spark SQL:
 * <ol>
 *   <li>highest-spending customer (sum of order_item_subtotal)</li>
 *   <li>best-selling product (sum of order_item_quantity)</li>
 *   <li>state with the highest total spend</li>
 * </ol>
 * Results are printed with {@code show()}; the job is side-effect free otherwise.
 */
public class CustomerCount1112 {

    /** Base directory holding the three input CSV files. */
    private static final String TABLE_DIR = "file:///usr/local/tables/";

    public static void main(String[] args) {
        // try-with-resources: SparkSession is AutoCloseable, so the session is
        // stopped even when a query throws (the original leaked it on failure).
        try (SparkSession spark = SparkSession.builder()
                .appName("SalesAnalysisApp")
                .master("local")
                .getOrCreate()) {

            // Read each table with an explicit schema (header row skipped) and
            // expose it as a temp view for the SQL below.
            readCsv(spark, customersSchema(), TABLE_DIR + "customers.csv")
                    .createOrReplaceTempView("customers");
            readCsv(spark, ordersSchema(), TABLE_DIR + "orders.csv")
                    .createOrReplaceTempView("orders");
            readCsv(spark, orderItemsSchema(), TABLE_DIR + "order_items.csv")
                    .createOrReplaceTempView("order_items");

            // 1. Highest-spending customer.
            Dataset<Row> topSpender = spark.sql(
                    "SELECT c.customer_id, c.customer_fname, c.customer_lname, SUM(oi.order_item_subtotal) AS total_spent " +
                            "FROM orders o JOIN order_items oi ON o.order_id = oi.order_item_order_id " +
                            "JOIN customers c ON o.order_customer_id = c.customer_id " +
                            "GROUP BY c.customer_id, c.customer_fname, c.customer_lname " +
                            "ORDER BY total_spent DESC LIMIT 1"
            );
            topSpender.show();

            // 2. Best-selling product by total quantity.
            Dataset<Row> bestSellingProduct = spark.sql(
                    "SELECT order_item_product_id, SUM(order_item_quantity) AS total_sold " +
                            "FROM order_items " +
                            "GROUP BY order_item_product_id " +
                            "ORDER BY total_sold DESC LIMIT 1"
            );
            bestSellingProduct.show();

            // 3. State (region) with the highest total spend.
            Dataset<Row> strongestRegion = spark.sql(
                    "SELECT c.customer_state, SUM(oi.order_item_subtotal) AS total_spent " +
                            "FROM orders o JOIN order_items oi ON o.order_id = oi.order_item_order_id " +
                            "JOIN customers c ON o.order_customer_id = c.customer_id " +
                            "GROUP BY c.customer_state " +
                            "ORDER BY total_spent DESC LIMIT 1"
            );
            strongestRegion.show();
        }
    }

    /**
     * Reads one CSV file with the given schema. Extracted to remove the
     * triplicated reader boilerplate from the original.
     */
    private static Dataset<Row> readCsv(SparkSession spark, StructType schema, String path) {
        return spark.read()
                .option("header", "true")
                .schema(schema)
                .csv(path);
    }

    /** Schema for the customers table. */
    private static StructType customersSchema() {
        return new StructType(new StructField[]{
                // IntegerType (was LongType) so the join key matches
                // orders.order_customer_id and needs no implicit cast.
                // NOTE(review): if real customer_ids exceed int range, widen
                // order_customer_id to LongType instead.
                DataTypes.createStructField("customer_id", DataTypes.IntegerType, true),
                DataTypes.createStructField("customer_fname", DataTypes.StringType, true),
                DataTypes.createStructField("customer_lname", DataTypes.StringType, true),
                DataTypes.createStructField("customer_email", DataTypes.StringType, true),
                DataTypes.createStructField("customer_password", DataTypes.StringType, true),
                DataTypes.createStructField("customer_street", DataTypes.StringType, true),
                DataTypes.createStructField("customer_city", DataTypes.StringType, true),
                DataTypes.createStructField("customer_state", DataTypes.StringType, true),
                DataTypes.createStructField("customer_zipcode", DataTypes.StringType, true)
        });
    }

    /** Schema for the orders table. */
    private static StructType ordersSchema() {
        return new StructType(new StructField[]{
                DataTypes.createStructField("order_id", DataTypes.IntegerType, true),
                DataTypes.createStructField("order_date", DataTypes.DateType, true),
                DataTypes.createStructField("order_customer_id", DataTypes.IntegerType, true),
                DataTypes.createStructField("order_status", DataTypes.StringType, true)
        });
    }

    /** Schema for the order_items table. */
    private static StructType orderItemsSchema() {
        return new StructType(new StructField[]{
                DataTypes.createStructField("order_item_id", DataTypes.IntegerType, true),
                DataTypes.createStructField("order_item_order_id", DataTypes.IntegerType, true),
                DataTypes.createStructField("order_item_product_id", DataTypes.IntegerType, true),
                DataTypes.createStructField("order_item_quantity", DataTypes.IntegerType, true),
                DataTypes.createStructField("order_item_subtotal", DataTypes.FloatType, true),
                DataTypes.createStructField("order_item_product_price", DataTypes.FloatType, true)
        });
    }
}
