package cn.hfuu.spark;

import org.apache.spark.sql.AnalysisException;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

/**
 * @author oldliu
 * @since 1.0
 */
/**
 * Finds the customer(s) with the highest total order value in the retail_db
 * CSV data set and prints their profile fields to stdout.
 *
 * <p>Pipeline: load four CSVs (customers, products, orders, order_items) with
 * explicit schemas, register them as temp views, then run one SQL query that
 * sums order_item subtotals per order, rolls them up per customer, and selects
 * the customer(s) whose total equals the maximum.
 */
public class SalesStat {

    /**
     * Loads a headerless CSV file with the given schema and registers it as a
     * temporary view so it can be referenced from SQL.
     *
     * @param spark  active SparkSession
     * @param schema explicit column schema (files have no header row)
     * @param path   file URI of the CSV ("file://" = local filesystem)
     * @param view   temp view name to register
     * @return the loaded Dataset
     * @throws AnalysisException if the view name is already taken
     */
    private static Dataset<Row> loadCsvAsView(SparkSession spark, StructType schema,
                                              String path, String view) throws AnalysisException {
        Dataset<Row> ds = spark.read()
                .option("header", "false") // source CSVs carry no header row
                .schema(schema)
                .csv(path);
        ds.createTempView(view);
        return ds;
    }

    public static void main(String[] args) throws AnalysisException {
        // Create the SparkSession; "local" master is for single-machine runs.
        SparkSession spark = SparkSession.builder()
                .appName("SparkApp")
                .master("local")
                .getOrCreate();

        // Explicit schemas for the headerless retail_db CSV files.
        StructType schemaCustomer = new StructType(new StructField[]{
                DataTypes.createStructField("customer_id", DataTypes.LongType, true),
                DataTypes.createStructField("customer_fname", DataTypes.StringType, true),
                DataTypes.createStructField("customer_lname", DataTypes.StringType, true),
                DataTypes.createStructField("customer_email", DataTypes.StringType, true),
                DataTypes.createStructField("customer_password", DataTypes.StringType, true),
                DataTypes.createStructField("customer_street", DataTypes.StringType, true),
                DataTypes.createStructField("customer_city", DataTypes.StringType, true),
                DataTypes.createStructField("customer_state", DataTypes.StringType, true),
                DataTypes.createStructField("customer_zipcode", DataTypes.StringType, true)
        });
        StructType schemaProduct = new StructType(new StructField[]{
                DataTypes.createStructField("product_id", DataTypes.LongType, true),
                DataTypes.createStructField("product_category_id", DataTypes.LongType, true),
                DataTypes.createStructField("product_name", DataTypes.StringType, true),
                DataTypes.createStructField("product_description", DataTypes.StringType, true),
                DataTypes.createStructField("product_price", DataTypes.FloatType, true),
                DataTypes.createStructField("product_image", DataTypes.StringType, true)
        });
        StructType schemaOrder = new StructType(new StructField[]{
                DataTypes.createStructField("order_id", DataTypes.LongType, true),
                DataTypes.createStructField("order_date", DataTypes.DateType, true),
                DataTypes.createStructField("order_customer_id", DataTypes.LongType, true),
                DataTypes.createStructField("order_status", DataTypes.StringType, true)
        });
        StructType schemaOrderItem = new StructType(new StructField[]{
                DataTypes.createStructField("order_item_id", DataTypes.LongType, true),
                DataTypes.createStructField("order_item_order_id", DataTypes.LongType, true),
                DataTypes.createStructField("order_item_product_id", DataTypes.LongType, true),
                DataTypes.createStructField("order_item_quantity", DataTypes.LongType, true),
                DataTypes.createStructField("order_item_subtotal", DataTypes.FloatType, true),
                DataTypes.createStructField("order_item_product_price", DataTypes.FloatType, true)
        });

        // Load each CSV and expose it as a temp view for the SQL below.
        loadCsvAsView(spark, schemaCustomer, "file:///usr/local/retail_db-csv/customers.csv", "customers");
        loadCsvAsView(spark, schemaProduct, "file:///usr/local/retail_db-csv/products.csv", "products");
        loadCsvAsView(spark, schemaOrder, "file:///usr/local/retail_db-csv/orders.csv", "orders");
        loadCsvAsView(spark, schemaOrderItem, "file:///usr/local/retail_db-csv/order_items.csv", "order_items");

        // t: total spend per order; x: total spend per customer;
        // outer query: customers whose total equals the overall maximum.
        // Use spark.sql() directly instead of the deprecated Dataset.sqlContext().
        Dataset<Row> sql = spark.sql(" " +
                "with t as ( " +
                " select sum(order_item_subtotal) as m,order_item_order_id " +
                "  from order_items group by order_item_order_id " +
                ") " +
                " " +
                " ,x as ( " +
                "  select sum(t.m)as m2,order_customer_id from orders o inner join t  " +
                "   on(t.order_item_order_id=o.order_id) " +
                "   group by order_customer_id " +
                ") " +
                "  " +
                "  select * from customers where customer_id " +
                "  in( " +
                "     select order_customer_id from x where m2 " +
                "     =(select max(m2) from x) " +
                "  )  " +
                " ");

        System.out.println("=>>>>>>>>>>>>>>>>>>>-");
        // Print selected profile columns. Index 5 (customer_street follows
        // customer_password at index 4) and index 8 (zipcode) are skipped —
        // NOTE(review): looks deliberate (omits password/zipcode-adjacent
        // fields), but confirm the intended columns against the schema.
        sql.foreach(x -> {
            System.out.println(x.getLong(0) + "--" + x.getString(1) + " " + x.getString(2) + "---" + x.getString(3) + "---" + x.getString(4) + "---" + x.getString(6) + "---" + x.getString(7));
        });
        System.out.println("=>>>>>>>>>>>>>>>>>>>-");

        spark.stop();
    }
}
