// TpchQueryOptimized.java
package com.abcd1234;

import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.Encoders;
import static org.apache.spark.sql.functions.*;

/**
 * Spark driver that registers a native (JNI-backed) aggregation UDAF, creates the
 * eight TPC-H tables over local CSV files, runs TPC-H Q1 against LINEITEM, and
 * prints the result together with a wall-clock execution time.
 *
 * <p>Note: table data paths are hard-coded to {@code /home/wangyijia/tpc-h/csvs/};
 * the tables are external CSV tables created in database {@code TPCH}.
 */
public class TpchQueryOptimized {
    /**
     * Entry point: builds the SparkSession, registers the UDAF, creates the TPC-H
     * schema, executes Q1, and prints the result and timing.
     *
     * @param args ignored
     */
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
            .appName("TPCH-JNI-Optimized")
            // Kryo serialization is required for the custom aggregator's buffer encoding.
            .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
            .getOrCreate();

        // Register the RVV-accelerated aggregation UDAF under the SQL name "rvv_agg".
        spark.udf().register("rvv_agg", udaf(new RVVAggregatorUDAF(), Encoders.kryo(Row.class)));

        // Initialize the TPC-H database and make it the current database.
        spark.sql("CREATE DATABASE IF NOT EXISTS TPCH;");
        spark.sql("USE TPCH;");
        spark.sql("""
CREATE TABLE NATION  (
    N_NATIONKEY  int,
    N_NAME       string,
    N_REGIONKEY  int,
    N_COMMENT    string)
    USING csv OPTIONS (PATH 'file:/home/wangyijia/tpc-h/csvs/nation.csv');
        """);

        spark.sql("""
CREATE TABLE REGION  (
    R_REGIONKEY  int,
    R_NAME       string,
    R_COMMENT   string)
    USING csv OPTIONS (PATH 'file:/home/wangyijia/tpc-h/csvs/region.csv');
        """);

        spark.sql("""
CREATE TABLE PART  (
    P_PARTKEY     int,
    P_NAME        string,
    P_MFGR        string,
    P_BRAND       string,
    P_TYPE        string,
    P_SIZE        int,
    P_CONTAINER  string,
    P_RETAILPRICE decimal(15,2),
    P_COMMENT     string )
    USING csv OPTIONS (PATH 'file:/home/wangyijia/tpc-h/csvs/part.csv');
        """);

        spark.sql("""
CREATE TABLE SUPPLIER (
    S_SUPPKEY     int,
    S_NAME        string,
    S_ADDRESS     string,
    S_NATIONKEY   int,
    S_PHONE       string,
    S_ACCTBAL     decimal(15,2),
    S_COMMENT     string)
    USING csv OPTIONS (PATH 'file:/home/wangyijia/tpc-h/csvs/supplier.csv');
        """);

        spark.sql("""
CREATE TABLE PARTSUPP (
    PS_PARTKEY     int,
    PS_SUPPKEY     int,
    PS_AVAILQTY    int,
    PS_SUPPLYCOST  decimal(15,2),
    PS_COMMENT     string)
    USING csv OPTIONS (PATH 'file:/home/wangyijia/tpc-h/csvs/partsupp.csv');
        """);

        spark.sql("""
CREATE TABLE CUSTOMER (
    C_CUSTKEY     int,
    C_NAME        string,
    C_ADDRESS     string,
    C_NATIONKEY   int,
    C_PHONE       string,
    C_ACCTBAL     decimal(15,2),
    C_MKTSEGMENT string,
    C_COMMENT     string)
    USING csv OPTIONS (PATH 'file:/home/wangyijia/tpc-h/csvs/customer.csv');
        """);

        spark.sql("""
CREATE TABLE ORDERS  (
    O_ORDERKEY       int,
    O_CUSTKEY        int,
    O_ORDERSTATUS    string,
    O_TOTALPRICE     decimal(15,2),
    O_ORDERDATE      date,
    O_ORDERPRIORITY  string,
    O_CLERK          string,
    O_SHIPPRIORITY   int,
    O_COMMENT        string)
    USING csv OPTIONS (PATH 'file:/home/wangyijia/tpc-h/csvs/orders.csv');
        """);

        spark.sql("""
CREATE TABLE LINEITEM (
    L_ORDERKEY    int,
    L_PARTKEY     int,
    L_SUPPKEY     int,
    L_LINENUMBER  int,
    L_QUANTITY    decimal(15,2),
    L_EXTENDEDPRICE  decimal(15,2),
    L_DISCOUNT    decimal(15,2),
    L_TAX         decimal(15,2),
    L_RETURNFLAG  string,
    L_LINESTATUS  string,
    L_SHIPDATE    date,
    L_COMMITDATE  date,
    L_RECEIPTDATE date,
    L_SHIPINSTRUCT string,
    L_SHIPMODE    string,
    L_COMMENT      string)
    USING csv OPTIONS (PATH 'file:/home/wangyijia/tpc-h/csvs/lineitem.csv');
        """);

        // Build the (lazy) plan for TPC-H Q1; nothing executes until an action is called.
        Dataset<Row> result = spark.sql("""
SELECT
    l_returnflag,
    l_linestatus,
    SUM(l_quantity) AS sum_qty,
    SUM(l_extendedprice) AS sum_base_price,
    SUM(l_extendedprice * (1 - l_discount)) AS sum_disc_price,
    SUM(l_extendedprice * (1 - l_discount) * (1 + l_tax)) AS sum_charge,
    AVG(l_quantity) AS avg_qty,
    AVG(l_extendedprice) AS avg_price,
    AVG(l_discount) AS avg_disc,
    COUNT(*) AS count_order
FROM
    tpch.lineitem
WHERE
    l_shipdate <= DATE '1998-09-02'
GROUP BY
    l_returnflag,
    l_linestatus
ORDER BY
    l_returnflag,
    l_linestatus;
        """);

        // Start timing here: Spark is lazy, so the actual scan/aggregation runs
        // when show() below triggers the action (plan analysis above is excluded).
        long startTime = System.currentTimeMillis();

        // Project the aggregated columns and trigger execution with show().
        result.select(
            col("l_returnflag"),
            col("l_linestatus"),
            col("sum_qty"),
            col("sum_base_price"),
            col("sum_disc_price"),
            col("sum_charge"),
            col("avg_qty"),
            col("avg_price"),
            col("avg_disc"),
            col("count_order")
        ).show();

        long endTime = System.currentTimeMillis();
        System.out.println("Query Execution Time: " + (endTime - startTime) + " ms");

        spark.stop();
    }
}