package cn.mavor.day1108;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;


/**
 * Spark SQL batch job that loads a customers CSV into a temp view and runs
 * two ad-hoc queries:
 *   1. all customers whose {@code customer_state} is 'TX';
 *   2. all customers whose last name contains "Smith", plus their count.
 *
 * Input path is hard-coded to a local Hive table export
 * (file:///usr/local/hive-3.1.3/table/customers.csv).
 */
public class CustomerCount1108 {

    /** Builds the explicit schema for the customers CSV (9 string/long columns, all nullable). */
    private static StructType customersSchema() {
        return new StructType(new StructField[]{
                DataTypes.createStructField("customer_id", DataTypes.LongType, true),
                DataTypes.createStructField("customer_fname", DataTypes.StringType, true),
                DataTypes.createStructField("customer_lname", DataTypes.StringType, true),
                DataTypes.createStructField("customer_email", DataTypes.StringType, true),
                DataTypes.createStructField("customer_password", DataTypes.StringType, true),
                DataTypes.createStructField("customer_street", DataTypes.StringType, true),
                DataTypes.createStructField("customer_city", DataTypes.StringType, true),
                DataTypes.createStructField("customer_state", DataTypes.StringType, true),
                DataTypes.createStructField("customer_zipcode", DataTypes.StringType, true)
        });
    }

    public static void main(String[] args) {
        // Create the SparkSession (local single-threaded master for this demo job).
        SparkSession spark = SparkSession.builder()
                .appName("CustomerAnalysisApp")
                .master("local")
                .getOrCreate();

        // Ensure the session is stopped even if a query throws —
        // otherwise the SparkContext (and its resources) would leak.
        try {
            // Read the customers data with an explicit schema; the file's
            // header row is consumed via the "header" option.
            Dataset<Row> customers = spark.read()
                    .option("header", "true")
                    .schema(customersSchema())
                    .csv("file:///usr/local/hive-3.1.3/table/customers.csv");

            // Register as a temporary view so it can be queried with SQL.
            customers.createOrReplaceTempView("customers");

            // 1. Customers whose customer_state is 'TX'.
            System.out.println("1. 查找来自 TX 州的客户:");
            Dataset<Row> txCustomers = spark.sql(
                    "SELECT * FROM customers WHERE customer_state = 'TX'"
            );
            txCustomers.show();

            // 2. Customers whose last name contains "Smith", and how many there are.
            System.out.println("2. 查找姓氏包含 Smith 的客户信息:");
            Dataset<Row> smithCustomers = spark.sql(
                    "SELECT * FROM customers WHERE customer_lname LIKE '%Smith%'"
            );
            smithCustomers.show();

            // Count of customers whose last name contains "Smith".
            long smithCount = smithCustomers.count();
            System.out.println("姓氏包含 Smith 的客户数量: " + smithCount);
        } finally {
            spark.stop();
        }
    }
}
