package com.sub.spark.sql.instance;

import lombok.Data;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

import java.io.Serializable;
import java.util.Objects;

/**
 * @ClassName SparkSqlInstance
 * @Description: Creation and basic usage of a Spark SQL instance.
 * Spark SQL history: RDD (Spark 1.0) => DataFrame (Spark 1.3) => Dataset (Spark 1.6)
 * @Author Submerge.
 * @Since 2025/5/23 23:58
 * @Version 1.0
 */
public class SparkSqlInstance {

    /**
     * Demonstrates the Spark SQL workflow: create a session, load JSON into a
     * DataFrame, run SQL against a temporary view, and convert rows to a typed bean.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {

        // 1. Create the SparkSession — the entry point for Spark SQL.
        SparkSession sparkSession = SparkSession.builder()
                .appName("sub-spark-sql")
                .master("local[*]")
                .getOrCreate();

        try {
            // 2. Read the JSON file into an untyped DataFrame (Dataset<Row>).
            Dataset<Row> jsonDataset = sparkSession.read().json("data/demo/spark/user.json");
            jsonDataset.show();

            // 3. Register a temporary view so the data can be queried with SQL.
            jsonDataset.createOrReplaceTempView("user");

            // 4. Query the data with SQL.
            Dataset<Row> sqlDataset = sparkSession.sql("select * from user");
            sqlDataset.show();

            // Convert Row to a typed Java bean: Dataset<Row> -> Dataset<User>.
            // Requires User to follow the JavaBean contract (no-arg ctor + accessors).
            Dataset<User> userDataset = jsonDataset.as(Encoders.bean(User.class));
            userDataset.show();

            // NOTE: a Dataset can be lowered back to an RDD via userDataset.rdd()
            // when low-level RDD operations are needed.

            // While the session is alive, the Spark UI is served at localhost:4040.
        } finally {
            // 5. Always release Spark resources, even if one of the steps above throws.
            sparkSession.stop();
        }
    }

}

/**
 * Simple JavaBean used with {@code Encoders.bean(User.class)}.
 * Spark's bean encoder requires a public no-arg constructor plus
 * getter/setter pairs, which are provided explicitly here.
 */
class User implements Serializable {
    // Private fields for proper encapsulation; the accessors below
    // satisfy the JavaBean contract previously supplied by Lombok @Data.
    private Long age;
    private String name;

    /** No-arg constructor required by the Spark bean encoder. */
    public User() {
    }

    public User(Long age, String name) {
        this.age = age;
        this.name = name;
    }

    public Long getAge() {
        return age;
    }

    public void setAge(Long age) {
        this.age = age;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof User)) {
            return false;
        }
        User other = (User) o;
        return Objects.equals(age, other.age) && Objects.equals(name, other.name);
    }

    @Override
    public int hashCode() {
        return Objects.hash(age, name);
    }

    // Format matches what Lombok @Data generated: User(age=…, name=…)
    @Override
    public String toString() {
        return "User(age=" + age + ", name=" + name + ")";
    }
}
