import bean.User;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.api.java.function.ReduceFunction;
import org.apache.spark.sql.*;
import scala.Tuple2;

/**
 * @Author:wsl
 * @Date:2023/05/04/14:08
 * @Description: Spark SQL demo — reads user.json as a typed Dataset and runs a SQL filter.
 */
/**
 * Spark SQL demo: loads {@code user.json} into a DataFrame, maps it to a typed
 * {@code Dataset<User>} via a bean encoder, and runs a SQL filter against a
 * temporary view. Runs locally with all available cores.
 */
public class Sql {
    public static void main(String[] args) {
        // BUG FIX: HADOOP_USER_NAME must be set BEFORE the SparkSession is
        // created — Hadoop's UserGroupInformation reads it during
        // initialization, so setting it afterwards had no effect.
        System.setProperty("HADOOP_USER_NAME", "atguigu");

        SparkConf conf = new SparkConf().setAppName("sparksql").setMaster("local[*]");
        SparkSession spark = SparkSession.builder().config(conf).getOrCreate();

        // Untyped DataFrame from JSON (one JSON object per line expected).
        Dataset<Row> lineDS = spark.read().json("user.json");

        // Typed view of the same data using the User JavaBean encoder;
        // printSchema/show are for demonstration output only.
        Dataset<User> userDS = lineDS.as(Encoders.bean(User.class));
        userDS.printSchema();
        userDS.show();

        // SQL over a temp view. NOTE(review): "table" is a SQL keyword —
        // non-reserved in Spark's default mode so this works, but a less
        // ambiguous view name (e.g. "users") would be safer under ANSI mode.
        lineDS.createOrReplaceTempView("table");
        Dataset<Row> sql = spark.sql("select * from table where age>18");
        sql.show();

        spark.stop();
    }
}
