package top.doe.spark_sql;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.apache.spark.SparkConf;
import org.apache.spark.SparkContext;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.serializer.KryoSerializer;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.zookeeper.KeeperException;

import java.io.IOException;
import java.util.Arrays;

public class _03_Task {

    /**
     * Demo driver: reads a JSON datasource into a DataFrame, registers it as a
     * temp view, and runs a SQL filter over it.
     *
     * @param args unused
     * @throws InterruptedException if the keep-alive sleep at the end is interrupted
     */
    public static void main(String[] args) throws InterruptedException {

        SparkConf conf = new SparkConf();

        // Local-mode session; shuffle partitions lowered from the default 200
        // so the tiny demo dataset does not fan out into hundreds of tasks.
        SparkSession spark = SparkSession.builder()
                .master("local")
                .appName("datasource")
                .config(conf)
                .config("spark.sql.shuffle.partitions", 2)
                //.config("spark.sql.adaptive.enabled", "true")
                //.enableHiveSupport()  // enable Hive support (only needed for real Hive tables)
                .getOrCreate();

        //-------------JSON datasource read + SQL query-------------------------------
        // NOTE(review): the original section header claimed "HIVE table read/write",
        // but this block actually reads a local JSON file and queries a temp view.
        Dataset<Row> orders = spark.read().json("sql_data/datasource/order.data");
        orders.createOrReplaceTempView("tmp");
        spark.sql("select * from tmp where uid>1").show()/*.explain("codegen")*/;

        // Keep the JVM alive so the Spark UI (default http://localhost:4040) can be
        // inspected. Debug aid only — for a real job, remove this and call spark.stop().
        Thread.sleep(Long.MAX_VALUE);

    }

    /**
     * Simple bean for Dataset/Encoders experiments. Lombok generates the
     * getters/setters and both constructors that Spark's bean encoder needs.
     * Currently unused by {@link #main}.
     */
    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    public static class Bean {
        private String name;
        private int age;
    }

}
