package KnowSparkSQL.DSL.DatasetMethods.CreateDataset;

import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

import java.io.Serializable;
import java.util.Arrays;
import java.util.List;

public class CreateDataset3 {

    /**
     * Example 3: converting a {@code JavaRDD<Student>} into a {@code Dataset<Row>}
     * (DataFrame) via {@code SparkSession.createDataFrame(JavaRDD<?> rdd, Class<?> beanClass)}.
     * Spark derives the schema by reflecting over the bean's getter methods.
     */
    public static void main(String[] args) {
        SparkSession ss = SparkSession.builder()
                .appName("CreateDataset3")
                .master("local")
                .getOrCreate();
        try {
            List<Student> data = Arrays.asList(new Student("张三"), new Student("李四"));
            JavaSparkContext javaSparkContext = new JavaSparkContext(ss.sparkContext());
            JavaRDD<Student> studentJavaRDD = javaSparkContext.parallelize(data);

            // Schema (a single nullable string column "name") is inferred from
            // Student's JavaBean properties.
            Dataset<Row> dataFrame = ss.createDataFrame(studentJavaRDD, Student.class);

            dataFrame.printSchema();
        } finally {
            ss.stop(); // release the underlying SparkContext even if the job fails
        }
    }

    /**
     * JavaBean holding one student record.
     *
     * <p>Implements {@link Serializable} because instances travel inside a
     * {@code JavaRDD} and must be serializable when shipped to executors.
     * A public no-arg constructor is provided as required by the JavaBean
     * contract that Spark's bean encoder depends on for deserialization.
     */
    public static class Student implements Serializable {
        private String name;

        /** No-arg constructor required by the JavaBean convention. */
        public Student() {
        }

        public Student(String name) {
            this.name = name;
        }

        public String getName() {
            return name;
        }

        public void setName(String name) {
            this.name = name;
        }
    }
}
