package KnowSparkSQL.DSL.DatasetMethods;

import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

public class DatasetConversionRDD {
    /**
     * Demo: convert a {@code Dataset<Row>} to a {@code JavaRDD<Row>}, inspect the
     * schema of the first row, and read a single field's value by column name.
     *
     * <p>Reads {@code ./data/student.json} with a local-mode SparkSession; prints
     * each field's name and type, then the first row's {@code name} value.
     */
    public static void main(String[] args) {
        SparkSession sparkSession = SparkSession.builder().appName("DatasetConversionRDD").master("local").getOrCreate();

        // Ensure the local Spark context is released even if the job throws.
        try {
            Dataset<Row> json = sparkSession.read().json("./data/student.json");

            // Convert the Dataset<Row> into a JavaRDD<Row>
            JavaRDD<Row> rdd = json.toJavaRDD();

            // Fetch one row; first() is the direct idiom for take(1).get(0)
            Row row = rdd.first();

            StructType schema = row.schema();
            StructField[] fields = schema.fields();

            // Print each column's name and Spark SQL type name.
            for (StructField field : fields) {
                String fieldName = field.name();
                DataType dataType = field.dataType();
                String typeName = dataType.typeName();
                System.out.println(fieldName + "字段的字段类型是" + typeName);
            }

            // Look up the positional index of the "name" column, then read its value.
            int nameIndex = row.fieldIndex("name");
            Object nameValue = row.get(nameIndex);

            System.out.println("第一行的name字段的值是" + nameValue);
        } finally {
            sparkSession.stop();
        }
    }
}
