package demo;

import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import scala.collection.JavaConverters;
import scala.collection.Seq;

import java.util.Arrays;

public class DataFrameRDDApp {

    /** Default input file, used when no CLI argument is supplied. */
    private static final String DEFAULT_INPUT =
            "F:\\lzc\\SparkSQL\\sparklearning\\src\\main\\java\\demo\\person.txt";

    /**
     * Demo: reads a comma-separated person file into a {@code JavaRDD<Person>},
     * converts it to a DataFrame via JavaBean reflection, prints its schema and
     * contents, then runs a SQL self-join over a temp view.
     *
     * @param args optional; {@code args[0]} overrides the input file path
     */
    public static void main(String[] args) {
        // Allow the input path to be passed on the command line; fall back to
        // the original hard-coded location for backward compatibility.
        String inputPath = args.length > 0 ? args[0] : DEFAULT_INPUT;

        SparkSession spark = SparkSession.builder()
                .appName("DataFrameRDDApp")
                .master("local[2]")
                .getOrCreate();

        try {
            // Parse each "id,name,age" line into a Person bean. trim() makes
            // the parse tolerant of whitespace around the commas.
            JavaRDD<Person> peopleRDD = spark.read()
                    .textFile(inputPath)
                    .javaRDD()
                    .map(line -> {
                        String[] parts = line.split(",");
                        return new Person(
                                Integer.parseInt(parts[0].trim()),
                                parts[1].trim(),
                                Integer.parseInt(parts[2].trim()));
                    });

            // Apply a schema to an RDD of JavaBeans to get a DataFrame.
            Dataset<Row> peopleDF = spark.createDataFrame(peopleRDD, Person.class);

            peopleDF.printSchema();
            peopleDF.show();

            // Register the DataFrame as a SQL temp view and self-join on id.
            // (The former second DataFrame / "people2" view was dead code: the
            // query below only references "people".)
            peopleDF.createOrReplaceTempView("people");
            spark.sql("select * from people a join people b on a.id = b.id").show();
        } finally {
            // Ensure the session is released even if reading/parsing/SQL fails.
            spark.stop();
        }
    }
}
