/**
 * Alipay.com Inc.
 * Copyright (c) 2004-2020 All Rights Reserved.
 */
package com.dal.spark.sql;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

import java.io.Serializable;
import java.util.StringJoiner;

/**
 * @author dingaolin
 * @version : RDD2DataFrameReflection.java, v 0.1 2020年11月11日 上午10:04 dingaolin Exp $
 */
/**
 * Demonstrates converting a plain {@code JavaRDD} of JavaBeans into a
 * {@code Dataset<Row>} (DataFrame) using Spark's reflection-based schema
 * inference, querying it with SQL, and mapping the result back to beans.
 */
public class RDD2DataFrameReflection {

    public static void main(String[] args) {

        SparkConf conf = new SparkConf()
                .setMaster("local")
                .setAppName("RDD2DataFrameReflection");

        // Create a plain RDD of text lines from the input file.
        // NOTE(review): the file is named people.json but is parsed below as
        // comma-separated "name,age" lines — confirm the data really is
        // CSV-style text (the stock Spark example uses people.txt for this;
        // for actual JSON input, spark.read().json(...) would be the tool).
        JavaSparkContext sc = new JavaSparkContext(conf);
        JavaRDD<String> lines = sc.textFile("src/main/resources/people.json");

        // Parse each "name,age" line into a Person bean.
        JavaRDD<Person> personRDD = lines.map(line -> {
            String[] fields = line.split(",");
            Person person = new Person();
            person.setName(fields[0]);
            person.setAge(Integer.parseInt(fields[1].trim()));
            return person;
        });

        // getOrCreate() reuses the SparkContext created above, so only one
        // context exists for the application.
        SparkSession spark = SparkSession.builder().config(conf).getOrCreate();

        // Reflection-based conversion: Spark introspects Person.class to
        // derive the schema from its getters/setters. The bean must be
        // public and Serializable for this to work.
        Dataset<Row> personDF = spark.createDataFrame(personRDD, Person.class);

        // Register the DataFrame as a temporary view so it can be queried
        // with SQL. createOrReplaceTempView replaces the long-deprecated
        // registerTempTable (deprecated since Spark 2.0).
        personDF.createOrReplaceTempView("person");
        personDF.printSchema();

        // Run SQL against the temp view; this selects rows whose name is 'Michael'.
        Dataset<Row> michaelDF = spark.sql("select * from person where name = 'Michael'");

        // Convert the result DataFrame back to an RDD of generic Rows.
        JavaRDD<Row> michaelRDD = michaelDF.javaRDD();

        // Map each Row back to a Person. With the reflection-inferred schema,
        // columns are ordered alphabetically by field name (age, name), so
        // index 0 is age and index 1 is name — NOT the declaration order.
        JavaRDD<Person> resultRDD = michaelRDD.map(row -> {
            Person person = new Person();
            person.setAge(row.getInt(0));
            person.setName(row.getString(1));
            return person;
        });

        // Print each Person. foreach executes on the executors; in local
        // mode the output appears in this process's console.
        resultRDD.foreach(person -> System.out.println(person));

        spark.stop();

    }


    /**
     * Simple JavaBean used for reflection-based DataFrame creation.
     * Must be public and Serializable so Spark can infer the schema from
     * its properties and ship instances between JVMs.
     */
    public static class Person implements Serializable {

        private String name;
        private int age;

        public String getName() {
            return name;
        }

        public void setName(String name) {
            this.name = name;
        }

        public int getAge() {
            return age;
        }

        public void setAge(int age) {
            this.age = age;
        }

        @Override
        public String toString() {
            return new StringJoiner(", ", Person.class.getSimpleName() + "[", "]")
                    .add("name='" + name + "'")
                    .add("age=" + age)
                    .toString();
        }
    }
}