package com.zzl.spark.sql;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;


/**
 * Example: converting a {@code JavaRDD<Person>} into a DataFrame via Java-bean
 * reflection, querying it with Spark SQL, and mapping the result rows back to
 * {@code Person} objects.
 *
 * <p>Reads {@code peoples.txt} (CSV lines of {@code id,name,age}), registers the
 * data as a temp view, selects people with {@code age > 7}, and prints their names.
 */
public class RDD2DataFrameByReflection {
    public static void main(String[] args) {
        // Windows-only workaround so Hadoop can find winutils.exe; harmless elsewhere.
        System.setProperty("hadoop.home.dir", "E:\\hadoop");
        SparkConf conf = new SparkConf().setMaster("local").setAppName("RDD2DataFrameByReflection");

        // try-with-resources ensures the SparkContext is stopped even if the job fails
        // (JavaSparkContext implements Closeable); the original leaked it.
        try (JavaSparkContext sc = new JavaSparkContext(conf)) {
            SQLContext sqlContext = new SQLContext(sc);

            // Parse each "id,name,age" line into a Person bean.
            JavaRDD<String> lines = sc.textFile("peoples.txt");
            JavaRDD<Person> people = lines.map((l) -> {
                String[] split = l.split(",");
                Person p = new Person();
                // parseInt avoids the pointless boxing of Integer.valueOf.
                p.setId(Integer.parseInt(split[0]));
                p.setName(split[1]);
                p.setAge(Integer.parseInt(split[2]));
                return p;
            });

            // Schema is inferred from the Person bean's getters by reflection.
            Dataset<Row> df = sqlContext.createDataFrame(people, Person.class);
            df.createOrReplaceTempView("personTable");
            Dataset<Row> resultDataFrame = sqlContext.sql("select * from personTable where age>7");

            // Map result rows back to Person beans; getAs resolves columns by name,
            // so this is robust to column reordering in the DataFrame.
            JavaRDD<Row> rowJavaRDD = resultDataFrame.javaRDD();
            JavaRDD<Person> resultPeople = rowJavaRDD.map((r) -> {
                int id = r.getAs("id");
                String name = r.getAs("name");
                int age = r.getAs("age");
                return new Person(id, name, age);
            });
            resultPeople.foreach(p -> {
                System.out.println(p.getName());
            });
            resultDataFrame.show();
        }
    }
}
