package com.navinfo.platform.examples.parquet;


import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.*;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * Simple JavaBean example: builds a {@code Dataset<Row>} from a list of
 * {@code Person} beans, writes it out as name-partitioned Parquet, reads it
 * back, and runs a small projection query.
 */
public class MyParquetJavaSample {

    /** Default Parquet output location; can be overridden by the first CLI argument. */
    private static final String DEFAULT_OUTPUT_PATH = "/user/root/persons.parquet";

    public static void main(String[] args) throws AnalysisException {
        // $example on:init_session$
        SparkSession spark = SparkSession
                .builder()
                .master("local[4]")
                .appName("MyParquetJavaSample")
                .getOrCreate();
        // $example off:init_session$

        // Allow the output path to be supplied on the command line;
        // fall back to the historical default for backward compatibility.
        String outputPath = args.length > 0 ? args[0] : DEFAULT_OUTPUT_PATH;

        try {
            runBasicDataFrameExample(spark, outputPath);
        } finally {
            // Always release cluster resources, even if the example fails.
            spark.stop();
        }
    }

    /**
     * Demonstrates the bean-to-DataFrame round trip: parallelize beans,
     * infer a schema from the bean class, write/read Parquet, then query.
     *
     * @param spark      the active session
     * @param outputPath Parquet destination directory (overwritten on each run)
     */
    private static void runBasicDataFrameExample(SparkSession spark, String outputPath) {
        // Wrap the session's SparkContext so we can parallelize a local list.
        JavaSparkContext jsc = new JavaSparkContext(spark.sparkContext());

        Person person = new Person();
        person.setName("Andy");
        person.setAge(32);

        Person person2 = new Person();
        person2.setName("Jerry");
        person2.setAge(20);

        List<Person> data = Arrays.asList(person, person2);
        JavaRDD<Person> distData = jsc.parallelize(data);

        // Apply a schema inferred from the JavaBean class to get a DataFrame.
        Dataset<Row> peopleDF = spark.createDataFrame(distData, Person.class);
        peopleDF.show();

        // Overwrite any previous run; partition directories are keyed by "name".
        peopleDF.write().mode(SaveMode.Overwrite).partitionBy("name").parquet(outputPath);

        // Read it back; the partition column is reconstructed from the
        // directory layout by Spark's partition discovery.
        Dataset<Row> peopleReadDF = spark.read().parquet(outputPath);
        peopleReadDF.show();

        // Simple projection including a computed column.
        peopleReadDF.selectExpr("name", "age+1").show();
    }
}
