package com.navinfo.platform.examples.parquet;


import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.*;
import org.apache.spark.sql.types.ArrayType;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * Demonstrates converting JavaBeans that contain nested beans into a DataFrame.
 * Spark's bean introspection cannot handle a nested {@code List<Contact>} field,
 * so the schema is defined manually and each {@code Person} is mapped to a
 * {@link Row} by hand before {@code createDataFrame(rowRDD, schema)} is applied.
 */
public class MyParquetJavaSchemaSample {

    /** Default Parquet output location; can be overridden via the first program argument. */
    private static final String DEFAULT_OUTPUT_PATH = "/user/root/persons.parquet";

    public static void main(String[] args) throws AnalysisException {
        // $example on:init_session$
        SparkSession spark = SparkSession
                .builder()
                .master("local[4]")
                .appName("MyParquetJavaSchemaSample")
                .getOrCreate();
        // $example off:init_session$

        // Allow the Parquet path to be supplied on the command line; fall back
        // to the historical hard-coded location so existing invocations still work.
        String outputPath = args.length > 0 ? args[0] : DEFAULT_OUTPUT_PATH;

        runBasicDataFrameExample(spark, outputPath);

        spark.stop();
    }

    /**
     * Builds a tiny RDD of {@code Person} beans (one with a nested contact list,
     * one without), converts it to a DataFrame via an explicit schema, writes the
     * result as name-partitioned Parquet, then reads it back and queries it.
     *
     * @param spark      the active Spark session
     * @param outputPath Parquet directory to write to and read back from
     */
    private static void runBasicDataFrameExample(SparkSession spark, String outputPath) {
        // Create a JavaSparkContext using the SparkSession's SparkContext object
        JavaSparkContext jsc = new JavaSparkContext(spark.sparkContext());

        Person person = new Person();
        person.setName("Andy");
        person.setAge(321);
        List<Contact> contactList = new ArrayList<>();
        Contact contact = new Contact();
        contact.setCell("123801232312");
        contact.setCity("shenyang");
        contactList.add(contact);
        person.setContactList(contactList);

        // Second bean deliberately leaves contactList null to exercise the
        // null-handling branch in the Row mapping below.
        Person person2 = new Person();
        person2.setName("JerryV");
        person2.setAge(202);

        List<Person> data = Arrays.asList(person, person2);
        JavaRDD<Person> distData = jsc.parallelize(data);

        // Top-level scalar fields: name and age (both nullable).
        StructField nameField = DataTypes.createStructField("name", DataTypes.StringType, true);
        StructField ageField = DataTypes.createStructField("age", DataTypes.IntegerType, true);

        // Nested contact struct (city, cell), exposed as an array field
        // "contactList" whose elements may themselves be null.
        StructField cityField = DataTypes.createStructField("city", DataTypes.StringType, true);
        StructField cellField = DataTypes.createStructField("cell", DataTypes.StringType, true);
        StructType contactStruct = DataTypes.createStructType(new StructField[] { cityField, cellField });
        StructField contactField = DataTypes.createStructField("contactList",
                DataTypes.createArrayType(contactStruct, true), true);

        // Assemble the full row schema: (name, age, contactList).
        List<StructField> fields = new ArrayList<>();
        fields.add(nameField);
        fields.add(ageField);
        fields.add(contactField);
        StructType schema = DataTypes.createStructType(fields);

        // Convert each Person bean into a Row matching the schema above.
        // Nested contacts become an array of (city, cell) Rows; a null
        // contact list yields an empty array rather than an NPE.
        JavaRDD<Row> rowRDD = distData.map(record -> {
            List<Row> contactRows = new ArrayList<>();
            if (record.getContactList() != null) {
                for (Contact c : record.getContactList()) {
                    contactRows.add(RowFactory.create(c.getCity(), c.getCell()));
                }
            }
            return RowFactory.create(record.getName(), record.getAge(), contactRows.toArray());
        });

        // Apply the explicit schema to the RDD of Rows to get a DataFrame.
        Dataset<Row> peopleDF = spark.createDataFrame(rowRDD, schema);

        peopleDF.show();

        // Append, partitioned by the "name" column.
        peopleDF.write().mode(SaveMode.Append).partitionBy("name").parquet(outputPath);

        Dataset<Row> peopleReadDF = spark.read().parquet(outputPath);

        peopleReadDF.show();

        peopleReadDF.selectExpr("name", "age+1").show();
    }
}
