package com.hngy.java.sql;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

import scala.Tuple2;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * Example: convert an RDD to a DataFrame programmatically.
 *
 * <p>Instead of relying on a bean class, the schema ({@link StructType}) is
 * assembled at runtime from {@link StructField}s — which means the column
 * metadata could just as easily come from an external source. The DataFrame
 * is then queried with SQL and the result is converted back to an RDD.
 */
public class RddToDataFrameByProgramJava {

    public static void main(String[] args) {
        SparkConf conf = new SparkConf();
        conf.setMaster("local");

        // SparkSession wraps both the SparkContext and the SqlContext.
        SparkSession sparkSession = SparkSession.builder()
                .appName("RddToDataFrameByProgramJava")
                .config(conf)
                .getOrCreate();
        JavaSparkContext sc = JavaSparkContext.fromSparkContext(sparkSession.sparkContext());

        // Sample (name, age) pairs to seed the RDD.
        Tuple2<String, Integer> t1 = new Tuple2<>("jack", 18);
        Tuple2<String, Integer> t2 = new Tuple2<>("tom", 20);
        Tuple2<String, Integer> t3 = new Tuple2<>("jessic", 30);
        JavaRDD<Tuple2<String, Integer>> dataRdd = sc.parallelize(Arrays.asList(t1, t2, t3));

        // Assemble the Row RDD. Spark's Function is a functional interface,
        // so a lambda replaces the verbose anonymous inner class.
        JavaRDD<Row> rowRdd = dataRdd.map(tup -> RowFactory.create(tup._1, tup._2));

        // Declare the schema programmatically. This metadata could be loaded
        // dynamically from an external source, which is the point of this approach.
        List<StructField> structFields = new ArrayList<>();
        structFields.add(DataTypes.createStructField("name", DataTypes.StringType, true));
        structFields.add(DataTypes.createStructField("age", DataTypes.IntegerType, true));
        StructType schema = DataTypes.createStructType(structFields);

        // Build the DataFrame from the Row RDD plus the runtime schema.
        Dataset<Row> stuDf = sparkSession.createDataFrame(rowRdd, schema);

        // Register a temp view so the data can be queried with SQL.
        stuDf.createOrReplaceTempView("student");

        // Run a SQL query against the view.
        Dataset<Row> resDf = sparkSession.sql("select name,age from student where age > 18");

        // DataFrame back to RDD, then collect the rows to the driver as tuples.
        List<Tuple2<String, Integer>> resList = resDf.javaRDD()
                .map(row -> new Tuple2<>(
                        row.getAs("name").toString(),
                        Integer.parseInt(row.getAs("age").toString())))
                .collect();

        for (Tuple2<String, Integer> tup : resList) {
            System.out.println(tup);
        }

        sparkSession.stop();
    }
}
