package com.simoniu.sparkdemo.javademo.sql;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import scala.Tuple2;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * Demo: convert an RDD to a DataFrame programmatically (by building a schema at runtime).
 * Created by simoniu
 */
public class RddToDataFrameByProgramJavaDemo {

    /**
     * Entry point: builds an RDD of (name, age) tuples, converts it to a DataFrame
     * using a programmatically constructed schema, runs a SQL query against it,
     * and prints the matching rows.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        SparkConf conf = new SparkConf();
        conf.setMaster("local");
        // SparkSession wraps both the SparkContext and the SQLContext.
        SparkSession sparkSession = SparkSession.builder()
                .appName("RddToDataFrameByProgramJava")
                .config(conf)
                .getOrCreate();

        // sparkSession.sparkContext() returns the Scala SparkContext;
        // wrap it so we can use the Java-friendly API.
        JavaSparkContext sc = JavaSparkContext.fromSparkContext(sparkSession.sparkContext());
        Tuple2<String, Integer> t1 = new Tuple2<>("jack", 28);
        Tuple2<String, Integer> t2 = new Tuple2<>("tom", 20);
        Tuple2<String, Integer> t3 = new Tuple2<>("jessic", 30);
        Tuple2<String, Integer> t4 = new Tuple2<>("david", 18);

        JavaRDD<Tuple2<String, Integer>> dataRDD = sc.parallelize(Arrays.asList(t1, t2, t3, t4));

        // Build the RDD of Rows required by createDataFrame.
        JavaRDD<Row> rowRDD = dataRDD.map(tup -> RowFactory.create(tup._1, tup._2));

        // Declare the schema (column names and types) programmatically.
        List<StructField> structFieldList = new ArrayList<>();
        structFieldList.add(DataTypes.createStructField("name", DataTypes.StringType, true));
        structFieldList.add(DataTypes.createStructField("age", DataTypes.IntegerType, true));
        StructType schema = DataTypes.createStructType(structFieldList);
        // Combine the Row RDD with the schema into a DataFrame.
        Dataset<Row> stuDf = sparkSession.createDataFrame(rowRDD, schema);

        stuDf.createOrReplaceTempView("student");
        // Run a SQL query against the registered temp view.
        Dataset<Row> resDf = sparkSession.sql("select name,age from student where age > 20");

        // Convert the result DataFrame back to an RDD; note it must be a JavaRDD here.
        JavaRDD<Row> resRDD = resDf.javaRDD();

        List<Tuple2<String, Integer>> resList = resRDD
                .map(row -> new Tuple2<>(row.getString(0), row.getInt(1)))
                .collect();

        for (Tuple2<String, Integer> tup : resList) {
            System.out.println(tup);
        }
        sparkSession.stop();
    }
}
