package com.sinopec.pi.sparkguide

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}

/** Immutable detail record for a [[Person]]: an age and an e-mail address.
  * `final` because case classes should not be extended (equals/hashCode break).
  */
final case class Detail(age: Int, email: String)

/** Immutable person record with a nested [[Detail]]; the nested field is what
  * the demo queries via `detail.age`. `final`: case classes should not be extended.
  */
final case class Person(name: String, detail: Detail)

object DataFrameDemo {

    /** Demonstrates the three Spark data abstractions — RDD, Dataset and
      * DataFrame — over the same generated [[Person]] data, then runs the
      * equivalent query through SQL on a temporary view.
      *
      * Fix over the original: the SparkSession is now stopped in a `finally`
      * block, so its web UI and worker threads are released even if a stage
      * throws.
      */
    def main(args: Array[String]): Unit = {
        val spark = SparkSession
            .builder()
            .appName("Spark SQL basic example")
            .master("local[2]")
            .getOrCreate()
        // This import is needed to use the $-notation
        import spark.implicits._

        try {
            // RDD: 200 generated people, spread over 5 partitions.
            val rdd: RDD[Person] = spark.sparkContext.parallelize(
                (1 to 200).map(idx => Person("Andy_" + idx, Detail(idx, "mail_" + idx + "@com.cn"))), 5)

            println(rdd.filter(p => p.detail.age > 10).take(20).mkString("Array(", ", ", ")"))

            // Dataset: typed API — filter with a plain Scala predicate.
            val ds: Dataset[Person] = rdd.toDS()
            ds.filter(p => p.detail.age > 10).show(20)
            // ds.filter($"detail.age" > 10).show(20)

            // DataFrame: the schema is inferred from the case class via reflection.
            val peopleDF: DataFrame = rdd.toDF()

            // Column expressions support nested fields.
            peopleDF.filter($"detail.age" > 10).show(20)

            // Caveat: a non-existent column name is NOT caught at compile time.
            //        peopleDF.filter($"tt" > 10).show(20)

            // Run the same query through SQL over a temporary view.
            peopleDF.createOrReplaceTempView("people")
            spark.sql("SELECT name FROM people where detail.age > 10").show(20)

            // wait to see the web ui
            Thread.sleep(1000000)
        } finally {
            // Always release the session (web UI, scheduler threads, local executors).
            spark.stop()
        }
    }
}
