package spark.sql

import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}

/**
 * @Author Jeremy Zheng
 * @Date 2021/3/17 15:54
 * @Version 1.0
 */
/**
 * Demo of the three Spark SQL data abstractions (DataFrame, Dataset, RDD)
 * and the pairwise conversions between them, running on a local master.
 */
object SparkSQL01_basic_Demo1 {
  def main(args: Array[String]): Unit = {

    // Build the Spark SQL runtime environment (local mode, all cores).
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("sparkSQL")
    val spark = SparkSession.builder().config(sparkConf).getOrCreate()
    // Needed for toDF / toDS / as[T] conversions below.
    import spark.implicits._

    // TODO DataFrame: load a DataFrame from a JSON file.
    val df: DataFrame = spark.read.json("dataSet/test.json")
    //df.show()

    // SQL-flavored API: register a temp view, then query it with SQL text.
    df.createOrReplaceTempView("user")
    spark.sql("select avg(age) from user").show()
    // DSL-flavored API: the same data queried through DataFrame methods.
    //df.select("age").show()

    // TODO Dataset: build typed Datasets from in-memory Scala collections.
    val numbers = Seq(1, 2, 3, 4)
    val intDs: Dataset[Int] = numbers.toDS()
    intDs.show()

    val sampleUsers = List(User(1, "zhangsan", 20), User(2, "lisi", 30), User(3, "wangwu", 40))
    val userDs: Dataset[User] = sampleUsers.toDS()
    userDs.show()

    // RDD <=> DataFrame: toDF names the tuple fields; .rdd goes back to Row records.
    val tupleRdd: RDD[(Int, String, Int)] =
      spark.sparkContext.makeRDD(List((1, "zhangsan", 30), (2, "lisi", 40)))
    val tupleDf: DataFrame = tupleRdd.toDF("id", "name", "age")
    val rowRdd: RDD[Row] = tupleDf.rdd

    // DataFrame <=> Dataset: as[User] attaches the case-class type; toDF drops it.
    // NOTE(review): JSON numeric columns are inferred as Long — as[User] with Int
    // fields relies on the file's schema being compatible; verify against test.json.
    val typedFromDf: Dataset[User] = df.as[User]
    val untypedAgain: DataFrame = typedFromDf.toDF()

    // RDD <=> Dataset: map each tuple into the case class, then toDS; .rdd reverses it.
    val typedFromRdd: Dataset[User] =
      tupleRdd.map(t => User(t._1, t._2, t._3)).toDS()

    val userRdd: RDD[User] = typedFromRdd.rdd

    // Tear down the Spark environment.
    spark.close()

  }

  /** Record type used for the typed Dataset examples above. */
  case class User(id: Int, name: String, age: Int)
}



