package com.atguigu.bigdata.spark.sql

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}

/**
  * Created by undeRdoG on 2021-06-17 23:25
  * 凡心所向，素履以往，生如逆旅，一苇以航。
  */
object Spark01_SparkSQL_Basic {

  /**
    * Demonstrates basic Spark SQL usage:
    *   - creating a DataFrame from a JSON file,
    *   - querying it via SQL (temp view) and via the DSL,
    *   - creating a Dataset from a local collection,
    *   - converting between RDD, DataFrame and Dataset.
    *
    * Reads from "datas/user.json" relative to the working directory.
    */
  def main(args: Array[String]): Unit = {

    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("SparkSQL")
    val spark = SparkSession.builder().config(sparkConf).getOrCreate()

    // Implicit conversions (toDF / toDS / $"col" / 'col syntax) required by
    // the DSL and collection-to-Dataset conversions used below.
    import spark.implicits._

    // Ensure the SparkSession is always released, even if a query fails.
    try {
      // DataFrame: a DataFrame is just a Dataset[Row], i.e. an untyped Dataset.
      val df: DataFrame = spark.read.json("datas/user.json")
      df.show()

      // DataFrame => SQL: register a temp view, then query it with SQL text.
      df.createOrReplaceTempView("user")
      spark.sql("select username from user").show()

      // DataFrame => DSL: column-based API, no SQL string needed.
      df.select("age").show()
      df.select($"age" + 1).show()
      df.select('age + 1).show()

      println("============================================================================================")

      // Dataset: a typed distributed collection, built here from a local Seq.
      val seq = Seq(1, 2, 3, 4)
      val ds: Dataset[Int] = seq.toDS()
      ds.show()

      // RDD <=> DataFrame
      val rdd: RDD[(Int, String, Int)] = spark.sparkContext.makeRDD(List((1, "张三", 30), (2, "李四", 40)))
      val frame: DataFrame = rdd.toDF("id", "name", "age")
      frame.show()
      // DataFrame back to RDD[Row] (kept to demonstrate the reverse conversion).
      val rdd1: RDD[Row] = df.rdd

      // DataFrame <=> Dataset: `as[User]` adds a typed view over the same rows.
      val dataset: Dataset[User] = frame.as[User]
      dataset.show()
      dataset.select("name").show()
      dataset.createOrReplaceTempView("User")
      spark.sql("select avg(age) from User").show()

      // Dataset back to an untyped DataFrame (demonstrates the reverse conversion).
      val dd: DataFrame = dataset.toDF()
    } finally {
      // Stop the underlying SparkContext and free cluster/local resources.
      spark.stop()
    }
  }

  /** Row schema used for the typed Dataset conversion in `main`. */
  case class User(id: Int, name: String, age: Int)
}
