package day08

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}

/**
 * 2.6 Creating a SparkSQL program from the IDE.
 *
 * Demonstrates SQL-style and DSL-style queries on a DataFrame read from a
 * JSON file, plus every conversion between RDD, DataFrame and Dataset.
 *
 * NOTE: `spark` here is the same SparkSession that spark-shell exposes
 * under the name 'spark' (alongside 'sc' for the SparkContext).
 */
object Spark_SQL_3 {
  def main(args: Array[String]): Unit = {

    // 1. Build the context configuration (local mode, all cores).
    val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("SparkSQL_demo")

    // 2. Create the SparkSession from that configuration.
    val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()

    // Required for the .toDF / .toDS / .as[T] conversions used below.
    import spark.implicits._

    // Guarantee the session is released even if any stage below fails.
    try {
      // 3. Read a JSON file into a DataFrame (schema is inferred).
      val df: DataFrame = spark.read.json("input/user.json")
      df.show()
      println("--------------------------------------------")

      // SQL-style syntax: register a temporary view and query it with SQL.
      df.createOrReplaceTempView("user")
      spark.sql("select avg(age) from user").show()
      println("------------------------------------------")

      // DSL-style syntax: column selection through the DataFrame API.
      df.select("username", "age").show()
      println("------------------------------------------")

      // Conversions between RDD, DataFrame and Dataset.
      val sc: SparkContext = spark.sparkContext
      val rdd: RDD[(Int, String, Int)] =
        sc.makeRDD(List((1, "qiaofeng", 30), (2, "xuzhu", 28), (3, "duanyu", 20)))

      // RDD -> DataFrame: name the tuple columns explicitly.
      val rddToDataFrame: DataFrame = rdd.toDF("id", "username", "age")
      rddToDataFrame.show()
      println("-----------------------------------------")

      // RDD -> Dataset: map each tuple onto the typed User case class.
      val rddToDataSet: Dataset[User] =
        rdd.map { case (id, username, age) => User(id, username, age) }.toDS()
      rddToDataSet.show()
      println("-----------------------------------------")

      // DataFrame -> RDD: rows come back as generic Row objects whose
      // getXXX accessors fetch field values by position — similar to a
      // JDBC result set, except the index starts at 0, not 1.
      val dfToRdd: RDD[Row] = rddToDataFrame.rdd
      dfToRdd.foreach(row => println(row.getInt(0) + "->" + row.getString(1) + "->" + row.getInt(2)))
      println("-----------------------------------------")

      // DataFrame -> Dataset: attach the User type via its encoder.
      val dfToDs: Dataset[User] = rddToDataFrame.as[User]
      dfToDs.show()
      println("-----------------------------------------")

      // Dataset -> RDD: already typed, so elements are User instances.
      val dsToRdd: RDD[User] = dfToDs.rdd
      dsToRdd.foreach(println)
      println("-----------------------------------------")

      // Dataset -> DataFrame: drops the static type back to Row.
      val dsToDF: DataFrame = dfToDs.toDF()
      dsToDF.show()
    } finally {
      // Close the SparkSession in all cases (success or failure).
      spark.stop()
    }
  }
}

case class User(id: Int, username: String, age: Int)















