package com.zhang.sparksql_1

import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}

/**
 * @title:
 * @author: zhang
 * @date: 2021/12/10 19:48 
 */
/**
 * Demo of the three Spark SQL data abstractions and the conversions between them:
 * RDD <=> DataFrame <=> Dataset. Reads `datas/user.json`, runs SQL and DSL
 * queries against it, then round-trips a small in-memory collection through
 * each representation, printing results along the way.
 */
object SparkSQl_Basic_01 {
  def main(args: Array[String]): Unit = {
    // Build the SparkSession (entry point for Spark SQL); local[*] uses all cores.
    val conf = new SparkConf().setMaster("local[*]").setAppName("spark-sql")
    val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()
    // Brings in toDF/toDS and the $"col" column syntax; must import from this session instance.
    import spark.implicits._

    // --- Create a DataFrame from a JSON file ---
    val df: DataFrame = spark.read.json("datas/user.json")
    df.show()

    // SQL style: register a temp view, then query it with plain SQL.
    df.createOrReplaceTempView("user")
    spark.sql("select * from user").show()
    spark.sql("select avg(age) from user").show()

    // DSL style: column expressions via the $ interpolator.
    df.select($"age" + 100).show()

    // --- Dataset ---
    // Note: DataFrame is just an alias — type DataFrame = Dataset[Row]
    val list = List(1, 2, 3, 4)
    val ds: Dataset[Int] = list.toDS()
    ds.show()

    // --- RDD <=> DataFrame ---
    // `val` (was `var`): the reference is never reassigned.
    val rdd = spark.sparkContext.makeRDD(List((1, "zhangsan", 30), (2, "lisi", 23)))
    val df1: DataFrame = rdd.toDF("id", "name", "age")
    df1.show()
    val rdd1: RDD[Row] = df1.rdd
    rdd1.collect().foreach(println)

    // --- DataFrame <=> Dataset ---
    println(" DataFrame <=> DataSet")
    // .as[User] requires column names to match the case-class fields (id, name, age).
    val ds1: Dataset[User] = df1.as[User]
    ds1.show()
    ds1.toDF().show()

    // --- RDD <=> Dataset ---
    val ds2: Dataset[User] = rdd.map {
      case (id, name, age) => User(id, name, age)
    }.toDS()

    // Demonstrates Dataset -> RDD; value is intentionally unused in this demo.
    val userRDD: RDD[User] = ds2.rdd

    // Release the session and its underlying SparkContext.
    spark.stop()
  }

  /** Row model matching the columns used above: (id, name, age). */
  case class User(id: Int, name: String, age: Int)

}
