package com.catmiao.spark.sql.test

import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}

/**
 * @title: Spark01_sql_test01
 * @projectName spark_study
 * @description: TODO
 * @author ChengMiao
 * @date 2024/3/20 10:44
 */
object Spark01_sql_test01 {

  /**
   * Entry point: demonstrates round-trip conversions between RDD,
   * Dataset and DataFrame using Spark SQL in local mode.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    // Run locally, using as many worker threads as logical cores.
    val sparkConf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("spark_sql_01_test")

    // Create the SparkSession — the entry point for Spark SQL.
    val sparkSession: SparkSession = SparkSession.builder().config(sparkConf).getOrCreate()
    // Conversions such as rdd.toDS() / list.toDS() require the
    // session-bound implicit encoders brought in by this import.
    import sparkSession.implicits._


//    // DataFrame
//    val df: DataFrame = sparkSession.read.json("datas/sql/user.json")
    // DF: SQL style
//    df.createOrReplaceTempView("user")
//
//    sparkSession.sql("select * from user").show
//
//    // DF: DSL style
//    df.select("username","age").show
//    df.select($"username",$"age"+1).show


//    // DataSet
//    val list = List(1, 2, 3, 4)
//    val ds: Dataset[Int] = list.toDS()
//    ds.show
//
//    // DataSet => DataFrame
//    val df1: DataFrame = ds.toDF("id")


    try {
      // RDD of Person records built from an in-memory list.
      val rdd: RDD[Person] = sparkSession.sparkContext.makeRDD(List(
        Person("kirito", 18, "男"),
        Person("Asuna", 20, "女"),
        Person("Monster", 30, "男")
      ))

      // RDD => Dataset (uses the implicit Encoder[Person] from implicits._).
      val ds: Dataset[Person] = rdd.toDS()
      ds.show
      // Dataset => RDD
      val rdd1: RDD[Person] = ds.rdd
    } finally {
      // Guarantee the session is stopped even if a Spark action above fails.
      sparkSession.close()
    }
  }

}

/**
 * Immutable record representing a person used in the Spark SQL demos.
 *
 * @param name the person's name
 * @param age  the person's age in years
 * @param sex  the person's sex
 */
final case class Person(name: String, age: Long, sex: String)
