package sparkSQL.study

import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}

object Spark01_SparkSQL_Basic1 {

    /**
     * Basic SparkSQL demo: builds a local SparkSession and walks through the
     * three-way conversions between RDD, DataFrame and Dataset.
     *
     * Commented-out sections below show reading JSON into a DataFrame and
     * querying it via SQL / DSL, plus building a Dataset from a plain Seq.
     */
    def main(args: Array[String]): Unit = {

        // TODO Build the SparkSQL runtime environment.
        val sparkConf = new SparkConf().setAppName("SparkSQL...").setMaster("local[*]")
        val session = SparkSession.builder().config(sparkConf).getOrCreate()
        // Conversion rules (toDF / toDS / as[T]) come from the session's implicits.
        import session.implicits._

        // TODO Execute the logic below.

        // TODO DataFrame
//        val dataFrame = session.read.json("src\\main\\scala\\data\\user.json")
//        dataFrame.show()

        // DataFrame ==> SQL
//        dataFrame.createOrReplaceTempView("user")
//        session.sql("select *  from user").show
        // DataFrame ==> DSL (DataFrame transformations require the implicit conversion rules to be imported)
//        dataFrame.select("age", "name").show
//        dataFrame.select('age+1).show

        // TODO Dataset (a DataFrame is just a Dataset with a specific generic type)
//        val seq = Seq(1, 2, 3, 4)
//        val ds = seq.toDS()
//        ds.show()

        // RDD <==> DataFrame
        val tupleRdd =
            session.sparkContext.makeRDD(List((1, "zhangsan", 19), (2, "lisi", 20), (3, "wangwu", 20)))
        val userDf = tupleRdd.toDF("id", "name", "age")
        // DataFrame <==> Dataset
        val userDs = userDf.as[User]
        val roundTripDf = userDs.toDF()
        // RDD <==> Dataset
        val typedDs = tupleRdd
            .map { case (id, name, age) => User(id, name, age) }
            .toDS()
        val plainRdd = typedDs.rdd


        // TODO Tear down the environment.
        session.stop()
    }

    // Typed row model used for the DataFrame <==> Dataset conversions above.
    case class User( id:Int, name:String, age:Int )
}
