package com.yanggu.spark.sql

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

object SparkSQL01_Demo {

  /**
   * Demonstrates the conversions between RDD, DataFrame and DataSet
   * in Spark SQL, plus the SQL and DSL query styles.
   */
  def main(args: Array[String]): Unit = {

    // 1. Build the Spark configuration (local mode, all available cores).
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("SparkSQL01_Demo")

    // 2. Create the SparkSession entry point.
    val spark = SparkSession.builder().config(sparkConf).getOrCreate()

    try {
      // RDD => DataFrame => DataSet conversions require these implicit
      // conversion rules; note that `spark` here is the SparkSession
      // value name, not a package.
      import spark.implicits._

      // Read a JSON file into a DataFrame.
      val df = spark.read.json("input/test.json")

      //df.show()

      // SQL style: register a temporary view and query it with SQL text.
      df.createOrReplaceTempView("user")
      //spark.sql("select avg(age) from user").show()

      // DSL style: column-based selection on the DataFrame itself.
      //df.select("username", "age").show()

      // RDD => DataFrame => DataSet
      // RDD of (id, name, age) tuples.
      val rdd = spark.sparkContext.makeRDD(List((1, "张三", 30), (2, "李四", 20), (3, "王五", 40)))

      // DataFrame: attach column names to the tuple fields.
      val dataFrame = rdd.toDF("id", "name", "age")
      //dataFrame.show()

      // DataSet: bind the rows to the User case class.
      val dataSet = dataFrame.as[User]
      //dataSet.show()

      // DataSet => DataFrame => RDD
      val df2 = dataSet.toDF()

      // The RDD element type is Row, which exposes getXxx(i) accessors,
      // similar to a JDBC result set — but the index is 0-based.
      val rdd2 = df2.rdd
      rdd2.foreach(row => println(row.getString(1)))

      // RDD => DataSet: map the tuples to the case class first.
      val dataSet2 = rdd.map {
        case (id, name, age) => User(id, name, age)
      }.toDS()

      // DataSet => RDD (result intentionally unused — demo only).
      dataSet2.rdd
    } finally {
      // Always release the session, even if one of the steps above fails.
      spark.stop()
    }

  }

}
/** Row model for the demo data: one record per person (id, name, age). */
final case class User(id: Int, name: String, age: Int)
