package day01_create

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}
import org.apache.spark.{SparkConf, SparkContext}

/**
 * @author wsl
 * @version 2022-10-12
 *          SparkSession内部封装了SparkContext
 */
/**
 * Demonstrates conversions between an RDD and a Dataset:
 *  - plain tuple RDD  -> Dataset[(String, Long)]
 *  - case-class RDD   -> Dataset[User]
 *  - Dataset          -> RDD (both directions back)
 *
 * Reads "name,age" lines from sparksql/input/user.txt.
 */
object DFAndDS {
  def main(args: Array[String]): Unit = {

    val conf: SparkConf = new SparkConf().setAppName("spark sql").setMaster("local[*]")
    val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()

    //    val df: DataFrame = spark.read.json("sparksql/input/user.json")
    //    df.show()

    val sc: SparkContext = spark.sparkContext
    // Parse each "name,age" line into a (name, age) pair.
    // NOTE(review): assumes every line has at least two comma-separated fields
    // and that the second parses as a Long — malformed input will fail the job.
    val rdd: RDD[(String, Long)] = sc.textFile("sparksql/input/user.txt")
      .map { line =>
        val fields: Array[String] = line.split(",")
        (fields(0), fields(1).toLong)
      }

    // Implicit conversions required for RDD <-> DF/DS transformations.
    // `spark` here is the user-defined SparkSession value above, not a package.
    import spark.implicits._

    // RDD -> DS: a plain tuple RDD becomes a Dataset of tuples.
    val ds: Dataset[(String, Long)] = rdd.toDS()

    // RDD -> DS via a case class: yields a strongly-typed Dataset[User].
    // (Renamed from `userRdd` — the value is a Dataset, not an RDD.)
    val userDS: Dataset[User] = rdd.map {
      case (name, age) => User(name, age)
    }.toDS()

    // DS -> RDD: bind the results so the conversions are not discarded expressions.
    val tupleRddBack: RDD[(String, Long)] = ds.rdd
    val userRddBack: RDD[User] = userDS.rdd

    spark.stop()
  }
}

case class User(name: String, age: Long)