package com.gt.sql

import com.alibaba.fastjson.{JSON, JSONObject}
import com.gt.SCUtil
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.functions._
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}

import java.lang

case class User(name :String, age: Long)
/**
 * Demonstrates the mutual conversions among RDD, DataFrame and Dataset:
 * parses a JSON file into typed records, then shows each direction of
 * conversion along with the resulting schema.
 */
object DF_DS_RDD {
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SCUtil.buildLocalSparkSession()
    // Required for toDF / toDS / as[T] conversions below.
    import spark.implicits._

    // --- RDD -> DataFrame / Dataset: one JSON object per input line ---
    val sourceRdd: RDD[String] = spark.sparkContext.textFile("data/user.json")
    val userRdd: RDD[User] = sourceRdd.map { line =>
      val obj: JSONObject = JSON.parseObject(line)
      User(obj.getString("name"), obj.getLong("age"))
    }
    val userDf: DataFrame = userRdd.toDF()
    val userDs: Dataset[User] = userRdd.toDS()

    // --- DataFrame -> RDD / Dataset ---
    val rowRdd: RDD[Row] = userDf.rdd
    val typedDs: Dataset[User] = userDf.as[User]

    // --- Dataset -> RDD / DataFrame ---
    val backToRdd: RDD[User] = userDs.rdd
    val backToDf: DataFrame = userDs.toDF()

    userDf.show()
    userDf.printSchema()

    /**=================================*/
    // Tuple RDD variant: columns default to _1/_2 unless names are supplied.
    val tupleRdd: RDD[(String, lang.Long)] = sourceRdd.map { line =>
      val obj: JSONObject = JSON.parseObject(line)
      (obj.getString("name"), obj.getLong("age"))
    }

    val unnamedDf: DataFrame = tupleRdd.toDF()
    unnamedDf.show()
    unnamedDf.printSchema()

    // Same data, but with explicit column names.
    val namedDf: DataFrame = tupleRdd.toDF("aa", "bb")
    namedDf.show()
    namedDf.printSchema()

    spark.close()
  }

}
