package com.basic

import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession


object RDD2DF {
  /**
   * Converts an RDD of case-class objects to a DataFrame.
   *
   * Relies on `import spark.implicits._` to provide the `toDF` conversion
   * for an `RDD` of Product types. `User` is assumed to be a case class
   * (name: String, age: Int) declared elsewhere in `com.basic` —
   * NOTE(review): confirm its definition matches the fields used here.
   */
  def demo01(): Unit = {
    val spark: SparkSession = SparkSession.builder()
      .master("local[2]")
      .appName("RDD2DF")
      .getOrCreate()
    import spark.implicits._
    val sc: SparkContext = spark.sparkContext
    //        val rdd = sc.parallelize(1 to 10)

    val rdd = sc.parallelize(Array(User("lisi", 10), User("zs", 20), User("ww", 15)))

    // Column names override the case-class field names.
    rdd.toDF("name", "age").show

    spark.stop()
  }

  /**
   * Converts an RDD of basic (tuple) types to a DataFrame.
   */
  def demo02(): Unit = {
    val spark: SparkSession = SparkSession.builder()
      .master("local[2]")
      .appName("RDD2DF")
      .getOrCreate()
    import spark.implicits._
    val sc: SparkContext = spark.sparkContext

    val rdd: RDD[(String, Int)] = sc.parallelize(("lisi", 10) :: ("zs", 20) :: Nil)

    // BUG FIX: was toDF("name", "name"), which gave both columns the same
    // name and made name-based column references ambiguous. The second
    // tuple element is the age, so label it "age" (consistent with demo01).
    rdd.toDF("name", "age").show

    spark.stop()
  }

  /** Entry point: runs one of the demos. */
  def main(args: Array[String]): Unit = {

    //    demo01() // case-class objects
    demo02() // basic (tuple) types
  }
}

