package com.xf.day06
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.{SparkConf, SparkContext}

object TestRDD2DataFrame {

  /**
   * Demo: convert an RDD of (name, age) tuples into a DataFrame via
   * `spark.implicits._` / `toDF`, then print its schema and rows.
   *
   * Runs entirely locally (`local[1]`); expected output is shown in the
   * comment block at the bottom of this file.
   */
  def main(args: Array[String]): Unit = {

    // Spark configuration. App name fixed from the copy-pasted "WordCount";
    // local[1] runs the job in a single local thread.
    val conf = new SparkConf()
      .setAppName("TestRDD2DataFrame")
      .setMaster("local[1]")
      .set("spark.ui.port", "8080")
      .set("spark.driver.host", "127.0.0.1")

    // Create a SparkSession (the entry point of the DataFrame API)
    // rather than constructing a SparkContext directly.
    val spark = SparkSession.builder()
      .config(conf)
      .getOrCreate()

    // The underlying SparkContext is still needed for parallelize().
    val sc = spark.sparkContext
    sc.setLogLevel("ERROR")

    // Brings the implicit RDD -> DataFrame conversions (toDF) into scope.
    // This import is essential; without it `toDF` does not compile.
    import spark.implicits._

    try {
      // Sample data: (name, age) pairs. Ages are kept as strings on
      // purpose, so both columns are inferred as StringType.
      val people = List(("ZhangSan", "18"), ("WangLi", "19"), ("LiHua", "20"))

      val rdd: RDD[(String, String)] = sc.parallelize(people)

      val df1: DataFrame = rdd.toDF("name", "age")

      df1.printSchema()

      df1.show()
    } finally {
      // BUG FIX: the session was never stopped, leaking the local Spark
      // runtime (UI port, scheduler threads) until JVM exit.
      spark.stop()
    }
  }

}

/**
 * root
 * |-- name: string (nullable = true)
 * |-- age: string (nullable = true)
 *
 * =========================>
 *
 * +--------+---+
 * |    name|age|
 * +--------+---+
 * |ZhangSan| 18|
 * |  WangLi| 19|
 * |   LiHua| 20|
 * +--------+---+
 *
 */
