package cn.doitedu.df_rdd

import cn.doitedu.util.SparkUtil
import org.apache.spark.sql.{DataFrame, Dataset, Row}

/**
 * @Date 22.4.12
 * @Created by HANGGE
 * @Description
 */
object C02_Demo02 {

  /**
   * Demo: read a CSV into a DataFrame, project a single typed column as a
   * Dataset[String], convert it back to a DataFrame, and display the result.
   */
  def main(args: Array[String]): Unit = {
    // Obtain the shared SparkSession from the project helper.
    val session = SparkUtil.getSession
    // Needed for the implicit Encoder[String] used by df.map below.
    import session.implicits._

    // Create the DataFrame: first row supplies column names, column types
    // are inferred from the data.
    // NOTE: forward slashes work on every OS (Hadoop path handling); the
    // original Windows-only "data\\scores\\Scores.csv" broke on Linux/macOS.
    val df: DataFrame = session.read
      .option("header", true)
      .option("inferSchema", true)
      .csv("data/scores/Scores.csv")

    // map over a DataFrame returns a typed Dataset; here we extract the
    // "name" column of each Row as a String.
    val ds: Dataset[String] = df.map(row => row.getAs[String]("name"))

    // Convert the typed Dataset back to an untyped DataFrame.
    val frame: DataFrame = ds.toDF()

    // Spark transformations are lazy: without an action the pipeline above
    // never runs. show() materializes and prints the result.
    frame.show()

    // Release the session's resources before the JVM exits.
    session.stop()
  }

}
