package com.spark.sql

import org.apache.spark.sql.SparkSession

object DF2Dataset {

  /** Default CSV input used when no path argument is supplied. */
  private val DefaultInputPath =
    "file:///Users/username/workspace_code/learn/spark-learn/datasets/infos.csv"

  /**
   * Demonstrates converting an untyped DataFrame into a strongly-typed
   * `Dataset[Info]` and projecting columns both the typed and untyped way.
   *
   * @param args optional first element: path/URI of the input CSV
   *             (falls back to [[DefaultInputPath]] when absent)
   */
  def main(args: Array[String]): Unit = {
    // Generalized: the input file used to be hard-coded; callers may now pass it.
    val inputPath = args.headOption.getOrElse(DefaultInputPath)

    val spark = SparkSession.builder()
      .appName("DF2DS")
      .master("local[2]")
      .getOrCreate()

    try {
      // Read the CSV into a DataFrame; inferSchema yields Int columns so the
      // row layout lines up with the fields of the Info case class.
      val df = spark.read
        .option("header", "true")
        .option("inferSchema", "true")
        .csv(inputPath)
      df.show()

      // DataFrame -> Dataset conversion needs the implicit encoders in scope.
      import spark.implicits._
      val ds = df.as[Info]
      ds.show()

      // Typed projection of two fields via map ...
      ds.map(info => (info.name, info.age)).show()

      // ... which is equivalent to an untyped select of the same columns.
      ds.select(ds.col("name"), ds.col("age")).show()
    } finally {
      // Always release the SparkSession, even if a show() above throws.
      spark.stop()
    }
  }

  /** Schema of one CSV row: id,name,age,height. */
  case class Info(id: Int, name: String, age: Int, height: Int)
}
