package com.xx.sparkdemo

import org.apache.spark.sql.{Dataset, Row, SparkSession}

/**
 *
 * @author tzp
 * @since 2021/7/1
 */
object LocalTest {
  /**
   * Local smoke test: builds a tiny CSV dataset in memory, reads it through the
   * DataFrame CSV reader (header + schema inference), prints the schemas, then
   * groups the rows by their `id` column and prints the group sizes.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local")
      .getOrCreate()
    val sc = spark.sparkContext
    import spark.implicits._

    // Raw CSV text. `linesIterator` (not `.lines`) avoids the JDK 11+ clash where
    // java.lang.String.lines() returns a java.util.stream.Stream and breaks `.toList`.
    val data =
      """
        |id,name,age
        |1,a,18
        |2,b,19
        |1,c,20
        |""".stripMargin.linesIterator.toList
    val ds = sc.parallelize(data).toDS()
    // csv(Dataset[String]) parses the in-memory lines; the header row is consumed
    // and inferSchema turns `id`/`age` into integer columns.
    val df: Dataset[Row] = spark.read.option("header", true).option("inferSchema", true).csv(ds)
    df.show()

    println(ds.schema)
    println(df.schema)
    println(ds.rdd.toDebugString)

    // FIX: the original grouped on column 1 (`name`, a String) via getAs[Int](1),
    // which throws ClassCastException on use. Column 0 (`id`) is the Int key —
    // and the only column with duplicates, so clearly the intended grouping.
    // Also: the original bound the RDD to `x` and discarded it; print the result.
    val byId = df.rdd.groupBy(row => row.getAs[Int](0))
    byId.collect().foreach { case (id, rows) =>
      println(s"id=$id -> ${rows.size} row(s)")
    }

    // Release the local Spark context and its threads.
    spark.stop()
  }
}
