package cn.doitedu.demos

import org.apache.spark.sql.SparkSession

object ExecutePaln {

  /**
   * Demo entry point: builds a local SparkSession, creates a small RDD with
   * 4 partitions, and runs a `take` action to observe Spark's job execution.
   *
   * NOTE: the object name contains a typo ("Paln" vs "Plan") but is kept
   * as-is because it is the public entry-point name.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    // Fix: appName was an empty string, which makes the application
    // impossible to identify in the Spark UI / history server.
    val spark = SparkSession.builder()
      .appName("ExecutePlanDemo")
      .master("local")
      .getOrCreate()

    /*import spark.implicits._
    spark.createDataset(Seq(("a",2),("b",2),("a",4))).toDF("x","y").createTempView("tmp")


    spark.sql(
      """
        |select
        |x,
        |sum(y) as cnt
        |from tmp
        |group by x
        |having count(1)>1
        |
        |""".stripMargin).explain(true)*/

    // Parallelize 9 elements across 4 partitions; take(5) is an action that
    // scans partitions incrementally until it has collected 5 elements.
    val rdd = spark.sparkContext.makeRDD(Seq(2, 3, 4, 5, 6, 7, 8, 9, 10), 4)
    // Fix: the result of take(5) was silently discarded — print it so the
    // demo actually shows its output.
    val firstFive = rdd.take(5)
    println(firstFive.mkString("take(5) => [", ", ", "]"))

    spark.close()
  }

}
