package io.sqrtqiezi.spark.dataframe

import org.apache.spark.sql.SparkSession

/**
 * Small demo app: reads the 2015 flight-summary CSV, then shows the same
 * aggregation expressed three ways (sort/take, SQL over a temp view, and the
 * DataFrame API) while printing the physical plans for comparison.
 *
 * Expects the CSV at `data/flight-data/csv/2015-summary.csv` relative to the
 * working directory.
 */
object TestRead {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .master("local")
      .appName("flight data")
      .getOrCreate()

    // Schema is inferred from the data; the first line is treated as a header.
    val flightData2015 = spark.read
      .option("inferSchema", "true")
      .option("header", "true")
      .csv("data/flight-data/csv/2015-summary.csv")

    println(flightData2015.schema)

    flightData2015.show()

    // Keep the shuffle small for this local demo (default is 200 partitions).
    spark.conf.set("spark.sql.shuffle.partitions", "5")

    val result = flightData2015.sort("count").take(2)
    result.foreach(println)

    // explain() returns Unit and prints the plan itself; wrapping it in
    // println would print a spurious "()" after the plan.
    flightData2015.sort("count").explain()
    //    Thread.sleep(100000)

    //    flightData2015.createOrReplaceGlobalTempView("flight_data_2015")
    flightData2015.createOrReplaceTempView("flight_data_2015")
    val sqlWay = spark.sql(
      """
        |select dest_country_name, count(1)
        |from flight_data_2015
        |group by dest_country_name""".stripMargin)

    val dataFrameWay = flightData2015
      .groupBy("dest_country_name")
      .count()

    // Both formulations should compile to the same physical plan.
    sqlWay.explain()
    dataFrameWay.explain()

    spark.sql("select max(count) as count from flight_data_2015").show()

    // Release the session's resources before exiting.
    spark.stop()
  }
}
