package com.hxk

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{SaveMode, SparkSession}

object FlightAnalysis {
  def main(args: Array[String]): Unit = {

    // 1. SparkSession is the single entry point for the DataFrame API.
    val session = SparkSession.builder()
      .appName("FlightAnalysis")
      .master("local")
      .getOrCreate()
    import session.implicits._

    // Alternative approach (kept for reference): let Spark read the CSV files
    // directly and infer the schema.
    // val airportsDF = spark.read
    //   .option("header", "true")
    //   .option("inferSchema", "true")
    //   .csv("data/airports.csv")
    // airportsDF.printSchema()
    // airportsDF.show(truncate = false, numRows = 100)
    // val flightsDF = spark.read.option("inferSchema", "true").csv("data/2008.csv")
    // flightsDF.printSchema()
    // flightsDF.show()

    // Instead, build the DataFrames from case-class RDDs: parse each raw CSV
    // line into an Option and let flatMap silently drop lines that fail to parse.
    val flightLines: RDD[String] = session.sparkContext.textFile("data/2008.csv")
    val flights: RDD[Flight] = flightLines.flatMap(Flight.parse)
    val flightsDF = session.createDataFrame(flights)
    // flightsDF.printSchema()
    // flightsDF.show(false)

    val airportLines: RDD[String] = session.sparkContext.textFile("data/airports.csv")
    val airports: RDD[Airport] = airportLines.flatMap(Airport.parse)
    val airportsDF = session.createDataFrame(airports)
    // airportsDF.printSchema()
    // airportsDF.show(false)

    // --- Simple business queries ---

    // Query 1: flights that were cancelled.
    val canceledFlights = flightsDF.filter(flightsDF("canceled") > 0)
    // canceledFlights.show()

    // Query 2: number of cancelled flights per month.
    // NOTE(review): this query is never materialized (its show() is commented
    // out); Spark's laziness means it costs nothing at runtime.
    val monthCanceledCount =
      canceledFlights
        .groupBy($"date.month")
        .count()
        .orderBy($"date.month".asc)
    // monthCanceledCount.show(false)
    /*
      Expected output:
      +-----+-----+
      |month|count|
      +-----+-----+
      |1    |355  |
      |2    |206  |
      |3    |85   |
      |4    |158  |
      |5    |127  |
      |6    |104  |
      |7    |98   |
      |8    |154  |
      |9    |67   |
      |10   |93   |
      |11   |65   |
      |12   |627  |
      +-----+-----+
     */

    // Query 3: flight counts between every origin/destination pair.
    // Lower the shuffle-partition count so the small local job doesn't fan out
    // into 200 tiny output files; the setting is read at execution time.
    session.conf.set("spark.sql.shuffle.partitions", 2)
    val airportsFlightsCount =
      flightsDF
        .select($"origin", $"dest")
        .groupBy($"origin", $"dest")
        .count()
        .orderBy($"count".desc, $"origin", $"dest")
    airportsFlightsCount.write.mode(SaveMode.Overwrite).csv("data/tem")

    session.stop()
  }
}
