package com.bigdata.exam.cg

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Batch job: reads the 2019 college-entrance-exam CSV, keeps only rows whose
 * `批次` (admission batch) column contains "本科批" (undergraduate batch), and
 * writes the filtered rows back out as a '-'-separated CSV.
 *
 * Input : data/examination2019.csv (comma-separated, with header row)
 * Output: data/gaokao_csv (overwritten on each run, '-'-separated, no header)
 */
object Gaokao {

  def main(args: Array[String]): Unit = {
    // Example cluster submission:
    //   spark-submit --master spark://node01:7077 --deploy-mode cluster --class xxxx param
    // NOTE(review): setMaster("local") hard-codes local mode and overrides any
    // --master flag passed to spark-submit — confirm this is intended for
    // development only before deploying per the comment above.
    val conf: SparkConf = new SparkConf().setMaster("local").setAppName("gaokao")
    val spark: SparkSession = SparkSession.builder()
      .config(conf)
      .getOrCreate()
    // Reduce log noise; setLogLevel upper-cases its argument, so "error" is valid.
    spark.sparkContext.setLogLevel("error")

    try {
      // Read the source CSV; header=true makes the first row the column names,
      // which the SQL below relies on (`批次`).
      val df1: DataFrame = spark
        .read
        .format("csv")
        .option("sep", ",")
        .option("header", true)
        .load("data/examination2019.csv")

      // Register as a temp view so the filter can be expressed in SQL.
      df1.createOrReplaceTempView("gaokao")

      // Keep only undergraduate-batch rows (批次 = admission batch column).
      val df2: DataFrame = spark.sql(
        """
          |select * from gaokao
          |where `批次` like "%本科批%"
          |""".stripMargin)

      // Inspect schema and a sample of the result for debugging.
      df2.printSchema()
      df2.show()

      // Persist the filtered rows; Overwrite replaces any previous output.
      // NOTE(review): no header option on write, so column names are dropped
      // in the output — confirm downstream consumers expect headerless files.
      df2
        .write
        .format("csv")
        .option("sep", "-")
        .mode(SaveMode.Overwrite)
        .save("data/gaokao_csv")
    } finally {
      // Always release the SparkSession/SparkContext, even if the job fails;
      // the original code leaked it.
      spark.stop()
    }
  }
}
