package com.project.export

import org.apache.spark.SparkConf
import org.apache.spark.sql.{SaveMode, SparkSession}

/**
 * Batch export job: computes the max/min `total_score` per major
 * (`professional_name`) from the Hive DWD table
 * `final_project.dwd_postgraduate_information` and overwrites the
 * MySQL table `max_min_scores` with the result via JDBC.
 *
 * Fixes in this revision:
 *  - removed the trailing ';' inside the SQL text — `SparkSession.sql`
 *    parses exactly one statement and rejects a trailing semicolon on
 *    several Spark versions;
 *  - the session is now stopped in a `finally` block so local runs
 *    release the Spark context even when the job fails.
 */
object max_min_scores {
  def main(args: Array[String]): Unit = {
    // Spark configuration; local[*] runs on all cores of this machine.
    val sparkConf = new SparkConf()
      .setMaster("local[*]")

    // SparkSession with Hive support so `final_project.*` tables resolve
    // against the Hive metastore.
    val sparkSession = SparkSession
      .builder()
      .config(sparkConf)
      .enableHiveSupport() // required to read the Hive-managed DWD table
      .appName("postgraduateInformation")
      .getOrCreate()

    try {
      // NOTE(review): filtering with != '-' implies total_score is stored
      // as a string, so max()/min() compare lexicographically rather than
      // numerically — confirm the column type and add an explicit numeric
      // cast if scores must be compared as numbers.
      val df_all = sparkSession.sql(
        """
          |select  professional_name,
          |        max(total_score) as max_score,
          |        min(total_score) as min_score
          |from final_project.dwd_postgraduate_information
          |group by professional_name
          |HAVING max_score != '-' AND min_score != '-'
          |order by professional_name
          |limit 100
          |""".stripMargin
      )

      // Overwrite the MySQL target table with the aggregated result.
      // SECURITY(review): JDBC credentials are hard-coded; move them to
      // configuration (e.g. spark-submit --conf or a properties file).
      df_all.write
        .format("jdbc")
        .option("url", "jdbc:mysql://192.168.75.104:3306/finalProject")
        .option("user", "root")
        .option("password", "123456")
        .option("dbtable", "max_min_scores")
        .mode(SaveMode.Overwrite)
        .save()
    } finally {
      sparkSession.stop() // release local Spark resources deterministically
    }
  }
}
