package scala
import org.apache.spark.sql.functions.avg

import java.util.logging.{Level, Logger}

object job3 {
  /**
   * Job entry point: reads the room listings CSV from HDFS, computes the
   * average unit price per apartment layout ("户型结构"), and writes the
   * result (ascending by average price) to the MySQL table `sparkproject.job3`.
   *
   * @param args command-line arguments (currently unused). The default value
   *             keeps any existing no-arg `job3.main()` call sites compiling,
   *             while restoring the JVM-visible `main(String[])` signature
   *             that spark-submit / the JVM launcher requires — the previous
   *             zero-parameter `main()` was not a runnable entry point.
   */
  def main(args: Array[String] = Array.empty): Unit = {
//    Logger.getLogger("org").setLevel(Level.OFF)
    val sparkCreate = new SparkCreate // renamed: the original `val SparkCreate` shadowed the class name
    val spark = sparkCreate.initializeSparkSession()
    try {
      // Read the source file as CSV; first line is treated as the header row.
      // NOTE(review): no inferSchema — columns load as strings; Spark's avg()
      // implicitly casts the price column to double, so this still works.
      val data = spark.read.format("csv")
        .option("header", "true")
        .load("hdfs://niit-master:9000/user/niit/Input/room.txt")

      // Average unit price (元/平方米) grouped by layout, sorted ascending
      // by the computed average.
      val priceByStructure = data
        .groupBy("户型结构")
        .agg(avg("单价（元/平方米）").alias("平均单价"))
        .orderBy("平均单价")
        .select("户型结构", "平均单价")
        .coalesce(1) // single partition so the JDBC write is one batch

      // Persist to MySQL, replacing any previous run's output.
      val mysqlConf = new mysql
      priceByStructure.write.mode("overwrite")
        .jdbc(mysqlConf.URL, "sparkproject.job3", mysqlConf.prpo)
    } finally {
      // Always release the SparkSession, even when a stage fails —
      // the original code leaked the session on any exception.
      spark.stop()
    }
  }
}
