package DataAnalysis_qcl

import org.apache.spark.sql.SparkSession

import java.util.Properties

object DataAnalysis05 {
  /**
   * For each district, count how many listings have a rent above, and how many
   * below, the city-wide (Beijing) average rent.
   *
   * Reads the `data_processing` table from MySQL, aggregates, then writes the
   * result back to MySQL (`data_analysis05`) and to HDFS as a single CSV file.
   *
   * NOTE(review): listings whose rent exactly equals the average are counted in
   * neither bucket — this matches the original `>` / `<` comparisons.
   */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("DataAnalysis05")
      .enableHiveSupport()
      .getOrCreate()

    // JDBC connection properties for the source/sink MySQL instance.
    // NOTE(review): `com.mysql.jdbc.Driver` is the legacy Connector/J class name;
    // with Connector/J 8+ the canonical name is `com.mysql.cj.jdbc.Driver`.
    // Left unchanged to stay compatible with the jar currently on the classpath.
    val mysql_connect = new Properties()
    mysql_connect.setProperty("user", "root")
    mysql_connect.setProperty("password", "123456")
    mysql_connect.setProperty("driver", "com.mysql.jdbc.Driver")

    // Expose the processed listings table as a temp view for Spark SQL.
    spark.read.jdbc("jdbc:mysql://192.168.40.110:3306/qcl?useSSL=false", "data_processing", mysql_connect)
      .createOrReplaceTempView("data")

    spark.sql("use qcl")

    // City-wide average rent (rounded up), fetched to the driver as a string
    // so it can be spliced into the aggregation query below.
    val avg_money = spark.sql("select ceil(avg(money)) from data").collect()(0).get(0).toString
    println("打印全市的平均租金:" + avg_money)

    // Single-pass conditional aggregation: count above/below-average listings
    // per district in one grouped scan. This also fixes a bug in the previous
    // two-query-plus-inner-join approach, which silently dropped any district
    // whose listings were all on one side of the average.
    val result = spark.sql(
      s"""
         |select
         |downtown,
         |sum(case when money > ${avg_money} then 1 else 0 end) as high_number,
         |sum(case when money < ${avg_money} then 1 else 0 end) as low_number
         |from data
         |group by downtown
         |""".stripMargin)

    // Persist the per-district counts back to MySQL.
    result.write.mode("overwrite")
      .jdbc("jdbc:mysql://192.168.40.110:3306/qcl?useSSL=false", "data_analysis05", mysql_connect)

    // Also store the result in HDFS as a single CSV part file.
    result.repartition(1).write.mode("overwrite")
      .csv("/qcl/data_analysis/data_analysis05.csv")

    spark.close()
  }

}
